Commit cb0a1198 authored by gijs's avatar gijs
Browse files

Start of the generator.

parent 151869c3
from urllib.request import URLError, urlopen
from urllib.parse import urlencode, urljoin
from settings import CACHE_DIR, API_URL, DEBUG, MAX_PAGES, PER_PAGE
import os.path
import os
import json
import datetime
CACHE_DIR = os.path.join(os.path.dirname(__file__), 'cache')
API_URL = "https://gitlab.constantvzw.org/api/v4/"
GROUP_ID = 8
DEBUG = True
MAX_PAGES = 100
PER_PAGE = 100
def debug (msg):
if DEBUG:
......@@ -25,14 +20,16 @@ class ApiError(Exception):
return '(%s) => %s'%(self.url,self.what)
class ApiCall (object):
def __init__ (self, api_path, api_url = API_URL, cache_dir = CACHE_DIR):
def __init__ (self, api_path, query = None, api_url = API_URL, cache_dir = CACHE_DIR, paged = True):
self.paged = paged
self._cache = None
self.api_path = list(map(str, api_path))
self.api_url = api_url
self.cache_dir = cache_dir
self.query = query
@property
def cache_file(self):
def cache_location(self):
return os.path.join(self.cache_dir, '{}.json'.format('.'.join(self.api_path)))
@property
......@@ -41,83 +38,132 @@ class ApiCall (object):
@property
def has_cache(self):
return os.path.exists(self.cache_file)
return os.path.exists(self.cache_location)
def invalidate_cache(self):
if self.has_cache:
os.unlink(self.cache_file)
os.unlink(self.cache_location)
def make_cache(self):
def write_cache(self, data):
try:
obj = self.get_api()
json.dump(obj, open(self.cache_file, 'w'))
with open(self.cache_location, 'w') as h:
h.write(self.prepare_cache(data))
except ApiError as e:
json.dump({
'reason': e.what,
'timestamp': datetime.datetime.now().timestamp()
}, open(self.cache_file, 'w'))
obj = []
}, open(self.cache_location, 'w'))
data = []
self._cache = obj
self._cache = data
def load_cache(self):
def read_cache(self):
debug('Hit cache {}'.format(self.url))
if not self._cache:
obj = json.load(open(self.cache_file, 'r'))
if 'reason' in obj:
self._cache = []
else:
self._cache = obj
with open(self.cache_location, 'r') as h:
self._cache = self.parse_cache(h.read())
return self._cache
"""
Returns values for the call. If the request is paginated go through
all pages
"""
def get_api(self):
page = 1
items = []
while page < MAX_PAGES:
headers, pageitems = self.get_api_page(page)
items.extend(pageitems)
if page >= int(headers['X-Total-Pages']):
return items
else:
page += 1
def get_api_page(self, page=0):
debug('{}, page {}'.format(self.url, page))
def get_api (self):
try:
_, data = self.get_api_url(self.url, self.query)
return data
except ApiError:
return None
def get_api_url(self, url, query = None):
if query:
url = '{}?{}'.format(url, urlencode(query))
try:
q = urlencode({'page': page, 'per_page': PER_PAGE})
url = '{}?{}'.format(self.url, q)
res = urlopen(url)
return ({ k: v for (k,v) in res.getheaders() }, json.loads(res.read()))
return ({ k: v for (k,v) in res.getheaders() }, self.parse_api_result(res.read()))
except URLError as e:
if hasattr(e, 'reason'):
raise ApiError(url, e.reason)
elif hasattr(e, 'code'):
raise ApiError(url, e.code)
# elif hasattr(e, 'code'):
# raise ApiError(url, e.code)
return None
def get (self):
if self.has_cache:
return self.load_cache()
if not self.has_cache:
data = self.get_api()
self.write_cache(data)
return self.read_cache()
def parse_api_result (self, raw):
    # Default: decode the HTTP response bytes to str (UTF-8);
    # subclasses override this to parse structured formats.
    return raw.decode()
def prepare_cache (self, raw):
    # Default: write the value to the cache unchanged;
    # subclasses serialize structured data here.
    return raw
def parse_cache (self, raw):
    # Default: return the cached text unchanged;
    # subclasses deserialize structured data here.
    return raw
class ApiCallJson(ApiCall):
    """ApiCall variant for JSON endpoints: decodes API response bodies
    and round-trips the cache file through JSON."""

    def parse_api_result(self, raw):
        # raw is the bytes body of the HTTP response.
        return json.loads(raw)

    def prepare_cache(self, raw):
        # Serialize the parsed structure for the cache file.
        return json.dumps(raw)

    def parse_cache(self, raw):
        # Restore the structure written by prepare_cache().
        return json.loads(raw)
"""
Returns values for the call. If the request is paginated go through
all pages
"""
def get_api(self):
query = self.query.copy() if self.query else {}
if self.paged:
page = 1
data = []
query['per_page'] = PER_PAGE
max_pages = MAX_PAGES
try:
while page and page < max_pages:
query['page'] = page
headers, page_data = self.get_api_url(self.url, query)
data.extend(page_data)
page = int(headers['X-Next-Page']) if headers['X-Next-Page'] else None
except ApiError:
return data
else:
self.make_cache()
return self.load_cache()
query['per_page'] = 1
_, data = self.get_api_url(self.url, query)
return data
class ApiCallRaw (ApiCall):
    """ApiCall variant for raw (non-JSON) payloads such as file blobs."""
    # NOTE(review): 'multi_page' is not read anywhere visible in this file;
    # the pagination switch used elsewhere is the 'paged' constructor flag —
    # confirm whether this attribute is dead.
    multi_page = False

    @property
    def cache_location(self):
        # Raw payloads are cached with a '.data' suffix instead of '.json'.
        return os.path.join(self.cache_dir, '{}.data'.format('.'.join(self.api_path)))
def get_group (group_id):
    """Single-object call for one group (not paginated)."""
    return ApiCallJson(['groups', group_id], paged=False)
def get_project (project_id):
    """Single-object call for one project (not paginated)."""
    return ApiCallJson(['projects', project_id], paged=False)
# A way to make a call, cache it, and be able to remove the cache when needed.
# Prepares a call: a set of URL parts plus a local cache file.
def group_projects (group = GROUP_ID):
    # NOTE(review): appears to be the pre-refactor variant of get_projects()
    # below (plain ApiCall instead of ApiCallJson) — confirm which one
    # callers still use.
    return ApiCall(['groups', group, 'projects'])
def get_projects (group_id):
    """Paginated call listing all projects of a group."""
    return ApiCallJson(['groups', group_id, 'projects'])
def get_commits (project_id):
    """Paginated call listing a project's commits."""
    return ApiCallJson(['projects', project_id, 'repository', 'commits'])
def commits (project_id = None):
    # NOTE(review): pre-refactor variant of get_commits(); silently
    # returns None when project_id is omitted — confirm callers expect that.
    if project_id is not None:
        return ApiCall(['projects', project_id, 'repository', 'commits'])
def get_tree (project_id, path=None):
    """Build the API call listing a project's repository tree.

    When *path* is given (non-empty), the listing is restricted to that
    subdirectory via the ``path`` query parameter; otherwise the call is
    made with no query at all.
    """
    if path:
        return ApiCallJson(['projects', project_id, 'repository', 'tree'],
                           query={ 'path': path })
    return ApiCallJson(['projects', project_id, 'repository', 'tree'])
def tree (project_id = None):
    # NOTE(review): pre-refactor variant of get_tree(); silently returns
    # None when project_id is omitted — confirm callers expect that.
    if project_id is not None:
        return ApiCall(['projects', project_id, 'repository', 'tree'])
def get_raw (project_id, file_id):
    """Raw blob contents for one file in a project's repository."""
    return ApiCallRaw(['projects', project_id, 'repository', 'blobs', file_id, 'raw'])
\ No newline at end of file
import api
from jinja2 import Environment, FileSystemLoader, select_autoescape
import time
from models import Project
from models import Group
from settings import GROUP_ID, TEMPLATE_DIR, OUTPUT_DIR
import os.path
from os import makedirs
env = Environment(
loader=FileSystemLoader('templates'),
loader=FileSystemLoader(TEMPLATE_DIR),
autoescape=select_autoescape(['html', 'xml'])
)
group = Group(GROUP_ID)
group.get()
# print(group)
# for project in group.projects:
# print(hasattr(project, 'tree'))
# print(project.links)
# print(project.name)
# print(project.tree)
# print(group.projects.models)
def get_projects ():
    """Fetch (or read from cache) the project list for the default group."""
    call = api.group_projects()
    return call.get()
# def get_projects ():
# call = api.group_projects(group_id = GROUP_ID)
# return call.get()
def get_tree (project_id):
    """Fetch (or read from cache) the repository tree of a project."""
    call = api.tree(project_id)
    return call.get()
# def get_tree (project_id):
# call = api.tree(project_id)
# return call.get()
def get_commits (project_id):
    """Fetch (or read from cache) the commit list of a project."""
    call = api.commits(project_id)
    return call.get()
# def get_commits (project_id):
# call = api.commits(project_id)
# return call.get()
def make_local_url (url):
    # Placeholder: currently the identity function; hook for rewriting
    # API URLs into local output paths.
    return url
# def make_local_url (url):
# return url
def parse_project (project):
    """Build a Project model from a raw API project dict (needs 'id')."""
    return Project(id=project['id'], tree = get_tree(project['id']), commits = get_commits(project['id']))
# def parse_project (project):
# return Project(id=project['id'], name=project['name'], tree = get_tree(project['id']), commits = get_commits(project['id']))
print('Loading templates')
projects = [parse_project(project) for project in get_projects()]
# print('Loading templates')
# projects = [parse_project(project) for project in get_projects()]
print('Starting generation')
template = env.get_template('projects.html')
template.stream(projects=projects).dump('output/index.html')
template.stream(projects=group.projects).dump(os.path.join(OUTPUT_DIR, 'index.html'))
print('Generating individual project pages')
project_template = env.get_template('project.html')
for project in group.projects:
project_url = project.links['self']
project_name = os.path.basename(project_url)
project_path = os.path.dirname(project_url)
output_path = os.path.join(OUTPUT_DIR, project_path)
for project in projects:
project_template.stream(project=project).dump('output/{}.html'.format(project.name))
if not os.path.exists(output_path):
makedirs(output_path)
project_template.stream(project=project).dump(os.path.join(output_path, project_name))
print('Generation finished')
def update_project(project_id):
    """Force-refresh the cached commit list of one project.

    Drops the on-disk cache for the project's commits call, then fetches
    again so the cache is warm for the next generation run.
    """
    commits_call = get_commits(project_id)
    commits_call.invalidate_cache()
    commits_call.get()
\ No newline at end of file
# def update_project(project_id):
# call = get_commits(project_id)
# call.invalidate_cache()
# call.get()
\ No newline at end of file
import utils
from utils import make_local_url
from api import ApiCallJson, ApiCallJson, ApiCall, get_commits, get_group, get_projects, get_project, get_tree
import os.path
import markdown
class Project (object):
    """Plain data holder for a project with its tree and commit list.

    Bug fix: the original assignments ended in stray commas
    (``self.id = id,``), which silently wrapped every attribute in a
    1-tuple. The mutable default arguments (``tree=[]``, ``commits=[]``)
    are also replaced by fresh per-instance lists so instances never
    share state.
    """

    def __init__ (self, id=None, name=None, tree=None, readme=None, commits=None):
        self.id = id
        self.name = name
        # Fresh list per instance when the caller passes nothing.
        self.tree = [] if tree is None else tree
        self.readme = readme
        self.commits = [] if commits is None else commits
"""
- Api Call encoded in the 'model'
- Possibility to avoid cache
Model
- constructor inserts data from the API
- way to retrieve attributes: data, __getattr__ () => key in data return
- 'get', get on the api Call
"""
class Model (object):
    """Base wrapper around one API resource.

    Attribute access is routed through a plain ``data`` dict: writes to
    any name other than ``data``/``id`` land in the dict, and reads of
    unknown names fall back to it. Fixes over the original:

    * ``data={}`` mutable default removed — the shared dict was mutated
      via ``__setattr__`` and leaked attributes between instances;
    * duplicate ``self.data = data`` assignment dropped;
    * ``__getattr__`` raised a confusing error via
      ``super().__getattr__`` (``object`` defines no ``__getattr__``);
      it now raises a plain ``AttributeError(name)``.
    """

    def __init__ (self, id, parent_id=None, data=None):
        # Reject missing/falsy ids: every model maps to an API resource.
        if id:
            self.id = id
        else:
            raise ValueError("Need an id.")
        # Fresh dict per instance unless the caller supplied one.
        self.data = {} if data is None else data
        if parent_id:
            # Stored into self.data by __setattr__ below.
            self.parent_id = parent_id

    @property
    def api_call (self):
        # Subclasses return the ApiCall used to fetch this resource.
        raise NotImplementedError()

    def get (self):
        # Fetch from the API (or cache) and merge every field returned
        # into this model via __setattr__.
        data = self.api_call.get()
        if data:
            for k, v in data.items():
                self.__setattr__(k, v)

    def __setattr__ (self, name, value):
        # 'data' and 'id' live on the instance itself; everything else
        # goes into the data dict so __getattr__ can expose it.
        if name == 'data' or name == 'id':
            super().__setattr__(name, value)
        else:
            self.data[name] = value

    def __getattr__ (self, name):
        if name in self.data:
            return self.data[name]
        raise AttributeError(name)
class Collection (object):
    """Ordered, iterable, sized container of Model instances.

    Raw API dicts passed in are wrapped in ``self.model`` (subclasses
    override ``model``). Fix: the ``models=[]`` mutable default argument
    is replaced by ``None`` so the default can never be shared or
    mutated across instances.
    """
    model = Model

    def __init__ (self, models=None):
        self.models = []
        for model in (models or []):
            self.insert(model)

    def insert (self, model):
        # Accept either a ready model instance or a raw API dict;
        # raw dicts must carry an 'id' key.
        if not isinstance(model, self.model):
            model = self.model(model['id'], data=model)
        self.models.append(model)

    def __iter__ (self):
        return iter(self.models)

    def __len__ (self):
        return len(self.models)
class Commit (Model):
    """A single commit; all fields come from the raw API dict via Model."""
    pass
    # @property
    # def api_call (self):
    #     return ApiCallJson(['project', self.project_id, 'commits', self.id])
class Project (Model):
    @property
    def links (self):
        """Local URLs for this project, keyed by link name.

        The page URL is derived from the repository name: split on '.',
        the last part becomes the HTML filename and the leading parts
        the directory path. Names with no prefix land under 'work'; a
        leading 'osp' prefix (old naming scheme) is dropped.

        Fix: the scraped diff left both the old return body
        (``utils.make_local_url('{}.html'.format(self.name))``, which
        ignored the path/name computed above) and the new one in place;
        only the version that uses the computed values is kept.
        """
        parts = self.name.split('.')
        path = parts[:-1]
        name = '{}.html'.format(parts[-1])
        if not path:
            # Assume work if no prefix was found
            path = ['work']
        elif path[0] == 'osp':
            path.pop(0)
        return {
            'self': make_local_url(os.path.join(*path, name))
        }
@property
def commits (self):
    # Lazily fetch and memoize this project's commit collection.
    # NOTE: '_commits' is routed into self.data by Model.__setattr__
    # and found again through Model.__getattr__ / hasattr().
    if not hasattr(self, '_commits'):
        self._commits = Commits(get_commits(project_id=self.id).get())
    return self._commits
@property
def tree (self):
    # Lazily fetch and memoize this project's repository tree.
    if not hasattr(self, '_tree'):
        self._tree = Tree(get_tree(project_id=self.id).get())
    return self._tree
@property
def readme (self):
    # Return a readme
    # TODO: not implemented yet — always returns None.
    pass
@property
def api_url (self):
    # NOTE(review): despite the name, this returns the fetched project
    # data (get_project(...).get()), not a URL — confirm intent before
    # relying on it.
    return get_project(self.id).get()
class Projects (Collection):
    """Collection of Project models."""
    model = Project
class Group (Model):
    """A GitLab group; fetches its own data and lazily loads its projects."""

    @property
    def api_call (self):
        # API call used by Model.get() to populate this group's fields.
        return get_group(self.id)

    @property
    def projects (self):
        # Lazily build and memoize the project collection.
        # NOTE: '_projects' is routed into self.data by Model.__setattr__
        # and found again through Model.__getattr__ / hasattr().
        if not hasattr(self, '_projects'):
            self._projects = Projects(get_projects(self.id).get())
        return self._projects
class Commits (Collection):
    """Collection of Commit models."""
    model = Commit
"""
Wrapper around a folder from the api.
- can display the foldername
- an iterator through the contents
"""
# class TreeFolder(object):
# pass
# class TreeFile():
# def __init__ (self, name, data):
# self.name = name
# self.data = data
# def __str__ (self):
# return self.data
# class TreeFileMarkdown(TreeFile):
# def __str__ (self):
# return markdown.markdown(self.data)
# class TreeFileImage(TreeFile):
# def __str__ (self):
# return self.path
# def thumbnail(width):
# pass
class TreeEntry (Model):
    """One entry (file or folder) of a repository tree listing."""
    pass
class Tree (Collection):
    """Collection of TreeEntry models."""
    model = TreeEntry
# class TreeEntry (object):
# props = ['id', 'name', 'type', 'path', 'mode', 'project']
# def __init__ (self, **kwargs):
# for k in kwargs:
# if k in self.props:
# self.__setattr__(k, kwargs[k])
# def __str__ (self):
# return self.get()
# def get (self):
# print('Trying to load', self.project.id)
# return raw(self.project.id, self.id).get()
\ No newline at end of file
import os.path

# All paths are resolved relative to this settings file's directory.
BASE_DIR = os.path.dirname(__file__)
CACHE_DIR = os.path.join(BASE_DIR, 'cache')         # cached API responses
TEMPLATE_DIR = os.path.join(BASE_DIR, 'templates')  # jinja2 templates
OUTPUT_DIR = os.path.join(BASE_DIR, 'output')       # generated site output
API_URL = "https://gitlab.constantvzw.org/api/v4/"  # GitLab API root
GROUP_ID = 8        # id of the group whose projects are rendered
DEBUG = False       # when True, debug() prints each API/cache access
MAX_PAGES = 999     # hard cap on pagination loops
PER_PAGE = 100      # items requested per API page
\ No newline at end of file
{{ project.name }}
{% if project.readme %}
{{ project.readme }}
{% endif %}
<ul>
{% for file in project.tree %}
<li>{{ file.name }}</li>{% endfor %}
......@@ -6,5 +9,6 @@
<ul>
{% for commit in project.commits %}
<li>{{ commit.message }}</li>{% endfor %}
<li>{{ commit.author_name }}: {{ commit.message }}</li>
{% endfor %}
</ul>
\ No newline at end of file
<ul>
{% for project in projects %}
<li><a href="{{ project.links.self }}">{{ project.name }}</a></li>{% endfor %}
<li><a href="{{ project.links.self }}">{{ project.name }} ({{ project.commits|length }} commits)</a></li>{% endfor %}
</ul>
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment