From 54933e8d5e1f5ee8de5908f665dcc56969e0e88f Mon Sep 17 00:00:00 2001
From: Alessio Igor Bogani <alessio.bogani@elettra.eu>
Date: Wed, 17 Feb 2021 17:54:14 +0100
Subject: [PATCH] Introduce inau-dispatcher and inau-restapi

---
 .gitignore           |   1 +
 doc/gitlab_event.txt |  71 ++++++++
 etc/inau.sql         | 359 ---------------------------------------
 etc/skel/.curlrc     |   2 +
 inau-dispatcher.py   | 393 +++++++++++++++++++++++++++++++++++++++++++
 inau-restapi.py      | 207 +++++++++++++++++++++++
 lib/db.py            | 127 ++++++++++++++
 lib/schema.py        |  95 +++++++++++
 lib/text.py          |  48 ++++++
 9 files changed, 944 insertions(+), 359 deletions(-)
 create mode 100644 .gitignore
 create mode 100644 doc/gitlab_event.txt
 delete mode 100644 etc/inau.sql
 create mode 100644 etc/skel/.curlrc
 create mode 100755 inau-dispatcher.py
 create mode 100755 inau-restapi.py
 create mode 100644 lib/db.py
 create mode 100644 lib/schema.py
 create mode 100644 lib/text.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..da49f6b
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+lib/__pycache__/
diff --git a/doc/gitlab_event.txt b/doc/gitlab_event.txt
new file mode 100644
index 0000000..00ad68d
--- /dev/null
+++ b/doc/gitlab_event.txt
@@ -0,0 +1,71 @@
+{'object_kind': 'push',
+'event_name': 'push',
+'before': '67475d770a571dec7da457bd86de73447bd92ccb',
+'after': '2900aab369da0f3e48012ef3fc30eb042752f118', 
+'ref': 'refs/heads/master', 
+'checkout_sha': '2900aab369da0f3e48012ef3fc30eb042752f118', 
+'message': None, 
+'user_id': 13, 
+'user_name': 'Alessio Igor Bogani', 
+'user_username': 'alessio.bogani', 
+'user_email': '', 
+'user_avatar': 'https://secure.gravatar.com/avatar/f13312e20198c46ed5356c9c7fa1bc2a?s=80&d=identicon', 
+'project_id': 297, 
+'project': {'id': 297,
+	 'name': 'fake',
+	 'description': 'Fake device, just for testing', 
+	 'web_url': 'https://gitlab.elettra.eu/cs/ds/fake', 
+	 'avatar_url': None, 
+	 'git_ssh_url': 'git@gitlab.elettra.eu:cs/ds/fake.git', 
+	 'git_http_url': 'https://gitlab.elettra.eu/cs/ds/fake.git', 
+	 'namespace': 'ds', 
+	 'visibility_level': 20, 
+	 'path_with_namespace': 'cs/ds/fake', 
+	 'default_branch': 'master', 
+	 'ci_config_path': None, 
+	 'homepage': 'https://gitlab.elettra.eu/cs/ds/fake', 
+	 'url': 'git@gitlab.elettra.eu:cs/ds/fake.git', 
+	 'ssh_url': 'git@gitlab.elettra.eu:cs/ds/fake.git', 
+	 'http_url': 'https://gitlab.elettra.eu/cs/ds/fake.git'
+	 },
+'commits': [
+	{'id': '2900aab369da0f3e48012ef3fc30eb042752f118', 
+	'message': 'Update Makefile\n', 
+	'title': 'Update Makefile', 
+	'timestamp': '2020-11-16T13:54:23+01:00', 
+	'url': 'https://gitlab.elettra.eu/cs/ds/fake/-/commit/2900aab369da0f3e48012ef3fc30eb042752f118',
+	'author': {'name': 'Alessio Igor Bogani', 
+		'email': 'alessio.bogani@elettra.eu'},
+	'added': [],
+	'modified': ['Makefile'],
+	'removed': []},
+	 {'id': '804ccdf62dfcc8605b550e7ea9c456432798c678', 
+	 'message': 'test 25\n',
+	 'title': 'test 25', 
+	 'timestamp': '2020-09-23T14:06:57+02:00', 
+	 'url': 'https://gitlab.elettra.eu/cs/ds/fake/-/commit/804ccdf62dfcc8605b550e7ea9c456432798c678', 
+	 'author': {'name': 'Alessio Igor Bogani',
+	 	'email': 'alessio.bogani@elettra.eu'},
+	 'added': [],
+	 'modified': ['src/Fake.cpp'], 
+	 'removed': []}, 
+	 {'id': '67475d770a571dec7da457bd86de73447bd92ccb', 
+	 'message': 'test 24\n', 
+	 'title': 'test 24', 
+	 'timestamp': '2020-09-23T09:59:28+02:00', 
+	 'url': 'https://gitlab.elettra.eu/cs/ds/fake/-/commit/67475d770a571dec7da457bd86de73447bd92ccb', 
+	 'author': {'name': 'Alessio Igor Bogani', 
+	 	'email': 'alessio.bogani@elettra.eu'}, 
+	 'added': [], 
+	 'modified': ['src/Fake.cpp'], 
+	 'removed': []}
+	 ],
+'total_commits_count': 3, 
+'push_options': {}, 
+'repository': {'name': 'fake', 
+	'url': 'git@gitlab.elettra.eu:cs/ds/fake.git', 
+	'description': 'Fake device, just for testing', 
+	'homepage': 'https://gitlab.elettra.eu/cs/ds/fake', 
+	'git_http_url': 'https://gitlab.elettra.eu/cs/ds/fake.git', 
+	'git_ssh_url': 'git@gitlab.elettra.eu:cs/ds/fake.git', 
+	'visibility_level': 20}}
diff --git a/etc/inau.sql b/etc/inau.sql
deleted file mode 100644
index d95e1b7..0000000
--- a/etc/inau.sql
+++ /dev/null
@@ -1,359 +0,0 @@
--- MySQL dump 10.13  Distrib 5.7.28, for Linux (x86_64)
---
--- Host: localhost    Database: inau
--- ------------------------------------------------------
--- Server version	5.7.28-0ubuntu0.18.04.4
-
-/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
-/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
-/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
-/*!40101 SET NAMES utf8 */;
-/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
-/*!40103 SET TIME_ZONE='+00:00' */;
-/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
-/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
-/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
-
---
--- Current Database: `inau`
---
-
-CREATE DATABASE /*!32312 IF NOT EXISTS*/ `inau` /*!40100 DEFAULT CHARACTER SET latin1 */;
-
-USE `inau`;
-
---
--- Table structure for table `architectures`
---
-
-DROP TABLE IF EXISTS `architectures`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `architectures` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `name` varchar(255) NOT NULL,
-  PRIMARY KEY (`id`),
-  UNIQUE KEY `name` (`name`)
-) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `architectures`
---
-
-LOCK TABLES `architectures` WRITE;
-/*!40000 ALTER TABLE `architectures` DISABLE KEYS */;
-INSERT INTO `architectures` VALUES (1,'ppc'),(2,'i686'),(3,'x86_64');
-/*!40000 ALTER TABLE `architectures` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `artifacts`
---
-
-DROP TABLE IF EXISTS `artifacts`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `artifacts` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `build_id` int(11) NOT NULL,
-  `hash` varchar(255) NOT NULL,
-  `filename` varchar(255) NOT NULL,
-  PRIMARY KEY (`id`),
-  KEY `build_id` (`build_id`),
-  CONSTRAINT `artifacts_ibfk_1` FOREIGN KEY (`build_id`) REFERENCES `builds` (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=238 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
-
---
--- Table structure for table `builders`
---
-
-DROP TABLE IF EXISTS `builders`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `builders` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `platform_id` int(11) NOT NULL,
-  `name` varchar(255) NOT NULL,
-  PRIMARY KEY (`id`),
-  KEY `platform_id` (`platform_id`),
-  CONSTRAINT `builders_ibfk_1` FOREIGN KEY (`platform_id`) REFERENCES `platforms` (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=8 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `builders`
---
-
-LOCK TABLES `builders` WRITE;
-/*!40000 ALTER TABLE `builders` DISABLE KEYS */;
-INSERT INTO `builders` VALUES (1,1,'aringa'),(2,2,'ken'),(3,3,'ken64'),(4,4,'sakurambo'),(5,5,'kenng64'),(6,6,'gaia'),(7,7,'sakurambo64');
-/*!40000 ALTER TABLE `builders` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `builds`
---
-
-DROP TABLE IF EXISTS `builds`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `builds` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `repository_id` int(11) NOT NULL,
-  `tag` varchar(255) NOT NULL,
-  `date` datetime NOT NULL,
-  PRIMARY KEY (`id`),
-  KEY `repository_id` (`repository_id`),
-  CONSTRAINT `builds_ibfk_1` FOREIGN KEY (`repository_id`) REFERENCES `repositories` (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=230 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `builds`
---
-
---
--- Table structure for table `distributions`
---
-
-DROP TABLE IF EXISTS `distributions`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `distributions` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `name` varchar(255) NOT NULL,
-  `version` varchar(255) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=8 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `distributions`
---
-
-LOCK TABLES `distributions` WRITE;
-/*!40000 ALTER TABLE `distributions` DISABLE KEYS */;
-INSERT INTO `distributions` VALUES (1,'Debian','3.0'),(2,'Ubuntu','7.10'),(3,'Ubuntu','10.04'),(4,'Ubuntu','14.04'),(5,'Ubuntu','16.04'),(6,'Ubuntu','18.04'),(7,'Ubuntu','10.04-caen');
-/*!40000 ALTER TABLE `distributions` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `facilities`
---
-
-DROP TABLE IF EXISTS `facilities`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `facilities` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `name` varchar(255) NOT NULL,
-  PRIMARY KEY (`id`),
-  UNIQUE KEY `name` (`name`)
-) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `facilities`
---
-
-LOCK TABLES `facilities` WRITE;
-/*!40000 ALTER TABLE `facilities` DISABLE KEYS */;
-INSERT INTO `facilities` VALUES (7,'diproi'),(1,'elettra'),(2,'fermi'),(9,'ldm'),(6,'magnedyn'),(3,'padres'),(8,'terafermi'),(5,'timer'),(4,'timex');
-/*!40000 ALTER TABLE `facilities` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `hosts`
---
-
-DROP TABLE IF EXISTS `hosts`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `hosts` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `facility_id` int(11) NOT NULL,
-  `server_id` int(11) NOT NULL,
-  `name` varchar(255) NOT NULL,
-  PRIMARY KEY (`id`),
-  UNIQUE KEY `name` (`name`),
-  KEY `facility_id` (`facility_id`),
-  KEY `server_id` (`server_id`),
-  CONSTRAINT `hosts_ibfk_1` FOREIGN KEY (`facility_id`) REFERENCES `facilities` (`id`),
-  CONSTRAINT `hosts_ibfk_2` FOREIGN KEY (`server_id`) REFERENCES `servers` (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=138 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `hosts`
---
-
---
--- Table structure for table `installations`
---
-
-DROP TABLE IF EXISTS `installations`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `installations` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `host_id` int(11) NOT NULL,
-  `user_id` int(11) NOT NULL,
-  `build_id` int(11) NOT NULL,
-  `type` int(11) NOT NULL,
-  `date` datetime NOT NULL,
-  PRIMARY KEY (`id`),
-  KEY `host_id` (`host_id`),
-  KEY `user_id` (`user_id`),
-  KEY `build_id` (`build_id`),
-  CONSTRAINT `installations_ibfk_1` FOREIGN KEY (`host_id`) REFERENCES `hosts` (`id`),
-  CONSTRAINT `installations_ibfk_2` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`),
-  CONSTRAINT `installations_ibfk_3` FOREIGN KEY (`build_id`) REFERENCES `builds` (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=4982 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `installations`
---
-
---
--- Table structure for table `platforms`
---
-
-DROP TABLE IF EXISTS `platforms`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `platforms` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `distribution_id` int(11) NOT NULL,
-  `architecture_id` int(11) NOT NULL,
-  PRIMARY KEY (`id`),
-  KEY `distribution_id` (`distribution_id`),
-  KEY `architecture_id` (`architecture_id`),
-  CONSTRAINT `platforms_ibfk_1` FOREIGN KEY (`distribution_id`) REFERENCES `distributions` (`id`),
-  CONSTRAINT `platforms_ibfk_2` FOREIGN KEY (`architecture_id`) REFERENCES `architectures` (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=8 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `platforms`
---
-
-LOCK TABLES `platforms` WRITE;
-/*!40000 ALTER TABLE `platforms` DISABLE KEYS */;
-INSERT INTO `platforms` VALUES (1,2,1),(2,3,2),(3,3,3),(4,4,3),(5,5,3),(6,6,3),(7,7,3);
-/*!40000 ALTER TABLE `platforms` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `providers`
---
-
-DROP TABLE IF EXISTS `providers`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `providers` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `url` varchar(255) NOT NULL,
-  PRIMARY KEY (`id`),
-  UNIQUE KEY `url` (`url`)
-) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `providers`
---
-
-LOCK TABLES `providers` WRITE;
-/*!40000 ALTER TABLE `providers` DISABLE KEYS */;
-INSERT INTO `providers` VALUES (6,'https://github.com/ELETTRA-SincrotroneTrieste/'),(3,'ssh://git@gitlab.elettra.eu:/alessio.bogani/'),(1,'ssh://git@gitlab.elettra.eu:/cs/ds/'),(4,'ssh://git@gitlab.elettra.eu:/cs/etc/browser/'),(2,'ssh://git@gitlab.elettra.eu:/cs/gui/'),(5,'ssh://git@gitlab.elettra.eu:/cs/util/');
-/*!40000 ALTER TABLE `providers` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `repositories`
---
-
-DROP TABLE IF EXISTS `repositories`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `repositories` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `provider_id` int(11) NOT NULL,
-  `platform_id` int(11) NOT NULL,
-  `type` int(11) NOT NULL,
-  `name` varchar(255) NOT NULL,
-  `destination` varchar(255) NOT NULL,
-  PRIMARY KEY (`id`),
-  KEY `provider_id` (`provider_id`),
-  KEY `platform_id` (`platform_id`),
-  CONSTRAINT `repositories_ibfk_1` FOREIGN KEY (`provider_id`) REFERENCES `providers` (`id`),
-  CONSTRAINT `repositories_ibfk_2` FOREIGN KEY (`platform_id`) REFERENCES `platforms` (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=193 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `repositories`
---
-
---
--- Table structure for table `servers`
---
-
-DROP TABLE IF EXISTS `servers`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `servers` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `platform_id` int(11) NOT NULL,
-  `name` varchar(255) NOT NULL,
-  `prefix` varchar(255) NOT NULL,
-  PRIMARY KEY (`id`),
-  KEY `platform_id` (`platform_id`),
-  CONSTRAINT `servers_ibfk_1` FOREIGN KEY (`platform_id`) REFERENCES `platforms` (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=35 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `servers`
---
-
---
--- Table structure for table `users`
---
-
-DROP TABLE IF EXISTS `users`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `users` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `name` varchar(255) NOT NULL,
-  `admin` tinyint(1) NOT NULL,
-  PRIMARY KEY (`id`),
-  UNIQUE KEY `name` (`name`)
-) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `users`
---
-
-LOCK TABLES `users` WRITE;
-/*!40000 ALTER TABLE `users` DISABLE KEYS */;
-INSERT INTO `users` VALUES (1,'alessio.bogani',1),(2,'lorenzo.pivetta',1);
-/*!40000 ALTER TABLE `users` ENABLE KEYS */;
-UNLOCK TABLES;
-/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
-
-/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
-/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
-/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
-/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
-/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
-/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-
--- Dump completed on 2020-05-06 12:37:09
diff --git a/etc/skel/.curlrc b/etc/skel/.curlrc
new file mode 100644
index 0000000..fbd11c1
--- /dev/null
+++ b/etc/skel/.curlrc
@@ -0,0 +1,2 @@
+header "Accept: text/plain"
+write-out \n
diff --git a/inau-dispatcher.py b/inau-dispatcher.py
new file mode 100755
index 0000000..78342e9
--- /dev/null
+++ b/inau-dispatcher.py
@@ -0,0 +1,393 @@
+#!/usr/bin/env python3
+
+from http.server import BaseHTTPRequestHandler, HTTPServer
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker, exc
+from multiprocessing import Process, Queue
+from enum import Enum, IntEnum
+import os
+import signal
+import json
+import sys
+import logging
+import logging.handlers
+import argparse
+import subprocess
+import paramiko
+import hashlib
+import shutil
+import datetime
+#import urllib.parse
+from smtplib import SMTP
+from email.mime.text import MIMEText
+#from distutils.version import StrictVersion
+
+from lib import db
+
+Session = sessionmaker()
+
+allbuilders = {}
+#repositories = []
+
+def __sendEmail(to_addrs, subject, body):
+    if to_addrs:
+        with SMTP(args.smtpserver + "." + args.smtpdomain, port=25) as smtpClient:
+            sender = args.smtpsender + "@" + args.smtpdomain
+            msg = MIMEText(body)
+            msg['Subject'] = "INAU. " + subject
+            msg['From'] = sender
+            msg['To'] = ', '.join(to_addrs)
+            smtpClient.sendmail(from_addr=sender, to_addrs=list(to_addrs), msg=msg.as_string())
+
+def sendEmail(session, recipients, subject, body):
+    users = set()
+    for user in session.query(db.Users).filter(db.Users.notify==True).all():
+        users.add(user.name + "@" + args.smtpdomain)
+    to_addrs = set(recipients).intersection(users)
+    __sendEmail(to_addrs, subject, body)
+
+def sendEmailAdmins(session, subject, body):
+    to_addrs = set()
+    for admin in session.query(db.Users).filter(db.Users.admin==True).all():
+        to_addrs.add(admin.name + "@" + args.smtpdomain)
+    __sendEmail(to_addrs, subject, body)
+
+class JobType(IntEnum):
+    kill = 0
+    build = 1
+    update = 2
+
+class Job:
+    def __init__(self, type, repository_name=None, repository_url=None, repository_type=None,
+            platform_id=None, build_tag=None, build_id=None, emails=None):
+        self.type = type
+        self.repository_name = repository_name
+        self.repository_url = repository_url
+        self.repository_type = repository_type
+        self.build_tag = build_tag
+        self.platform_id = platform_id
+        self.build_id = build_id
+        self.emails = emails
+
+class Builder:
+    def __init__(self, name):
+        self.name = name
+        self.queue = Queue()
+        self.process = Process(target=self.build, name=name)
+        self.process.start()
+    def build(self):
+        print("Parte buidler di " + self.name) # FIXME Debug
+        while True:
+            try:
+                print("buidler di " + self.name + " in attesa...") # FIXME Debug
+                job = self.queue.get()
+
+                if job.type == JobType.kill:
+                    print("Si ferma buodler di " + self.name) # FIXME Debug
+                    break
+
+                print("buidler di " + self.name + " in azione... su: ") # FIXME Debug
+                print(job.type) # FIXME Debug
+                print(job.repository_name) # FIXME Debug
+                print(job.repository_url) # FIXME Debug
+                print(job.repository_type) # FIXME Debug
+                print(job.build_tag) # FIXME Debug
+                print(job.platform_id) # FIXME Debug
+                print(job.build_id) # FIXME Debug
+                print(job.emails) # FIXME Debug
+
+                engine.dispose()
+                session = Session()
+
+                try:
+                    platdir = args.repo + '/' + str(job.platform_id)
+                    builddir = platdir + "/" + job.repository_name
+                    if not os.path.isdir(platdir):
+                        os.mkdir(platdir)
+                    if os.path.isdir(builddir):
+                        subprocess.run(["git -C " + builddir + " remote update"], shell=True, check=True)
+                        subprocess.run(["git -C " + builddir + " submodule update --remote --force --recursive"], shell=True, check=True)
+                    else:
+                        ret = subprocess.run(["git clone --recurse-submodule " + job.repository_url + " " + builddir], shell=True, check=True)
+                    subprocess.run(["git -C " + builddir + " reset --hard " + job.build_tag], shell=True, check=True)
+                
+                    if job.type == JobType.update:
+                        continue
+
+                    with paramiko.SSHClient() as sshClient:
+                        sshClient.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+                        sshClient.connect(hostname=self.name, port=22, username="inau",
+                                key_filename="/home/inau/.ssh/id_rsa")
+                        _, raw, _ = sshClient.exec_command("(source /etc/profile; cd " + builddir 
+                                            + " && (test -f *.pro && qmake && cuuimake --plain-text-output);"
+                                            + " make -j`getconf _NPROCESSORS_ONLN`) 2>&1")
+                        status = raw.channel.recv_exit_status()
+                        output = raw.read().decode('latin-1') # utf-8 is rejected by Mysql despite the right character set is configured
+
+                    if job.build_id:
+                        build = session.query(db.Builds).filter(db.Builds.id==job.build_id).one()
+                        build.date = datetime.datetime.now()
+                        build.status = status
+                        build.output = output
+                        session.commit()
+
+                    outcome = job.repository_name + " " + os.path.basename(job.build_tag) 
+                    if status != 0:
+                        outcome += ": built failed on " + self.name
+                    else:
+                        outcome += ": built successfully on " + self.name
+                        if job.build_id:
+                            if job.repository_type == db.RepositoryType.cplusplus or job.repository_type == db.RepositoryType.python \
+                                    or job.repository_type == db.RepositoryType.shellscript:
+                                basedir = builddir + "/bin/"
+                            elif job.repository_type == db.RepositoryType.configuration:
+                                basedir = builddir + "/etc/"
+                            else:
+                                raise Exception('Invalid type')
+
+                            for r, d, f in os.walk(basedir):
+                                dir = ""
+                                if r != basedir:
+                                    dir = os.path.basename(r) + "/"
+                                artifacts = []
+                                for file in f:
+                                    hashFile = ""
+                                    with open(basedir + dir + file,"rb") as fd:
+                                        data = fd.read()
+                                        hashFile = hashlib.sha256(data).hexdigest()
+                                        if not os.path.isfile(args.store + hashFile):
+                                            shutil.copyfile(basedir + dir + file, args.store + hashFile, follow_symlinks=False)
+                                    artifacts.append(db.Artifacts(build_id=job.build_id, hash=hashFile, filename=dir+file))
+                                session.add_all(artifacts)
+                                session.commit()
+
+                    sendEmail(session, job.emails, outcome, output)
+
+                except subprocess.CalledProcessError as c:
+                    print("C 1:", c)    # TODO
+                except Exception as e:
+                    session.rollback()
+                    print("E 1:", e, type(e))    # TODO
+                finally:
+                    session.close()
+
+            # TODO Come funzione in background?????
+            except KeyboardInterrupt as k:
+                break
+            except Exception as e:
+                print("E 2: ", e)    # TODO
+
+
+def signalHandler(signalNumber, frame):
+    reconcile()
+
+def reconcile():
+    logger.info('Reconciling...')
+
+    session = Session()
+
+    try:
+#        global allbuilders, repositories
+        global allbuilders
+
+        newbuilders = {}
+        oldbuilders = {}
+        for b in session.query(db.Builders).all():
+            try:
+                newbuilders[b.platform_id].append(Builder(b.name))
+            except KeyError:
+                newbuilders[b.platform_id] = [Builder(b.name)]
+        oldbuilders = allbuilders
+        allbuilders = newbuilders
+
+        for oldbuilder in oldbuilders.values():
+            for b in oldbuilder:
+                b.queue.put(Job(type=JobType.kill))
+                b.process.join()
+
+#        newrepositories = []
+#        for repository in session.query(db.Repositories2).all():
+#            newrepositories.append(repository)
+#        repositories = newrepositories
+#
+#        for repo in session.query(db.Repositories2).join(db.Providers). \
+#                with_entities(db.Repositories2.id, db.Repositories2.name, db.Repositories2.type, db.Providers.url).all():
+#            req = requests.get('https://gitlab.elettra.eu/api/v4/projects/' 
+#                    + urllib.parse.quote(repo.name, safe='') + '/repository/tags')
+#            data = req.json()
+#            if req.status_code == 200:
+#                # Retrieve commited tags
+#                ctags = []
+#                for tag in data:
+#                    if tag['target'] != tag['commit']['id']:
+#                        ctags.append(tag['name'])
+#                ctags.sort(key=StrictVersion)
+#
+#                for platform_id, builders in allbuilders.items():
+#                    builds = session.query(db.Builds).filter(db.Builds.repository_id==repo.id, 
+#                            db.Builds.platform_id==platform_id).all()
+#                    # Retrieve builded tags
+#                    btags = []
+#                    for build in builds:
+#                        btags.append(build.tag)
+#                    btags.sort(key=StrictVersion)
+#
+#                    mtags = list(set(ctags).difference(set(btags)))
+#                    mtags.sort(key=StrictVersion)
+#                    
+#                    if mtags:
+#                        i = ctags.index(mtags[0])
+#                        if i:
+#                            # Re-build the previous built version
+#                            idx = builders.index(min(builders, key=lambda x:x.queue.qsize()))
+#                            builders[idx].queue.put(Job(type=JobType.build, repository_name = repo.name,
+#                                repository_url = repo.url + ":" + repo.name, repository_type = repo.type, 
+#                                platform_id = platform_id, build_tag = ctags[i-1]))
+#
+#                        # Build missing tags
+#                        emails = []
+#                        for mtag in mtags:
+#                            idx = builders.index(min(builders, key=lambda x:x.queue.qsize()))
+#                            emails.clear()
+#                            for tag in data:
+#                                if tag['name'] == mtag:
+#                                    emails = [tag['commit']['author_email']]
+#                                    break
+#                            build = db.Builds(repository_id=repo.id, platform_id=platform_id, tag=mtag)
+#                            session.add(build)
+#                            session.commit()
+#   
+#                            builders[idx].queue.put(Job(type=JobType.build, repository_name = repo.name,
+#                                repository_url = repo.url + ":" + repo.name, repository_type = repo.type, 
+#                                platform_id = platform_id, build_tag = mtag, build_id = build.id, emails=emails))
+#
+    except Exception as e:
+        session.rollback()
+        print("E 3: ", e)     # TODO
+    finally:
+        session.close()
+
+class Server(BaseHTTPRequestHandler):
+    def do_POST(self):
+        content_length = int(self.headers['Content-Length']) 
+        post_data = self.rfile.read(content_length)
+
+        if self.headers['Content-Type'] != 'application/json':
+            self.send_response(415)
+            self.end_headers()
+            return
+
+        post_json = json.loads(post_data.decode('utf-8'))
+        print(post_json) # FIXME DEBUG
+
+        # Tag deletion
+        if post_json['after'] == '0000000000000000000000000000000000000000':
+            self.send_response(415)
+            self.end_headers()
+            return
+
+        # Check if the tag is lightweight
+        if post_json['after'] == post_json['commits'][0]['id']:
+            self.send_response(400)
+            self.end_headers()
+            return
+
+        builds = []
+        rn = ''
+        rt = ''
+
+        session = Session()
+        for r in session.query(db.Repositories).filter(db.Repositories.name==post_json['project']['path_with_namespace']).all():
+            rn = r.name
+            rt = r.type
+            if r.name == "cs/ds/makefiles" and self.headers['X-Gitlab-Event'] == 'Push Hook' and post_json['event_name'] == 'push':
+                jt = JobType.update 
+            elif self.headers['X-Gitlab-Event'] == 'Tag Push Hook' and post_json['event_name'] == 'tag_push':
+                jt = JobType.build
+            else:
+                self.send_response(400)
+                self.end_headers()
+                session.close()
+                return
+
+            builds.append(db.Builds(repository_id=r.id, platform_id=r.platform_id, tag=post_json['ref']))
+               
+        if not builds:
+            self.send_response(404)
+            self.end_headers()
+            session.close()
+            return
+
+        if jt == JobType.build:
+            try:
+                session.add_all(builds)
+                session.commit()
+            except:
+                session.rollback()
+                session.close()
+                self.send_response(500)
+                self.end_headers()
+                return
+       
+        for build in builds:
+            print('Assign the job to the builder with shortest queue length...')
+            idx = allbuilders[build.platform_id].index(min(allbuilders[build.platform_id], 
+                key=lambda x:x.queue.qsize()))
+            allbuilders[build.platform_id][idx].queue.put(Job(type=jt, repository_name = rn, 
+                repository_url = post_json['project']['http_url'], repository_type = rt, 
+                platform_id = build.platform_id, build_tag=post_json['ref'], build_id=build.id, 
+                emails=[post_json['commits'][0]['author']['email'], post_json['user_email']]))
+
+        self.send_response(200)
+        self.end_headers()
+
+        session.close()
+
+def run(address, port, server_class=HTTPServer, handler_class=Server):
+    logger.info('Starting...')
+    server_address = (address, port)
+    httpd = server_class(server_address, handler_class)
+    try:
+        httpd.serve_forever()
+    except KeyboardInterrupt:
+        pass
+    httpd.server_close()
+    logger.info('Stopping...')
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--db", type=str, help='Database URI to connect to', required=True)
+    parser.add_argument('--bind', type=str, default='localhost', help='IP Address or hostname to bind to')
+    parser.add_argument('--port', type=int, default=443, help='Port to listen to')
+    parser.add_argument("--store", type=str, default='/scratch/build/files-store/')
+    parser.add_argument("--repo", type=str, default='/scratch/build/repositories/')
+    parser.add_argument("--smtpserver", type=str, default="smtp")
+    parser.add_argument("--smtpsender", type=str, default="noreply")
+    parser.add_argument("--smtpdomain", type=str, default="elettra.eu")
+    args = parser.parse_args()
+
+    if os.getpgrp() == os.tcgetpgrp(sys.stdout.fileno()):
+        # Executed in foreground (Development)
+        logging.basicConfig(level=logging.INFO)
+        engine = create_engine(args.db, pool_pre_ping=True, echo=True)
+    else:
+        # Executed in background (Production)
+        syslog_handler = logging.handlers.SysLogHandler(address='/dev/log')
+        logging.basicConfig(level=logging.INFO, handlers=[syslog_handler])
+        engine = create_engine(args.db, pool_pre_ping=True, echo=False)
+
+    logger = logging.getLogger('inauDispatcher')
+    
+    Session.configure(bind=engine)
+
+    reconcile()
+
+    signal.signal(signal.SIGUSR1, signalHandler)
+
+    if args.bind:
+            run(args.bind,args.port)
+
+    # FIXME It is necessary?
+    for platform_id, builders in allbuilders.items():
+        for builder in builders:
+            builder.process.join()
diff --git a/inau-restapi.py b/inau-restapi.py
new file mode 100755
index 0000000..cdaf0f6
--- /dev/null
+++ b/inau-restapi.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python3
+
+#from flask import Flask, make_response, request, session
+#from flask_classful import FlaskView, route
+#from marshmallow import Schema, pre_load, post_load, post_dump, fields, ValidationError
+#from webargs.flaskparser import use_kwargs
+#from sqlalchemy import create_engine
+#from sqlalchemy.orm import scoped_session, sessionmaker, exc
+#from werkzeug.exceptions import HTTPException, Unauthorized, Forbidden, InternalServerError, MethodNotAllowed, BadRequest, UnprocessableEntity, NotFound, PreconditionFailed, UnsupportedMediaType
+#from enum import Enum, IntEnum
+#from datetime import timedelta
+#import argparse
+#import uuid
+#import json
+#import ldap
+#import base64
+#from lib import text, db, schema
+
+#app = Flask(__name__)
+#app.secret_key = str(uuid.uuid4())
+#dbsession_factory = sessionmaker()
+
+#class AuthenticationType(Enum):
+#    USER = 0,
+#    ADMIN = 1
+
+#def authenticate(authtype, dbsession, request):
+#    session.permanent = True
+#    if "username" not in session:
+#        if request.headers.get('Authorization') == None:
+#            raise Unauthorized()
+#        split = request.headers.get('Authorization').strip().split(' ')
+#        username, password = base64.b64decode(split[1]).decode().split(':', 1)
+#        user = dbsession.query(db.Users).filter(db.Users.name == username).first()
+#        if user is None:
+#           raise Forbidden()
+#        if authtype == AuthenticationType.ADMIN and user.admin is False:
+#            raise Forbidden()
+#        try:
+#            auth = ldap.initialize(args.ldap, bytes_mode=False)
+#            auth.simple_bind_s("uid=" + username +",ou=people,dc=elettra,dc=eu", password)
+#            auth.unbind_s()
+#        except Exception as e:
+#            raise Forbidden()
+#        session["username"] = username
+#    return session["username"]
+
+#@app.teardown_request
+#def dbsession_remove(exc):
+#    DBSession.remove()
+
+#def output_json(data, code, headers=None):
+#    content_type = 'application/json'
+#    dumped = json.dumps(data)
+#    if headers:
+#        headers.update({'Content-Type': content_type})
+#    else:
+#        headers = {'Content-Type': content_type}
+#    response = make_response(dumped, code, headers)
+#    return response
+
+#def output_text(data, code, headers=None):
+#    content_type = 'text/plain'
+#    dumped = text.dumps(data)
+#    if headers:
+#        headers.update({'Content-Type': content_type})
+#    else:
+#        headers = {'Content-Type': content_type}
+#    response = make_response(dumped, code, headers)
+#    return response
+
+#default_representations = {
+#        'flask-classful/default': output_json,
+#        'application/json': output_json,
+#        'text/plain': output_text
+#        }
+#
+#class UsersView3(FlaskView):
+#    route_base = "users"
+#    representations = default_representations
+#    def index(self):
+#        dbsession = DBSession()
+#        users = dbsession.query(db.Users) \
+#                .order_by(db.Users.id).all()
+#        return {'users': schema.users_schema.dump(users)}, 200 if users else 204
+##    def get(self, id: int):
+##        dbsession = DBSession()
+##        user = dbsession.query(db.Users).filter(db.Users.id==id).one_or_none()
+##        return user_schema.dump(user), 200 if user else 204
+##    def delete(self, id: int):
+##        dbsession = DBSession()
+##        try:
+##            authenticate(AuthenticationType.ADMIN, dbsession, request)
+##            user = dbsession.query(db.Users).filter(db.Users.id==id).one()
+##            dbsession.delete(user)
+##            dbsession.commit()
+##        except exc.NoResultFound as n:
+##            return {"error": str(n)}, 404
+##        except HTTPException as h:
+##            return {"error": h.description}, h.code
+##        except Exception as e:
+##            dbsession.rollback()
+##            return {"error": str(e)}, 500
+##        return {}, 204
+##    @use_kwargs(user_schema.fields, location="json_or_form")
+##    def put(self, id: int, **kwargs):
+### TODO
+##        pass
+##    @use_kwargs(user_schema.fields, location="json_or_form")
+##    def patch(self, id: int, **kwargs):
+##        dbsession = DBSession()
+##        try:
+##            authenticate(AuthenticationType.ADMIN, dbsession, request)
+##            newuser = user_schema.load(kwargs)
+##            user = dbsession.query(db.Users).filter(db.Users.id==id).one()
+##            if newuser.name is not None: user.name = newuser.name
+##            if newuser.admin is not None: user.admin = newuser.admin
+##            if newuser.notify is not None: user.notify = newuser.notify
+##            dbsession.commit()
+##        except exc.NoResultFound as n:
+##            return {"error": str(n)}, 404
+##        except HTTPException as h:
+##            return {"error": h.description}, h.code
+##        except Exception as e:
+##            dbsession.rollback()
+##            return {"error": str(e)}, 500
+##        return user_schema.dump(user), 200
+##    @use_kwargs(user_schema.fields, location="json_or_form")
+##    def post(self, **kwargs):
+##        dbsession = DBSession()
+##        try:
+##            authenticate(AuthenticationType.ADMIN, dbsession, request)
+##            user = user_schema.load(kwargs)
+##            dbsession.add(user)
+##            dbsession.commit()
+##        except ValidationError as v:
+##            return {"error": str(v)}, 500
+##        except HTTPException as h:
+##            return {"error": h.description}, h.code
+##        except Exception as e:
+##            dbsession.rollback()
+##            return {"error": str(e)}, 500
+##        return user_schema.dump(user), 201
+#
+#class PlatformsView3(FlaskView):
+#    route_base = "platforms"
+#    representations = default_representations
+#    def index(self):
+#        dbsession = DBSession()
+#        platforms = dbsession.query(db.Platforms) \
+#                .order_by(db.Platforms.id).all()
+#        return {'platforms': schema.platforms_schema.dump(platforms) }, \
+#                200 if platforms else 204
+#
+#class FacilitiesView3(FlaskView):
+#    route_base = "facilities"
+#    representations = default_representations
+#    def index(self):
+#        dbsession = DBSession()
+#        facilities = dbsession.query(db.Facilities) \
+#                .order_by(db.Facilities.id).all()
+#        return {'facilities': schema.facilities_schema.dump(facilities)}, \
+#                200 if facilities else 204
+#
+#class ServersView3(FlaskView):
+#    route_base = "servers"
+#    representations = default_representations
+#    def index(self):
+#        dbsession = DBSession()
+#        servers = dbsession.query(db.Servers) \
+#                .order_by(db.Servers.name).all()
+#        return {'servers': schema.servers_schema.dump(servers)}, \
+#                200 if servers else 204
+#
+##class HostsView3(FlaskView):
+##    route_base = "hosts"
+##    representations = default_representations
+##    def index(self, facility):
+##        print(facility)
+##        dbsession = DBSession()
+##        hosts = dbsession.query(db.Hosts) \
+##                .order_by(db.Hosts.id).all()
+##        return {'hosts': schema.hosts_schema.dump(hosts)}, \
+##                200 if hosts else 204
+#
+#UsersView3.register(app, route_prefix='/v3/cs/')
+#PlatformsView3.register(app, route_prefix='/v3/cs/')
+#ServersView3.register(app, route_prefix='/v3/cs/')
+#FacilitiesView3.register(app, route_prefix='/v3/cs/')
+##HostsView3.register(app, route_prefix='/v3/cs/<string>/')
+#
+#if __name__ == '__main__':
+#    parser_args = argparse.ArgumentParser()
+#    parser_args.add_argument("--db", type=str, help='Database URI to connect to', required=True)
+#    parser_args.add_argument('--bind', type=str, default='localhost', help='IP Address or hostname to bind to')
+#    parser_args.add_argument('--port', type=int, default=8080, help='Port to listen to')
+#    parser_args.add_argument("--store", default="/scratch/build/files-store/")
+#    parser_args.add_argument("--ldap", default="ldaps://abook.elettra.eu:636")
+#    args = parser_args.parse_args()
+#
+#    engine = create_engine(args.db, pool_pre_ping=True, echo=True)
+#    dbsession_factory.configure(bind=engine)
+#    DBSession = scoped_session(dbsession_factory)
+#
+#    app.run(host=args.bind, port=args.port, threaded=True,
+#            ssl_context=('/etc/ssl/certs/inau_elettra_eu.crt',
+#                '/etc/ssl/private/inau_elettra_eu.key'))
diff --git a/lib/db.py b/lib/db.py
new file mode 100644
index 0000000..f8a24d7
--- /dev/null
+++ b/lib/db.py
@@ -0,0 +1,127 @@
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, DateTime, Boolean, Text, ForeignKey, func
+#from sqlalchemy.orm import relationship
+from enum import Enum, IntEnum
+import datetime
+
+Base = declarative_base()
+
class Users(Base):
    """User account known to the system.

    NOTE(review): the (commented-out) REST API authenticates these names
    against LDAP -- presumably this is the login name; confirm.
    """
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    # Login name; enforced unique at the database level.
    name = Column(String(255), unique=True, nullable=False)
    # True for users allowed to perform administrative operations.
    admin = Column(Boolean, default=False, nullable=False)
    # True for users who want notifications (the dispatcher takes SMTP
    # settings, so presumably by e-mail -- confirm).
    notify = Column(Boolean, default=False, nullable=False)
+
class Architectures(Base):
    """Hardware architecture, referenced by Platforms.architecture_id."""
    __tablename__ = 'architectures'
    id = Column(Integer, primary_key=True)
    # Architecture name; unique at the database level.
    name = Column(String(255), unique=True, nullable=False)
#    platforms = relationship('Platforms', back_populates='architecture')
+
class Distributions(Base):
    """OS distribution (name plus version), referenced by Platforms."""
    __tablename__ = 'distributions'
    id = Column(Integer, primary_key=True)
    # NOTE(review): the (name, version) pair is NOT enforced unique at
    # the database level -- confirm duplicates are acceptable.
    name = Column(String(255), nullable=False)
    version = Column(String(255), nullable=False)
#    platforms = relationship('Platforms', back_populates='distribution')
+#
class Platforms(Base):
    """Target platform: a distribution paired with an architecture."""
    __tablename__ = 'platforms'
    id = Column(Integer, primary_key=True)
    distribution_id = Column(Integer, ForeignKey('distributions.id'), nullable=False)
    architecture_id = Column(Integer, ForeignKey('architectures.id'), nullable=False)
#    architecture = relationship('Architectures', back_populates='platforms')
#    distribution = relationship('Distributions', back_populates='platforms')
#    servers = relationship('Servers', back_populates='platform')
+#
+#class Facilities(Base):
+#    __tablename__ = 'facilities'
+#    id = Column(Integer, primary_key=True)
+#    name = Column(String(255), unique=True, nullable=False)
+##    hosts = relationship('Hosts', back_populates='server')
+#
+#class Servers(Base):
+#    __tablename__ = 'servers'
+#    id = Column(Integer, primary_key=True)
+#    platform_id = Column(Integer, ForeignKey('platforms.id'), nullable=False)
+#    name = Column(String(255), nullable=False)
+#    prefix = Column(String(255), nullable=False)
+#    platform = relationship('Platforms', back_populates='servers')
+##    hosts = relationship('Hosts', back_populates='server')
+#
+#class Hosts(Base):
+#    __tablename__ = 'hosts'
+#    id = Column(Integer, primary_key=True)
+#    facility_id = Column(Integer, ForeignKey('facilities.id'), nullable=False)
+#    server_id = Column(Integer, ForeignKey('servers.id'), nullable=False)
+#    name = Column(String(255), unique=True, nullable=False)
+##    facility = relationship('Facilities', back_populates='hosts')
+##    server = relationship('Servers', back_populates='hosts')
+#
+###################################################################################################
+#
class Builders(Base):
    """Builder registered for a platform (presumably a build host driven
    by inau-dispatcher -- confirm)."""

    __tablename__ = 'builders'

    id = Column(Integer, primary_key=True)
    # Platform this builder produces output for.
    platform_id = Column(Integer, ForeignKey('platforms.id'), nullable=False)
    # Deliberately unique=False: several rows may share a name --
    # TODO confirm that is intended.
    name = Column(String(255), unique=False, nullable=False)
+
class Providers(Base):
    """Source-code provider identified by its URL (the docs show GitLab
    push events, so presumably a GitLab instance -- confirm)."""

    __tablename__ = 'providers'

    id = Column(Integer, primary_key=True)
    # Base URL of the provider; unique at the database level.
    url = Column(String(255), unique=True, nullable=False)
#    repositories = relationship('Repositories', back_populates='provider')
+
class RepositoryType(IntEnum):
    """Kind of content a repository holds.

    Stored as a plain integer (presumably in Repositories.type --
    confirm against the writers of that column).
    """
    # The original members carried trailing commas ("cplusplus = 0,"),
    # making each value a 1-tuple that IntEnum only accidentally unpacked
    # back into an int (on a plain Enum the value would stay a tuple).
    # Plain integers express the intent directly.
    cplusplus = 0
    python = 1
    configuration = 2
    shellscript = 3
+
class Repositories(Base):
    """Repository to build for a given platform, hosted at a provider."""

    __tablename__ = 'repositories'

    id = Column(Integer, primary_key=True)
    provider_id = Column(Integer, ForeignKey('providers.id'), nullable=False)
    platform_id = Column(Integer, ForeignKey('platforms.id'), nullable=False)
    # Integer code; presumably a RepositoryType value -- confirm.
    type = Column(Integer, nullable=False)
    name = Column(String(255), nullable=False)
    # Target path/location for the built output -- TODO confirm semantics.
    destination = Column(String(255), nullable=False)
#    builds = relationship('Builds', back_populates='repository')
#    provider = relationship('Providers', back_populates='repositories')
+
class Builds(Base):
    """One build of a repository at a specific tag for a platform."""

    __tablename__ = 'builds'

    id = Column(Integer, primary_key=True)
    repository_id = Column(Integer, ForeignKey('repositories.id'), nullable=False)
    platform_id = Column(Integer, ForeignKey('platforms.id'), nullable=False)
    # Tag (e.g. git ref name) the build was made from.
    tag = Column(String(255), nullable=False)
    # Callable default: SQLAlchemy invokes datetime.now at INSERT time,
    # yielding a naive local timestamp.
    date = Column(DateTime, default=datetime.datetime.now, nullable=False)
    # Nullable: presumably unset until the build has finished -- confirm.
    status = Column(Integer, nullable=True)
    # Captured build output/log; nullable like status.
    output = Column(Text, nullable=True)
#    repository = relationship('Repositories', back_populates='builds')
##    platform = relationship('Platforms', back_populates='builds')
+
class Artifacts(Base):
    """File produced by a build."""

    __tablename__ = 'artifacts'

    id = Column(Integer, primary_key=True)
    build_id = Column(Integer, ForeignKey('builds.id'), nullable=False)
    # Hash of the artifact (algorithm not visible here); note the name
    # shadows the builtin hash() within this class body only.
    hash = Column(String(255), nullable=False)
    filename = Column(String(255), nullable=False)
#    build = db.relationship('Builds', lazy=True, backref=db.backref('artifacts', lazy=True))
+
+##class Installations(db.Model):
+##    id = db.Column(db.Integer, primary_key=True)
+##    host_id = db.Column(db.Integer, db.ForeignKey('hosts.id'), nullable=False)
+##    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
+##    build_id = db.Column(db.Integer, db.ForeignKey('builds.id'), nullable=False)
+##    type = db.Column(db.Integer, nullable=False)
+##    date = db.Column(db.DateTime, nullable=False)
+##    host = db.relationship('Hosts', lazy=True, backref=db.backref('installations', lazy=True))
+##    user = db.relationship('Users', lazy=True, backref=db.backref('installations', lazy=True))
+##    build = db.relationship('Builds', lazy=True, backref=db.backref('installations', lazy=True))
diff --git a/lib/schema.py b/lib/schema.py
new file mode 100644
index 0000000..0b0a4a3
--- /dev/null
+++ b/lib/schema.py
@@ -0,0 +1,95 @@
+#from marshmallow import Schema, fields
+#
+#class UserSchema(Schema):
+#    id = fields.Integer(dump_only=True)
+#    name = fields.Str()
+#    admin = fields.Bool()
+#    notify = fields.Bool()
+#    class Meta:
+#        ordered = True
+##    @post_load
+##    def make_user(self, data, **kwargs):
+##        return db.Users(**data)
+#
+#user_schema = UserSchema(only=('name', 'admin', 'notify'))
+#users_schema = UserSchema(many=True)
+#
+#class DistributionSchema(Schema):
+#    id = fields.Integer(dump_only=True)
+#    name = fields.Str()
+#    version = fields.Str()
+#    formatted = fields.Method('format_output', dump_only=True)
+#    def format_output(self, distribution):
+#        return "{} {}".format(distribution.name, distribution.version)
+#
+#class ArchitectureSchema(Schema):
+#    id = fields.Integer(dump_only=True)
+#    name = fields.Str()
+#
+#class PlatformSchema(Schema):
+#    id = fields.Integer(dump_only=True)
+##    distribution_id = fields.Integer(dump_only=True)
+#    distribution = fields.Pluck('DistributionSchema', 'formatted')
+##    architecture_id = fields.Integer(dump_only=True)
+#    architecture = fields.Pluck('ArchitectureSchema', 'name')
+#    formatted = fields.Method('format_output', dump_only=True)
+#    def format_output(self, platform):
+#        return "{} {} {}".format(platform.distribution.name, 
+#                platform.distribution.version, platform.architecture.name)
+#    class Meta:
+#        ordered = True
+##    @post_load
+##    def make_platform(self, data, **kwargs):
+##        return db.Platforms(**data)
+#
+#platform_schema = PlatformSchema(only=('distribution', 'architecture'))
+#platforms_schema = PlatformSchema(many=True, only=('id', 'distribution', 'architecture'))
+#
+#class FacilitySchema(Schema):
+#    id = fields.Integer(dump_only=True)
+#    name = fields.Str()
+#    class Meta:
+#        ordered = True
+##    @post_load
+##    def make_facility(self, data, **kwargs):
+##        return db.Facilities(**data)
+#
+#facility_schema = FacilitySchema(only=('name',))
+#facilities_schema = FacilitySchema(many=True)
+#
+#class ServerSchema(Schema):
+#    id = fields.Integer(dump_only=True)
+#    name = fields.Str()
+#    prefix = fields.Str()
+##    platform_id = fields.Integer(dump_only=True)
+#    platform = fields.Pluck('PlatformSchema', 'formatted')
+#    formatted = fields.Method('format_output', dump_only=True)
+#    def format_output(self, server):
+#        return "{} {} {} {} {}".format(server.name, server.prefix, 
+#                server.platform.distribution.name,
+#                server.platform.distribution.version, 
+#                server.platform.architecture.name)
+#    class Meta:
+#        ordered = True
+##    @post_load
+##    def make_server(self, data, **kwargs):
+##        return db.Servers(**data)
+#
+#server_schema = ServerSchema(only=('name', 'prefix','platform'))
+#servers_schema = ServerSchema(many=True, only=('id','name','prefix','platform'))
+#
+##class HostSchema(Schema):
+##    id = fields.Integer(dump_only=True)
+##    facility_id = fields.Integer(dump_only=True)
+##    server_id = fields.Integer(dump_only=True)
+##    name = fields.Str()
+##    facility = fields.Pluck('FacilitySchema', 'name')
+##    server = fields.Pluck('ServerSchema', 'formatted')
+##    class Meta:
+##        ordered = True
+##    @post_load
+##    def make_server(self, data, **kwargs):
+##        return db.Servers(**data)
+#
+##host_schema = HostSchema(only=('name','facility','server'))
+##hosts_schema = HostSchema(many=True)
diff --git a/lib/text.py b/lib/text.py
new file mode 100644
index 0000000..a4095bf
--- /dev/null
+++ b/lib/text.py
@@ -0,0 +1,48 @@
def discovermaxlength(lengths, obj):
    """Update *lengths* in place so lengths[k] is the widest of the
    column header k and every str(value) seen so far for that key.

    lengths: dict mapping column name -> current max width (mutated).
    obj: one row, as a dict of column name -> value.
    """
    for key, value in obj.items():
        # dict.get(key, 0) folds the first-time-seen case into the
        # general max() (the original used an un-idiomatic `== None`
        # check to branch between the two cases).
        lengths[key] = max(lengths.get(key, 0), len(key), len(str(value)))
+
def dumpheader(lengths, obj):
    """Render the three header lines of an ASCII table: top border,
    column titles, and the '=' separator under them.

    lengths: dict of column name -> cell width.
    obj: a sample row; its value types decide title alignment.
    """
    border, titles, separator = "+", "|", "+"
    for key, value in obj.items():
        width = lengths[key]
        border += "-" * (width + 2) + "+"
        # Numeric columns get centered titles, textual ones left-aligned.
        cell = key.center(width) if type(value) is int else key.ljust(width)
        titles += f" {cell} |"
        separator += "=" * (width + 2) + "+"
    return f"{border}\n{titles}\n{separator}"
+
def dumpdata(lengths, obj):
    """Render one table row preceded by a newline and followed by its
    '-' separator line.

    lengths: dict of column name -> cell width.
    obj: the row to render, as a dict of column name -> value.
    """
    separator, row = "+", "|"
    for key, value in obj.items():
        width = lengths[key]
        separator += "-" * (width + 2) + "+"
        text = str(value)
        # Numbers are right-aligned, everything else left-aligned.
        cell = text.rjust(width) if type(value) is int else text.ljust(width)
        row += f" {cell} |"
    return f"\n{row}\n{separator}"
+
def dumps(data):
    """Serialize *data* (a dict) to a plain-text representation.

    String values are emitted inline as "key: value".  A dict value is
    treated as a one-row table; a list value as a multi-row table with
    an ASCII header followed by one rendered row per item.  Column
    widths accumulate in a single `lengths` dict shared across all
    keys, as in the original implementation.
    """
    parts = []
    lengths = {}
    for key, value in data.items():
        if isinstance(value, str):
            parts.append(key + ": " + value)
            continue

        # A single dict becomes a one-row table.
        if isinstance(value, dict):
            value = [value]

        if isinstance(value, list):
            # First pass: compute column widths over every row so the
            # header and all rows share the same geometry.
            for item in value:
                discovermaxlength(lengths, item)

            for idx, item in enumerate(value):
                if idx == 0:
                    parts.append(dumpheader(lengths, item))
                parts.append(dumpdata(lengths, item))

    # join() instead of repeated += avoids quadratic string building.
    return "".join(parts)
-- 
GitLab