2019-05-30 14:04:08 +00:00
|
|
|
# coding: utf8
|
|
|
|
|
|
|
|
# toutes les chaines sont en unicode (même les docstrings)
|
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
|
|
|
from pprint import pprint
|
2019-07-18 19:05:15 +00:00
|
|
|
from colorama import init, Fore
|
2019-05-30 14:04:08 +00:00
|
|
|
from rocketchat_API.rocketchat import RocketChat
|
|
|
|
import json
|
|
|
|
import dev_config as cfg
|
2019-06-05 10:18:42 +00:00
|
|
|
import os
|
2019-06-07 16:59:06 +00:00
|
|
|
import re
|
2019-07-04 19:15:35 +00:00
|
|
|
from common.channelhelper import getNodesOrigin, getAllChannels, Tsunami
|
|
|
|
|
|
|
|
def main():
    """Build the graph data files (channels + cohortes) for the coa website.

    Fetches every public channel from the Rocket.Chat server configured in
    dev_config, builds a node/edge list for a cytoscape-style graph, and
    writes three JSON files into <repo root>/public/data/:
    channelslist.json, cohortescolor.json and cohorteslist.json.
    """
    # colorama init — makes ANSI color codes work on Windows consoles too
    init()

    # Edge color for the link between a channel and each thematic root node
    colorInfo = {
        'global': 'orange',
        'technologie': 'gray',
        'democratie': 'red',
        'ecologie': 'green',
        'project': 'blue'
    }

    rocket = RocketChat(cfg.rocket['user'], cfg.rocket['password'], server_url=cfg.rocket['server'])

    # Thematic root nodes, all rendered at the maximum size
    sizebase = 100
    datas = [
        {'data': {'id': 'global', 'label': 'global', 'size': sizebase, 'color': colorInfo['global'], 'href': 'https://coa.crapaud-fou.org/'}},
        {'data': {'id': 'ecologie', 'label': 'ecologie', 'size': sizebase, 'color': colorInfo['ecologie'], 'href': 'https://coa.crapaud-fou.org/'}},
        {'data': {'id': 'democratie', 'label': 'democratie', 'size': sizebase, 'color': colorInfo['democratie'], 'href': 'https://coa.crapaud-fou.org/'}},
        {'data': {'id': 'technologie', 'label': 'technologie', 'size': sizebase, 'color': colorInfo['technologie'], 'href': 'https://coa.crapaud-fou.org/'}},
        {'data': {'id': 'project', 'label': 'projet', 'size': sizebase, 'color': colorInfo['project'], 'href': 'https://coa.crapaud-fou.org/'}},
    ]

    # Cohorte (regional group) maps, pre-seeded with the French entry
    cohortes = {'fr': {'updateMap': 'france_fr'}}
    cohortescolor = {'fr': 'green'}

    nbChannels = 0
    nbCohorte = 0

    for channel in getAllChannels(rocket):
        print(channel['name'])

        # Cohorte channels are collected separately and excluded from the graph
        if 'cohorte' in channel['name']:
            if 'description' in channel:
                # Regions are tagged as "#region-name" in the channel description
                regions = re.findall(r'#([\w-]+)', channel['description'])
                # BUG FIX: original tested `m.count == 0` — `count` is a bound
                # method of list, never equal to 0, so this warning never fired.
                if not regions:
                    # BUG FIX: dropped a stray 'red' positional argument left
                    # over from a termcolor-style call; it printed " red".
                    print(Fore.RED + "\tmissing region information")
                for region in regions:
                    cohortescolor.update({region: 'green'})
                    cohortes.update({region: {'link': channel['name']}})
            nbCohorte += 1
            continue

        # Node size reflects the channel population, except for the general
        # channel and one special channel which are pinned to the maximum.
        size = channel['usersCount']
        if channel['_id'] in ('GENERAL', 'rp5gdRrZubMKic3Nk'):
            size = sizebase

        node = {
            'data': {
                'id': channel['_id'],
                # prefer the display name when available
                'label': channel['fname'] if 'fname' in channel else channel['name'],
                'size': size,
                'color': 'grey',
                'href': f"{cfg.rocket['server']}/channel/{channel['name']}"
            }
        }
        datas.append(node)

        # One edge from each thematic origin node to this channel, colored
        # after the origin theme.
        for nodeOrigin in getNodesOrigin(channel):
            if nodeOrigin is not None:
                datas.append({'data': {'source': nodeOrigin, 'target': channel['_id'], 'color': colorInfo[nodeOrigin]}})

        nbChannels += 1

    # Repo root directory (this script lives one level below it)
    rootFolder = os.path.join(os.path.dirname(__file__), '..')
    # Output directory for the generated data files
    dataFolder = os.path.join(rootFolder, 'public', 'data')
    # Should we try to create it just in case?
    # os.makedirs(dataFolder, exist_ok=True)

    channelsFilePath = os.path.abspath(os.path.join(dataFolder, 'channelslist.json'))
    with open(channelsFilePath, "w") as file_write:
        json.dump(datas, file_write)

    cohortecolorFilePath = os.path.abspath(os.path.join(dataFolder, 'cohortescolor.json'))
    with open(cohortecolorFilePath, "w") as file_write:
        json.dump(cohortescolor, file_write)

    cohorteFilePath = os.path.abspath(os.path.join(dataFolder, 'cohorteslist.json'))
    with open(cohorteFilePath, "w") as file_write:
        json.dump(cohortes, file_write)

    pprint("Nb displayed channels : " + str(nbChannels))
    pprint("Nb cohorte channels : " + str(nbCohorte))


if __name__ == "__main__":
    main()
|