| repo_name (string, len 5-92) | path (string, len 4-232) | copies (string, 19 classes) | size (string, len 4-7) | content (string, len 721-1.04M) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
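Each record below pairs repository metadata with the file's full source text in the `content` column. As a rough illustration only (the dataset identifier and split name are placeholders, not taken from this dump), records with this schema could be loaded and filtered like this:

```python
# Minimal sketch, assuming the records are published as a Hugging Face dataset;
# "user/python-code-corpus" is a placeholder identifier, not a real dataset path.
from datasets import load_dataset

ds = load_dataset("user/python-code-corpus", split="train")

# Keep only GPL-licensed, non-autogenerated files smaller than 32 kB,
# using the column names from the schema above.
subset = ds.filter(
    lambda row: row["license"].startswith("gpl")
    and not row["autogenerated"]
    and int(row["size"]) < 32_000
)

for row in subset.select(range(3)):
    print(row["repo_name"], row["path"], row["line_max"])
```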
rep/hpfeeds | tests/test_client_integration_py2.py | 1 | 1398 | import logging
import threading
import unittest
from hpfeeds import client
from hpfeeds.protocol import readpublish
from .fakebroker import FakeBroker, setup_default_reactor
class TestClientIntegration(unittest.TestCase):
log = logging.getLogger('hpfeeds.testserver')
def _server_thread(self):
self.reactor = setup_default_reactor(self)
self.server.start()
self.reactor.run(installSignalHandlers=False)
def setUp(self):
self.server = FakeBroker()
self.server_thread = threading.Thread(
target=self._server_thread,
)
self.server_thread.start()
def test_subscribe_and_publish(self):
c = client.new('127.0.0.1', self.server.port, 'test', 'secret')
c.subscribe('test-chan')
c._subscribe()
c.publish('test-chan', b'data')
opcode, data = c._read_message()
assert opcode == 3
assert readpublish(data) == ('test', 'test-chan', b'data')
self.log.debug('Stopping client')
c.stop()
self.log.debug('Closing client')
c.close()
def tearDown(self):
self.log.debug('Cancelling future')
self.server.close()
self.reactor.callFromThread(self.reactor.stop)
self.log.debug('Waiting')
self.server_thread.join()
assert len(self.server.connections) == 0, 'Connection left dangling'
| gpl-3.0 | 5,969,301,653,231,483,000 | 25.377358 | 76 | 0.633763 | false |
geosim/QAD | qad_joindisjoin_cmd.py | 1 | 25015 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
QAD Quantum Aided Design plugin
JOIN and DISJOIN commands to aggregate and split apart geometries
(multipoint, multilinestring, polygon and multipolygon)
-------------------
begin : 2016-04-06
copyright : iiiii
email : hhhhh
developers : bbbbb aaaaa ggggg
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from qad_generic_cmd import QadCommandClass
from qad_snapper import *
from qad_getpoint import *
from qad_ssget_cmd import QadSSGetClass
from qad_msg import QadMsg
from qad_textwindow import *
import qad_utils
import qad_layer
from qad_variables import *
from qad_entsel_cmd import QadEntSelClass
# Class that manages the JOIN command
class QadJOINCommandClass(QadCommandClass):
def instantiateNewCmd(self):
""" istanzia un nuovo comando dello stesso tipo """
return QadJOINCommandClass(self.plugIn)
def getName(self):
return QadMsg.translate("Command_list", "JOIN")
def getEnglishName(self):
return "JOIN"
def connectQAction(self, action):
QObject.connect(action, SIGNAL("triggered()"), self.plugIn.runJOINCommand)
def getIcon(self):
return QIcon(":/plugins/qad/icons/join.png")
def getNote(self):
# set the explanatory note for the command
return QadMsg.translate("Command_JOIN", "Join existing geometries.")
def __init__(self, plugIn):
QadCommandClass.__init__(self, plugIn)
self.entity = QadEntity()
self.SSGetClass = None
self.entSelClass = None
def __del__(self):
QadCommandClass.__del__(self)
if self.SSGetClass is not None: del self.SSGetClass
def getPointMapTool(self, drawMode = QadGetPointDrawModeEnum.NONE):
if self.step == 1: # when in the entity selection phase
return self.entSelClass.getPointMapTool(drawMode)
elif self.step == 2: # when in the entity group selection phase
return self.SSGetClass.getPointMapTool()
else:
return QadCommandClass.getPointMapTool(self, drawMode)
def getCurrentContextualMenu(self):
if self.step == 1: # when in the entity selection phase
return self.entSelClass.getCurrentContextualMenu()
elif self.step == 2: # when in the entity group selection phase
return self.SSGetClass.getCurrentContextualMenu()
else:
return self.contextualMenu
def reinitSSGetClass(self):
if self.SSGetClass is not None: del self.SSGetClass
self.SSGetClass = QadSSGetClass(self.plugIn)
self.SSGetClass.onlyEditableLayers = True
self.SSGetClass.checkDimLayers = False # discard dimension entities
geometryType = self.entity.layer.geometryType()
if geometryType == QGis.Point:
self.SSGetClass.checkPointLayer = True
self.SSGetClass.checkLineLayer = False
self.SSGetClass.checkPolygonLayer = False
elif geometryType == QGis.Line:
self.SSGetClass.checkPointLayer = False
self.SSGetClass.checkLineLayer = True
self.SSGetClass.checkPolygonLayer = True
elif geometryType == QGis.Polygon:
self.SSGetClass.checkPointLayer = False
self.SSGetClass.checkLineLayer = True
self.SSGetClass.checkPolygonLayer = True
#============================================================================
# addEntitySetToPoint
#============================================================================
def addEntitySetToPoint(self, entitySet, removeOriginals = True):
"""
Adds the entity set to the point to be modified
"""
geom = self.entity.getGeometry()
layerList = []
layerList.append(self.entity.layer)
for layerEntitySet in entitySet.layerEntitySetList:
layer = layerEntitySet.layer
if layer.geometryType() != QGis.Point:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
if removeOriginals: layerList.append(layer)
coordTransform = QgsCoordinateTransform(layer.crs(), self.entity.layer.crs())
for featureId in layerEntitySet.featureIds:
# if the feature is the one being modified (entity), it is an error
if layer.id() == self.entity.layerId() and featureId == self.entity.featureId:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = layerEntitySet.getFeature(featureId)
# transform the geometry into the CRS of the layer of the entity being modified
geomToAdd = f.geometry()
geomToAdd.transform(coordTransform)
simplifiedGeoms = qad_utils.asPointOrPolyline(geomToAdd)
for simplifiedGeom in simplifiedGeoms:
point = simplifiedGeom.asPoint()
# add a part
if geom.addPart([point]) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = self.entity.getFeature()
f.setGeometry(geom)
layerList = entitySet.getLayerList()
layerList.append(self.entity.layer)
self.plugIn.beginEditCommand("Feature edited", layerList)
# plugIn, layer, feature, refresh, check_validity
if qad_layer.updateFeatureToLayer(self.plugIn, self.entity.layer, f, False, False) == False:
self.plugIn.destroyEditCommand()
return False
if removeOriginals:
for layerEntitySet in entitySet.layerEntitySetList:
if qad_layer.deleteFeaturesToLayer(self.plugIn, layerEntitySet.layer, layerEntitySet.featureIds, False) == False:
self.plugIn.destroyEditCommand()
return
self.plugIn.endEditCommand()
return True
#============================================================================
# addEntitySetToPolyline
#============================================================================
def addEntitySetToPolyline(self, entitySet, removeOriginals = True):
"""
Adds the entity set to the polyline to be modified
"""
geom = self.entity.getGeometry()
layerList = []
layerList.append(self.entity.layer)
for layerEntitySet in entitySet.layerEntitySetList:
layer = layerEntitySet.layer
if layer.geometryType() != QGis.Polygon and layer.geometryType() != QGis.Line:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
if removeOriginals: layerList.append(layer)
coordTransform = QgsCoordinateTransform(layer.crs(), self.entity.layer.crs())
for featureId in layerEntitySet.featureIds:
# if the feature is the one being modified (entity), it is an error
if layer.id() == self.entity.layerId() and featureId == self.entity.featureId:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = layerEntitySet.getFeature(featureId)
# transform the geometry into the CRS of the layer of the entity being modified
geomToAdd = f.geometry()
geomToAdd.transform(coordTransform)
# Reduce the geometry to point or polyline
simplifiedGeoms = qad_utils.asPointOrPolyline(geomToAdd)
for simplifiedGeom in simplifiedGeoms:
points = simplifiedGeom.asPolyline() # list of points
# add a part
if geom.addPart(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = self.entity.getFeature()
f.setGeometry(geom)
layerList = entitySet.getLayerList()
layerList.append(self.entity.layer)
self.plugIn.beginEditCommand("Feature edited", layerList)
# plugIn, layer, feature, refresh, check_validity
if qad_layer.updateFeatureToLayer(self.plugIn, self.entity.layer, f, False, False) == False:
self.plugIn.destroyEditCommand()
return False
if removeOriginals:
for layerEntitySet in entitySet.layerEntitySetList:
if qad_layer.deleteFeaturesToLayer(self.plugIn, layerEntitySet.layer, layerEntitySet.featureIds, False) == False:
self.plugIn.destroyEditCommand()
return
self.plugIn.endEditCommand()
return True
#============================================================================
# addEntitySetToPolygon
#============================================================================
def addEntitySetToPolygon(self, entitySet, removeOriginals = True):
"""
Adds the entity set to the polygon to be modified
"""
geom = self.entity.getGeometry()
layerList = []
layerList.append(self.entity.layer)
for layerEntitySet in entitySet.layerEntitySetList:
layer = layerEntitySet.layer
if layer.geometryType() != QGis.Polygon and layer.geometryType() != QGis.Line:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
if removeOriginals: layerList.append(layer)
coordTransform = QgsCoordinateTransform(layer.crs(), self.entity.layer.crs())
for featureId in layerEntitySet.featureIds:
# if the feature is the one being modified (entity), it is an error
if layer.id() == self.entity.layerId() and featureId == self.entity.featureId:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = layerEntitySet.getFeature(featureId)
# transform the geometry into the CRS of the layer of the polygon being modified
geomToAdd = f.geometry()
geomToAdd.transform(coordTransform)
# if the polygon is contained in the geometry to be added
if geomToAdd.contains(geom):
# Reduce the geometry to point or polyline
simplifiedGeoms = qad_utils.asPointOrPolyline(geom)
# it must be a polygon without rings
if len(simplifiedGeoms) != 1 or simplifiedGeoms[0].wkbType() != QGis.WKBLineString:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
points = simplifiedGeoms[0].asPolyline() # list of points
# add an island (inner ring)
if geomToAdd.addRing(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
del geom
geom = QgsGeometry.fromPolygon(geomToAdd.asPolygon())
else: # if the polygon is not contained in the geometry to be added
# Reduce the geometry to point or polyline
simplifiedGeoms = qad_utils.asPointOrPolyline(geomToAdd)
for simplifiedGeom in simplifiedGeoms:
points = simplifiedGeom.asPolyline() # list of points
# if the geometry to be added is contained in the polygon
if geom.contains(QgsGeometry.fromPolyline(points)):
# add an island (inner ring)
if geom.addRing(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
else:
# add a part
if geom.addPart(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = self.entity.getFeature()
f.setGeometry(geom)
layerList = entitySet.getLayerList()
layerList.append(self.entity.layer)
self.plugIn.beginEditCommand("Feature edited", layerList)
# plugIn, layer, feature, refresh, check_validity
if qad_layer.updateFeatureToLayer(self.plugIn, self.entity.layer, f, False, False) == False:
self.plugIn.destroyEditCommand()
return False
if removeOriginals:
for layerEntitySet in entitySet.layerEntitySetList:
if qad_layer.deleteFeaturesToLayer(self.plugIn, layerEntitySet.layer, layerEntitySet.featureIds, False) == False:
self.plugIn.destroyEditCommand()
return
self.plugIn.endEditCommand()
return True
#============================================================================
# waitForEntsel
#============================================================================
def waitForEntsel(self, msgMapTool, msg):
if self.entSelClass is not None:
del self.entSelClass
self.step = 1
self.entSelClass = QadEntSelClass(self.plugIn)
self.entSelClass.msg = QadMsg.translate("Command_JOIN", "Select object to join to: ")
# discard selection of dimension entities
self.entSelClass.checkDimLayers = False
self.entSelClass.onlyEditableLayers = True
self.entSelClass.deselectOnFinish = True
self.entSelClass.run(msgMapTool, msg)
#============================================================================
# waitForSSsel
#============================================================================
def waitForSSsel(self, msgMapTool, msg):
self.reinitSSGetClass()
self.step = 2
self.showMsg(QadMsg.translate("Command_JOIN", "\nSelect objects to join: "))
self.SSGetClass.run(msgMapTool, msg)
def run(self, msgMapTool = False, msg = None):
if self.plugIn.canvas.mapSettings().destinationCrs().geographicFlag():
self.showMsg(QadMsg.translate("QAD", "\nThe coordinate reference system of the project must be a projected coordinate system.\n"))
return True # fine comando
if self.step == 0:
self.waitForEntsel(msgMapTool, msg) # select the object to join to
return False # continua
#=========================================================================
# RESPONSE TO THE SELECTION OF THE ENTITY TO BE MODIFIED
elif self.step == 1:
if self.entSelClass.run(msgMapTool, msg) == True:
if self.entSelClass.entity.isInitialized():
self.entity.set(self.entSelClass.entity)
self.waitForSSsel(msgMapTool, msg)
else:
if self.entSelClass.canceledByUsr == True: # fine comando
return True
self.showMsg(QadMsg.translate("QAD", "No geometries in this position."))
self.waitForEntsel(msgMapTool, msg)
return False # continua
#=========================================================================
# RESPONSE TO THE SELECTION SET REQUEST (from step = 1)
elif self.step == 2:
if self.SSGetClass.run(msgMapTool, msg) == True:
if self.SSGetClass.entitySet.count() > 0:
geometryType = self.entity.layer.geometryType()
if geometryType == QGis.Point:
self.addEntitySetToPoint(self.SSGetClass.entitySet)
elif geometryType == QGis.Line:
self.addEntitySetToPolyline(self.SSGetClass.entitySet)
elif geometryType == QGis.Polygon:
self.addEntitySetToPolygon(self.SSGetClass.entitySet)
return True
self.waitForSSsel(msgMapTool, msg)
return False
# Class that manages the DISJOIN command
class QadDISJOINCommandClass(QadCommandClass):
def instantiateNewCmd(self):
""" istanzia un nuovo comando dello stesso tipo """
return QadDISJOINCommandClass(self.plugIn)
def getName(self):
return QadMsg.translate("Command_list", "DISJOIN")
def getEnglishName(self):
return "DISJOIN"
def connectQAction(self, action):
QObject.connect(action, SIGNAL("triggered()"), self.plugIn.runDISJOINCommand)
def getIcon(self):
return QIcon(":/plugins/qad/icons/disjoin.png")
def getNote(self):
# set the explanatory note for the command
return QadMsg.translate("Command_DISJOIN", "Disjoin existing geometries.")
def __init__(self, plugIn):
QadCommandClass.__init__(self, plugIn)
self.entity = QadEntity()
self.SSGetClass = QadSSGetClass(plugIn)
self.SSGetClass.onlyEditableLayers = False
self.SSGetClass.checkDimLayers = False # discard dimension entities
self.entSelClass = None
self.currSubGeom = None
self.currAtSubGeom = None
def __del__(self):
QadCommandClass.__del__(self)
del self.SSGetClass
def getPointMapTool(self, drawMode = QadGetPointDrawModeEnum.NONE):
if self.step == 1: # when in the entity selection phase
return self.entSelClass.getPointMapTool(drawMode)
else:
return QadCommandClass.getPointMapTool(self, drawMode)
def getCurrentContextualMenu(self):
if self.step == 1: # when in the entity selection phase
return self.entSelClass.getCurrentContextualMenu()
else:
return self.contextualMenu
#============================================================================
# setCurrentSubGeom
#============================================================================
def setCurrentSubGeom(self, entSelClass):
"""
Sets the current sub-geometry
"""
self.currSubGeom = None
self.currAtSubGeom = None
# verify that an entity has been selected
if entSelClass.entity.isInitialized() == False:
self.showMsg(QadMsg.translate("QAD", "No geometries in this position."))
return False
# verify that the entity was selected through a point
# (to understand which sub-geometry was selected)
if entSelClass.point is None: return False
self.entity.set(entSelClass.entity)
geom = self.layerToMapCoordinates(entSelClass.entity.layer, entSelClass.entity.getGeometry())
# returns a tuple (<The squared cartesian distance>,
# <minDistPoint>
# <afterVertex>
# <leftOf>)
dummy = qad_utils.closestSegmentWithContext(entSelClass.point, geom)
if dummy[2] is None:
return False
# returns the sub-geometry at the vertex <atVertex> and its position in the geometry (0-based)
self.currSubGeom, self.currAtSubGeom = qad_utils.getSubGeomAtVertex(geom, dummy[2])
if self.currSubGeom is None or self.currAtSubGeom is None:
self.currSubGeom = None
self.currAtSubGeom = None
return False
return True
#============================================================================
# disjoinCurrentSubGeomToPolygon
#============================================================================
def disjoinCurrentSubGeomToPolygon(self):
"""
Disconnects the current sub-geometry of the polygon being modified, creating a new entity
"""
layer = self.entity.layer
# the position is expressed as a list (<main obj. index> [<secondary obj. index>])
part = self.currAtSubGeom[0]
ring = self.currAtSubGeom[1] if len(self.currAtSubGeom) == 2 else None
geom = self.entity.getGeometry()
wkbType = geom.wkbType()
if wkbType == QGis.WKBMultiPoint or wkbType == QGis.WKBMultiLineString:
if geom.deletePart(part) == False: # detach a part
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
newGeom = self.mapToLayerCoordinates(layer, self.currSubGeom)
elif wkbType == QGis.WKBPolygon or wkbType == QGis.WKBMultiPolygon:
if ring is not None: # detach an island
if geom.deleteRing(ring + 1, part) == False: # delete an island (Ring 0 is outer ring and can't be deleted)
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
newGeom = QgsGeometry.fromPolygon([self.mapToLayerCoordinates(layer, self.currSubGeom).asPolyline()])
else: # detach a part
if wkbType == QGis.WKBPolygon:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
newGeom = QgsGeometry.fromPolygon([self.mapToLayerCoordinates(layer, self.currSubGeom).asPolyline()])
ring = 0
ringGeom = qad_utils.getSubGeomAt(geom, [part, ring])
# if the part has islands
while ringGeom is not None:
# add an island (inner ring)
points = ringGeom.asPolyline() # list of points
if newGeom.addRing(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
ring = ring + 1
ringGeom = qad_utils.getSubGeomAt(geom, [part, ring])
if geom.deletePart(part) == False: # delete a part
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
else:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = self.entity.getFeature()
f.setGeometry(geom)
self.plugIn.beginEditCommand("Feature edited", self.entity.layer)
# plugIn, layer, feature, refresh, check_validity
if qad_layer.updateFeatureToLayer(self.plugIn, self.entity.layer, f, False, False) == False:
self.plugIn.destroyEditCommand()
return False
# Add the new feature
newF = QgsFeature(f)
newF.setGeometry(newGeom)
if qad_layer.addFeatureToLayer(self.plugIn, self.entity.layer, newF, None, False, False) == False:
self.plugIn.destroyEditCommand()
return False
self.plugIn.endEditCommand()
return True
#============================================================================
# waitForEntsel
#============================================================================
def waitForEntsel(self, msgMapTool, msg):
if self.entSelClass is not None:
del self.entSelClass
self.step = 1
self.entSelClass = QadEntSelClass(self.plugIn)
self.entSelClass.msg = QadMsg.translate("Command_DISJOIN", "Select object to disjoin: ")
# discard selection of dimension entities
self.entSelClass.checkDimLayers = False
self.entSelClass.onlyEditableLayers = True
self.entSelClass.deselectOnFinish = True
self.entSelClass.run(msgMapTool, msg)
def run(self, msgMapTool = False, msg = None):
if self.plugIn.canvas.mapSettings().destinationCrs().geographicFlag():
self.showMsg(QadMsg.translate("QAD", "\nThe coordinate reference system of the project must be a projected coordinate system.\n"))
return True # fine comando
if self.step == 0:
self.waitForEntsel(msgMapTool, msg) # select the object to disjoin
return False # continua
#=========================================================================
# RESPONSE TO THE SELECTION OF THE ENTITY TO BE MODIFIED
elif self.step == 1:
if self.entSelClass.run(msgMapTool, msg) == True:
if self.setCurrentSubGeom(self.entSelClass) == True:
if self.disjoinCurrentSubGeomToPolygon() == True:
return True
else:
if self.entSelClass.canceledByUsr == True: # fine comando
return True
self.showMsg(QadMsg.translate("QAD", "No geometries in this position."))
self.waitForEntsel(msgMapTool, msg)
return False # continua | gpl-3.0 | -2,137,112,879,578,955,000 | 39.693811 | 139 | 0.577587 | false |
pointhi/kicad-footprint-generator | scripts/Packages/Package_Gullwing__QFP_SOIC_SO/ipc_gullwing_generator.py | 1 | 30745 | #!/usr/bin/env python3
import sys
import os
import argparse
import yaml
import math
sys.path.append(os.path.join(sys.path[0], "..", "..", "..")) # load parent path of KicadModTree
from KicadModTree import * # NOQA
from KicadModTree.nodes.base.Pad import Pad # NOQA
sys.path.append(os.path.join(sys.path[0], "..", "..", "tools")) # load parent path of tools
from footprint_text_fields import addTextFields
from ipc_pad_size_calculators import *
from quad_dual_pad_border import add_dual_or_quad_pad_border
from drawing_tools import nearestSilkPointOnOrthogonalLine
sys.path.append(os.path.join(sys.path[0], "..", "utils"))
from ep_handling_utils import getEpRoundRadiusParams
ipc_density = 'nominal'
ipc_doc_file = '../ipc_definitions.yaml'
DEFAULT_PASTE_COVERAGE = 0.65
DEFAULT_VIA_PASTE_CLEARANCE = 0.15
DEFAULT_MIN_ANNULAR_RING = 0.15
def roundToBase(value, base):
return round(value/base) * base
class Gullwing():
def __init__(self, configuration):
self.configuration = configuration
with open(ipc_doc_file, 'r') as ipc_stream:
try:
self.ipc_defintions = yaml.safe_load(ipc_stream)
self.configuration['min_ep_to_pad_clearance'] = 0.2
#ToDo: find a settings file that can contain these.
self.configuration['paste_radius_ratio'] = 0.25
self.configuration['paste_maximum_radius'] = 0.25
if 'ipc_generic_rules' in self.ipc_defintions:
self.configuration['min_ep_to_pad_clearance'] = self.ipc_defintions['ipc_generic_rules'].get('min_ep_to_pad_clearance', 0.2)
except yaml.YAMLError as exc:
print(exc)
def calcPadDetails(self, device_dimensions, EP_size, ipc_data, ipc_round_base):
# Zmax = Lmin + 2JT + √(CL^2 + F^2 + P^2)
# Gmin = Smax − 2JH − √(CS^2 + F^2 + P^2)
# Xmax = Wmin + 2JS + √(CW^2 + F^2 + P^2)
# Some manufacturers do not list the terminal spacing (S) in their datasheet but list the terminal length (T)
# Then one can calculate
# Stol(RMS) = √(Ltol^2 + 2*Ttol^2)
# Smin = Lmin - 2*Tmax
# Smax(RMS) = Smin + Stol(RMS)
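# Illustrative worked example (hypothetical numbers, not taken from an IPC table):
# with Lmin = 9.9, JT = 0.35 and tolerances CL = 0.1, F = 0.1, P = 0.05 the Zmax
# formula above gives 9.9 + 2*0.35 + sqrt(0.01 + 0.01 + 0.0025) = 9.9 + 0.7 + 0.15
# = 10.75, i.e. the outer pad extent grows by twice the toe fillet goal plus the
# RMS combination of the tolerances.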
manf_tol = {
'F': self.configuration.get('manufacturing_tolerance', 0.1),
'P': self.configuration.get('placement_tolerance', 0.05)
}
Gmin_x, Zmax_x, Xmax = ipc_gull_wing(
ipc_data, ipc_round_base, manf_tol,
device_dimensions['lead_width'],
device_dimensions['overall_size_x'],
lead_len=device_dimensions.get('lead_len'),
heel_reduction=device_dimensions.get('heel_reduction', 0)
)
Gmin_y, Zmax_y, Xmax_y_ignored = ipc_gull_wing(
ipc_data, ipc_round_base, manf_tol,
device_dimensions['lead_width'],
device_dimensions['overall_size_y'],
lead_len=device_dimensions.get('lead_len'),
heel_reduction=device_dimensions.get('heel_reduction', 0)
)
min_ep_to_pad_clearance = configuration['min_ep_to_pad_clearance']
heel_reduction_max = 0
if Gmin_x - 2*min_ep_to_pad_clearance < EP_size['x']:
heel_reduction_max = ((EP_size['x'] + 2*min_ep_to_pad_clearance - Gmin_x)/2)
#print('{}, {}, {}'.format(Gmin_x, EP_size['x'], min_ep_to_pad_clearance))
Gmin_x = EP_size['x'] + 2*min_ep_to_pad_clearance
if Gmin_y - 2*min_ep_to_pad_clearance < EP_size['y']:
heel_reduction = ((EP_size['y'] + 2*min_ep_to_pad_clearance - Gmin_y)/2)
if heel_reduction>heel_reduction_max:
heel_reduction_max = heel_reduction
Gmin_y = EP_size['y'] + 2*min_ep_to_pad_clearance
Pad = {}
Pad['left'] = {'center':[-(Zmax_x+Gmin_x)/4, 0], 'size':[(Zmax_x-Gmin_x)/2,Xmax]}
Pad['right'] = {'center':[(Zmax_x+Gmin_x)/4, 0], 'size':[(Zmax_x-Gmin_x)/2,Xmax]}
Pad['top'] = {'center':[0,-(Zmax_y+Gmin_y)/4], 'size':[Xmax,(Zmax_y-Gmin_y)/2]}
Pad['bottom'] = {'center':[0,(Zmax_y+Gmin_y)/4], 'size':[Xmax,(Zmax_y-Gmin_y)/2]}
return Pad
@staticmethod
def deviceDimensions(device_size_data):
dimensions = {
'body_size_x': TolerancedSize.fromYaml(device_size_data, base_name='body_size_x'),
'body_size_y': TolerancedSize.fromYaml(device_size_data, base_name='body_size_y'),
'lead_width': TolerancedSize.fromYaml(device_size_data, base_name='lead_width'),
'lead_len': TolerancedSize.fromYaml(device_size_data, base_name='lead_len')
}
dimensions['has_EP'] = False
if 'EP_size_x_min' in device_size_data and 'EP_size_x_max' in device_size_data or 'EP_size_x' in device_size_data:
dimensions['EP_size_x'] = TolerancedSize.fromYaml(device_size_data, base_name='EP_size_x')
dimensions['EP_size_y'] = TolerancedSize.fromYaml(device_size_data, base_name='EP_size_y')
dimensions['has_EP'] = True
if 'EP_mask_x' in device_size_data:
dimensions['EP_mask_x'] = TolerancedSize.fromYaml(device_size_data, base_name='EP_mask_x')
dimensions['EP_mask_y'] = TolerancedSize.fromYaml(device_size_data, base_name='EP_mask_y')
dimensions['heel_reduction'] = device_size_data.get('heel_reduction', 0)
if 'overall_size_x' in device_size_data or 'overall_size_y' in device_size_data:
if 'overall_size_x' in device_size_data:
dimensions['overall_size_x'] = TolerancedSize.fromYaml(device_size_data, base_name='overall_size_x')
else:
dimensions['overall_size_x'] = TolerancedSize.fromYaml(device_size_data, base_name='overall_size_y')
if 'overall_size_y' in device_size_data:
dimensions['overall_size_y'] = TolerancedSize.fromYaml(device_size_data, base_name='overall_size_y')
else:
dimensions['overall_size_y'] = TolerancedSize.fromYaml(device_size_data, base_name='overall_size_x')
else:
raise KeyError("Either overall size x or overall size y must be given (Outside to outside lead dimensions)")
return dimensions
def generateFootprint(self, device_params, header):
dimensions = Gullwing.deviceDimensions(device_params)
if 'deleted_pins' in device_params:
if type(device_params['deleted_pins']) is int:
device_params['deleted_pins'] = [device_params['deleted_pins']]
if 'hidden_pins' in device_params:
if type(device_params['hidden_pins']) is int:
device_params['hidden_pins'] = [device_params['hidden_pins']]
if 'deleted_pins' in device_params and 'hidden_pins' in device_params:
print("A footprint may not have deleted pins and hidden pins.")
else:
if dimensions['has_EP'] and 'thermal_vias' in device_params:
self.__createFootprintVariant(device_params, header, dimensions, True)
self.__createFootprintVariant(device_params, header, dimensions, False)
def __createFootprintVariant(self, device_params, header, dimensions, with_thermal_vias):
fab_line_width = self.configuration.get('fab_line_width', 0.1)
silk_line_width = self.configuration.get('silk_line_width', 0.12)
lib_name = self.configuration['lib_name_format_string'].format(category=header['library_Suffix'])
size_x = dimensions['body_size_x'].nominal
size_y = dimensions['body_size_y'].nominal
pincount_full = device_params['num_pins_x']*2 + device_params['num_pins_y']*2
if 'hidden_pins' in device_params:
pincount_text = '{}-{}'.format(pincount_full - len(device_params['hidden_pins']), pincount_full)
pincount = pincount_full - len(device_params['hidden_pins'])
elif 'deleted_pins' in device_params:
pincount_text = '{}-{}'.format(pincount_full, pincount_full - len(device_params['deleted_pins']))
pincount = pincount_full - len(device_params['deleted_pins'])
else:
pincount_text = '{}'.format(pincount_full)
pincount = pincount_full
ipc_reference = 'ipc_spec_gw_large_pitch' if device_params['pitch'] >= 0.625 else 'ipc_spec_gw_small_pitch'
if device_params.get('force_small_pitch_ipc_definition', False):
ipc_reference = 'ipc_spec_gw_small_pitch'
used_density = device_params.get('ipc_density', ipc_density)
ipc_data_set = self.ipc_defintions[ipc_reference][used_density]
ipc_round_base = self.ipc_defintions[ipc_reference]['round_base']
pitch = device_params['pitch']
name_format = self.configuration['fp_name_format_string_no_trailing_zero_pincount_text']
EP_size = {'x':0, 'y':0}
EP_mask_size = {'x':0, 'y':0}
if dimensions['has_EP']:
name_format = self.configuration['fp_name_EP_format_string_no_trailing_zero_pincount_text']
if 'EP_size_x_overwrite' in device_params:
EP_size = {
'x':device_params['EP_size_x_overwrite'],
'y':device_params['EP_size_y_overwrite']
}
else:
EP_size = {
'x':dimensions['EP_size_x'].nominal,
'y':dimensions['EP_size_y'].nominal
}
if 'EP_mask_x' in dimensions:
name_format = self.configuration['fp_name_EP_custom_mask_format_string_no_trailing_zero_pincount_text']
EP_mask_size = {'x':dimensions['EP_mask_x'].nominal, 'y':dimensions['EP_mask_y'].nominal}
EP_size = Vector2D(EP_size)
pad_details = self.calcPadDetails(dimensions, EP_size, ipc_data_set, ipc_round_base)
if 'custom_name_format' in device_params:
name_format = device_params['custom_name_format']
suffix = device_params.get('suffix', '').format(pad_x=pad_details['left']['size'][0],
pad_y=pad_details['left']['size'][1])
suffix_3d = suffix if device_params.get('include_suffix_in_3dpath', 'True') == 'True' else ""
model3d_path_prefix = self.configuration.get('3d_model_prefix','${KISYS3DMOD}')
fp_name = name_format.format(
man=device_params.get('manufacturer',''),
mpn=device_params.get('part_number',''),
pkg=header['device_type'],
pincount=pincount_text,
size_y=size_y,
size_x=size_x,
pitch=device_params['pitch'],
ep_size_x = EP_size['x'],
ep_size_y = EP_size['y'],
mask_size_x = EP_mask_size['x'],
mask_size_y = EP_mask_size['y'],
suffix=suffix,
suffix2="",
vias=self.configuration.get('thermal_via_suffix', '_ThermalVias') if with_thermal_vias else ''
).replace('__','_').lstrip('_')
fp_name_2 = name_format.format(
man=device_params.get('manufacturer',''),
mpn=device_params.get('part_number',''),
pkg=header['device_type'],
pincount=pincount_text,
size_y=size_y,
size_x=size_x,
pitch=device_params['pitch'],
ep_size_x = EP_size['x'],
ep_size_y = EP_size['y'],
mask_size_x = EP_mask_size['x'],
mask_size_y = EP_mask_size['y'],
suffix=suffix_3d,
suffix2="",
vias=''
).replace('__','_').lstrip('_')
model_name = '{model3d_path_prefix:s}{lib_name:s}.3dshapes/{fp_name:s}.wrl'\
.format(
model3d_path_prefix=model3d_path_prefix, lib_name=lib_name,
fp_name=fp_name_2)
#print(fp_name)
#print(pad_details)
kicad_mod = Footprint(fp_name)
# init kicad footprint
kicad_mod.setDescription(
"{manufacturer} {mpn} {package}, {pincount} Pin ({datasheet}), generated with kicad-footprint-generator {scriptname}"\
.format(
manufacturer = device_params.get('manufacturer',''),
package = header['device_type'],
mpn = device_params.get('part_number',''),
pincount = pincount,
datasheet = device_params['size_source'],
scriptname = os.path.basename(__file__).replace(" ", " ")
).lstrip())
kicad_mod.setTags(self.configuration['keyword_fp_string']\
.format(
man=device_params.get('manufacturer',''),
package=header['device_type'],
category=header['library_Suffix']
).lstrip())
kicad_mod.setAttribute('smd')
if 'custom_pad_layout' in device_params:
pad_radius = add_custom_pad_layout(kicad_mod, configuration, pad_details, device_params)
else:
pad_radius = add_dual_or_quad_pad_border(kicad_mod, configuration, pad_details, device_params)
EP_round_radius = 0
if dimensions['has_EP']:
pad_shape_details = getEpRoundRadiusParams(device_params, self.configuration, pad_radius)
EP_mask_size = EP_mask_size if EP_mask_size['x'] > 0 else None
if with_thermal_vias:
thermals = device_params['thermal_vias']
paste_coverage = thermals.get('EP_paste_coverage',
device_params.get('EP_paste_coverage', DEFAULT_PASTE_COVERAGE))
EP = ExposedPad(
number=pincount+1, size=EP_size, mask_size=EP_mask_size,
paste_layout=thermals.get('EP_num_paste_pads'),
paste_coverage=paste_coverage,
via_layout=thermals.get('count', 0),
paste_between_vias=thermals.get('paste_between_vias'),
paste_rings_outside=thermals.get('paste_rings_outside'),
via_drill=thermals.get('drill', 0.3),
via_grid=thermals.get('grid'),
paste_avoid_via=thermals.get('paste_avoid_via', True),
via_paste_clarance=thermals.get('paste_via_clearance', DEFAULT_VIA_PASTE_CLEARANCE),
min_annular_ring=thermals.get('min_annular_ring', DEFAULT_MIN_ANNULAR_RING),
bottom_pad_min_size=thermals.get('bottom_min_size', 0),
**pad_shape_details
)
else:
EP = ExposedPad(
number=pincount+1, size=EP_size, mask_size=EP_mask_size,
paste_layout=device_params.get('EP_num_paste_pads', 1),
paste_coverage=device_params.get('EP_paste_coverage', DEFAULT_PASTE_COVERAGE),
**pad_shape_details
)
kicad_mod.append(EP)
EP_round_radius = EP.getRoundRadius()
body_edge = {
'left': -dimensions['body_size_x'].nominal/2,
'right': dimensions['body_size_x'].nominal/2,
'top': -dimensions['body_size_y'].nominal/2,
'bottom': dimensions['body_size_y'].nominal/2
}
bounding_box = {
'left': pad_details['left']['center'][0] - pad_details['left']['size'][0]/2,
'right': pad_details['right']['center'][0] + pad_details['right']['size'][0]/2,
'top': pad_details['top']['center'][1] - pad_details['top']['size'][1]/2,
'bottom': pad_details['bottom']['center'][1] + pad_details['bottom']['size'][1]/2
}
if device_params['num_pins_x'] == 0:
bounding_box['top'] = body_edge['top']
bounding_box['bottom'] = body_edge['bottom']
if EP_size['y'] > dimensions['body_size_y'].nominal:
bounding_box['top'] = -EP_size['y']/2
bounding_box['bottom'] = EP_size['y']/2
if device_params['num_pins_y'] == 0:
bounding_box['left'] = body_edge['left']
bounding_box['right'] = body_edge['right']
if EP_size['x'] > dimensions['body_size_x'].nominal:
bounding_box['left'] = -EP_size['x']/2
bounding_box['right'] = EP_size['x']/2
pad_width = pad_details['top']['size'][0]
# ############################ SilkS ##################################
silk_pad_offset = configuration['silk_pad_clearance'] + configuration['silk_line_width']/2
silk_offset = configuration['silk_fab_offset']
right_pads_silk_bottom = (device_params['num_pins_y']-1)*device_params['pitch']/2\
+pad_details['right']['size'][1]/2+silk_pad_offset
silk_bottom = body_edge['bottom']+silk_offset
if EP_size['y']/2 <= body_edge['bottom'] and right_pads_silk_bottom >= silk_bottom:
silk_bottom = max(silk_bottom, EP_size['y']/2+silk_pad_offset)
silk_bottom = max(silk_bottom, right_pads_silk_bottom)
silk_bottom = min(body_edge['bottom']+silk_pad_offset, silk_bottom)
bottom_pads_silk_right = (device_params['num_pins_x']-1)*device_params['pitch']/2\
+pad_details['bottom']['size'][0]/2+silk_pad_offset
silk_right = body_edge['right']+silk_offset
if EP_size['x']/2 <= body_edge['right'] and bottom_pads_silk_right >= silk_right:
silk_right = max(silk_right, EP_size['x']/2+silk_pad_offset)
silk_right = max(silk_right, bottom_pads_silk_right)
silk_right = min(body_edge['right']+silk_pad_offset, silk_right)
min_lenght = configuration.get('silk_line_lenght_min', 0)
silk_corner_bottom_right = Vector2D(silk_right, silk_bottom)
silk_point_bottom_inside = nearestSilkPointOnOrthogonalLine(
pad_size=EP_size,
pad_position=[0, 0],
pad_radius=EP_round_radius,
fixed_point=silk_corner_bottom_right,
moving_point=Vector2D(0, silk_bottom),
silk_pad_offset=silk_pad_offset,
min_lenght=min_lenght)
if silk_point_bottom_inside is not None and device_params['num_pins_x'] > 0:
silk_point_bottom_inside = nearestSilkPointOnOrthogonalLine(
pad_size=pad_details['bottom']['size'],
pad_position=[
pad_details['bottom']['center'][0]+(device_params['num_pins_x']-1)/2*pitch,
pad_details['bottom']['center'][1]],
pad_radius=pad_radius,
fixed_point=silk_corner_bottom_right,
moving_point=silk_point_bottom_inside,
silk_pad_offset=silk_pad_offset,
min_lenght=min_lenght)
silk_point_right_inside = nearestSilkPointOnOrthogonalLine(
pad_size=EP_size,
pad_position=[0, 0],
pad_radius=EP_round_radius,
fixed_point=silk_corner_bottom_right,
moving_point=Vector2D(silk_right, 0),
silk_pad_offset=silk_pad_offset,
min_lenght=min_lenght)
if silk_point_right_inside is not None and device_params['num_pins_y'] > 0:
silk_point_right_inside = nearestSilkPointOnOrthogonalLine(
pad_size=pad_details['right']['size'],
pad_position=[
pad_details['right']['center'][0],
pad_details['right']['center'][1]+(device_params['num_pins_y']-1)/2*pitch],
pad_radius=pad_radius,
fixed_point=silk_corner_bottom_right,
moving_point=silk_point_right_inside,
silk_pad_offset=silk_pad_offset,
min_lenght=min_lenght)
if silk_point_bottom_inside is None and silk_point_right_inside is not None:
silk_corner_bottom_right['y'] = body_edge['bottom']
silk_corner_bottom_right = nearestSilkPointOnOrthogonalLine(
pad_size=pad_details['bottom']['size'],
pad_position=[
pad_details['bottom']['center'][0]+(device_params['num_pins_x']-1)/2*pitch,
pad_details['bottom']['center'][1]],
pad_radius=pad_radius,
fixed_point=silk_point_right_inside,
moving_point=silk_corner_bottom_right,
silk_pad_offset=silk_pad_offset,
min_lenght=min_lenght)
elif silk_point_right_inside is None and silk_point_bottom_inside is not None:
silk_corner_bottom_right['x'] = body_edge['right']
silk_corner_bottom_right = nearestSilkPointOnOrthogonalLine(
pad_size=pad_details['right']['size'],
pad_position=[
pad_details['right']['center'][0],
pad_details['right']['center'][1]+(device_params['num_pins_y']-1)/2*pitch],
pad_radius=pad_radius,
fixed_point=silk_point_bottom_inside,
moving_point=silk_corner_bottom_right,
silk_pad_offset=silk_pad_offset,
min_lenght=min_lenght)
poly_bottom_right = []
if silk_point_bottom_inside is not None:
poly_bottom_right.append(silk_point_bottom_inside)
poly_bottom_right.append(silk_corner_bottom_right)
if silk_point_right_inside is not None:
poly_bottom_right.append(silk_point_right_inside)
if len(poly_bottom_right) > 1 and silk_corner_bottom_right is not None:
kicad_mod.append(PolygoneLine(
polygone=poly_bottom_right,
width=configuration['silk_line_width'],
layer="F.SilkS"))
kicad_mod.append(PolygoneLine(
polygone=poly_bottom_right,
width=configuration['silk_line_width'],
layer="F.SilkS", x_mirror=0))
kicad_mod.append(PolygoneLine(
polygone=poly_bottom_right,
width=configuration['silk_line_width'],
layer="F.SilkS", y_mirror=0))
if device_params['num_pins_y'] > 0:
if len(poly_bottom_right)>2:
kicad_mod.append(PolygoneLine(
polygone=poly_bottom_right,
width=configuration['silk_line_width'],
layer="F.SilkS", y_mirror=0, x_mirror=0))
kicad_mod.append(Line(
start={'x': -silk_right, 'y': -right_pads_silk_bottom},
end={'x': bounding_box['left'], 'y': -right_pads_silk_bottom},
width=configuration['silk_line_width'],
layer="F.SilkS"))
elif silk_corner_bottom_right['y'] >= right_pads_silk_bottom and silk_point_bottom_inside is not None:
kicad_mod.append(Line(
start=-silk_point_bottom_inside,
end={'x': bounding_box['left'], 'y': -silk_point_bottom_inside['y']},
width=configuration['silk_line_width'],
layer="F.SilkS"))
else:
if len(poly_bottom_right)>2:
poly_bottom_right[0]['x']=bottom_pads_silk_right
kicad_mod.append(PolygoneLine(
polygone=poly_bottom_right,
width=configuration['silk_line_width'],
layer="F.SilkS", y_mirror=0, x_mirror=0))
kicad_mod.append(Line(
start={'x': -bottom_pads_silk_right, 'y': -silk_corner_bottom_right['y']},
end={'x': -bottom_pads_silk_right, 'y': bounding_box['top']},
width=configuration['silk_line_width'],
layer="F.SilkS"))
elif silk_corner_bottom_right['x'] >= bottom_pads_silk_right and silk_point_right_inside is not None:
kicad_mod.append(Line(
start=-silk_point_right_inside,
end={'x': -silk_point_right_inside['x'], 'y': bounding_box['top']},
width=configuration['silk_line_width'],
layer="F.SilkS"))
# # ######################## Fabrication Layer ###########################
fab_bevel_size = min(configuration['fab_bevel_size_absolute'], configuration['fab_bevel_size_relative']*min(size_x, size_y))
poly_fab = [
{'x': body_edge['left']+fab_bevel_size, 'y': body_edge['top']},
{'x': body_edge['right'], 'y': body_edge['top']},
{'x': body_edge['right'], 'y': body_edge['bottom']},
{'x': body_edge['left'], 'y': body_edge['bottom']},
{'x': body_edge['left'], 'y': body_edge['top']+fab_bevel_size},
{'x': body_edge['left']+fab_bevel_size, 'y': body_edge['top']},
]
kicad_mod.append(PolygoneLine(
polygone=poly_fab,
width=configuration['fab_line_width'],
layer="F.Fab"))
# # ############################ CrtYd ##################################
off = ipc_data_set['courtyard']
grid = configuration['courtyard_grid']
if device_params['num_pins_y'] == 0 or device_params['num_pins_x'] == 0:
cy1=roundToBase(bounding_box['top']-off, grid)
kicad_mod.append(RectLine(
start={
'x':roundToBase(bounding_box['left']-off, grid),
'y':cy1
},
end={
'x':roundToBase(bounding_box['right']+off, grid),
'y':roundToBase(bounding_box['bottom']+off, grid)
},
width=configuration['courtyard_line_width'],
layer='F.CrtYd'))
else:
cy1=roundToBase(bounding_box['top']-off, grid)
cy2=roundToBase(body_edge['top']-off, grid)
cy3=-roundToBase(
device_params['pitch']*(device_params['num_pins_y']-1)/2.0
+ pad_width/2.0 + off, grid)
cx1=-roundToBase(
device_params['pitch']*(device_params['num_pins_x']-1)/2.0
+ pad_width/2.0 + off, grid)
cx2=roundToBase(body_edge['left']-off, grid)
cx3=roundToBase(bounding_box['left']-off, grid)
crty_poly_tl = [
{'x':0, 'y':cy1},
{'x':cx1, 'y':cy1},
{'x':cx1, 'y':cy2},
{'x':cx2, 'y':cy2},
{'x':cx2, 'y':cy3},
{'x':cx3, 'y':cy3},
{'x':cx3, 'y':0}
]
kicad_mod.append(PolygoneLine(polygone=crty_poly_tl,
layer='F.CrtYd', width=configuration['courtyard_line_width']))
kicad_mod.append(PolygoneLine(polygone=crty_poly_tl,
layer='F.CrtYd', width=configuration['courtyard_line_width'],
x_mirror=0))
kicad_mod.append(PolygoneLine(polygone=crty_poly_tl,
layer='F.CrtYd', width=configuration['courtyard_line_width'],
y_mirror=0))
kicad_mod.append(PolygoneLine(polygone=crty_poly_tl,
layer='F.CrtYd', width=configuration['courtyard_line_width'],
x_mirror=0, y_mirror=0))
# ######################### Text Fields ###############################
addTextFields(kicad_mod=kicad_mod, configuration=configuration, body_edges=body_edge,
courtyard={'top': cy1, 'bottom': -cy1}, fp_name=fp_name, text_y_inside_position='center')
##################### Output and 3d model ############################
kicad_mod.append(Model(filename=model_name))
output_dir = '{lib_name:s}.pretty/'.format(lib_name=lib_name)
if not os.path.isdir(output_dir): #returns false if path does not yet exist!! (Does not check path validity)
os.makedirs(output_dir)
filename = '{outdir:s}{fp_name:s}.kicad_mod'.format(outdir=output_dir, fp_name=fp_name)
file_handler = KicadFileHandler(kicad_mod)
file_handler.writeFile(filename)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='use config .yaml files to create footprints. See readme.md for details about the parameter file format.')
parser.add_argument('files', metavar='file', type=str, nargs='+',
help='list of files holding information about what devices should be created.')
parser.add_argument('--global_config', type=str, nargs='?', help='the config file defining how the footprint will look like. (KLC)', default='../../tools/global_config_files/config_KLCv3.0.yaml')
parser.add_argument('--series_config', type=str, nargs='?', help='the config file defining series parameters.', default='../package_config_KLCv3.yaml')
parser.add_argument('--density', type=str, nargs='?', help='IPC density level (L,N,M)', default='N')
parser.add_argument('--ipc_doc', type=str, nargs='?', help='IPC definition document', default='../ipc_definitions.yaml')
parser.add_argument('--force_rectangle_pads', action='store_true', help='Force the generation of rectangle pads instead of rounded rectangle')
parser.add_argument('--kicad4_compatible', action='store_true', help='Create footprints compatible with version 4 (avoids round-rect and custom pads).')
args = parser.parse_args()
if args.density == 'L':
ipc_density = 'least'
elif args.density == 'M':
ipc_density = 'most'
ipc_doc_file = args.ipc_doc
with open(args.global_config, 'r') as config_stream:
try:
configuration = yaml.safe_load(config_stream)
except yaml.YAMLError as exc:
print(exc)
with open(args.series_config, 'r') as config_stream:
try:
configuration.update(yaml.safe_load(config_stream))
except yaml.YAMLError as exc:
print(exc)
if args.force_rectangle_pads or args.kicad4_compatible:
configuration['round_rect_max_radius'] = None
configuration['round_rect_radius_ratio'] = 0
configuration['kicad4_compatible'] = args.kicad4_compatible
for filepath in args.files:
gw = Gullwing(configuration)
with open(filepath, 'r') as command_stream:
try:
cmd_file = yaml.safe_load(command_stream)
except yaml.YAMLError as exc:
print(exc)
header = cmd_file.pop('FileHeader')
for pkg in cmd_file:
print("generating part for parameter set {}".format(pkg))
gw.generateFootprint(cmd_file[pkg], header)
| gpl-3.0 | -423,376,989,085,531,260 | 46.574303 | 199 | 0.557056 | false |
drwahl/pimometer | bin/mongo_config.py | 1 | 5605 | #!/usr/bin/env python
# vim: set expandtab:
"""
**********************************************************************
GPL License
***********************************************************************
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
***********************************************************************/
:author: David Wahlstrom
:email: david.wahlstrom@gmail.com
"""
import datetime
import sys
import logging
import os
try:
from pymongo import Connection
pymongo_driver = True
except ImportError:
import urllib2
pymongo_driver = False
import json
from ConfigParser import SafeConfigParser
parser = SafeConfigParser()
if os.path.isfile('/etc/pimometer/pimometer.conf'):
config = '/etc/pimometer/pimometer.conf'
else:
config = os.path.join(os.path.dirname(__file__), '../conf/pimometer.conf')
parser.read(config)
mongodb_host = parser.get('pimometer', 'host')
dbname = parser.get('pimometer', 'database')
collection_name = parser.get('pimometer', 'collection')
assert type(mongodb_host) == str
assert type(dbname) == str
assert type(collection_name) == str
logging.basicConfig(
level=logging.WARN,
format='%(asctime)s %(levelname)s - %(message)s',
datefmt='%y.%m.%d %H:%M:%S')
console = logging.StreamHandler(sys.stderr)
console.setLevel(logging.WARN)
logging.getLogger('pimometer-mongo-config').addHandler(console)
log = logging.getLogger('pimometer-mongo-config')
def configure():
"""
Read the configuration file and initialize the connection to the MongoDB instance
"""
log.debug('in configure')
host = mongodb_host
log.debug('connecting to mongodb host: %s' % host)
database = dbname
log.debug('connecting to database name: %s' % database)
collection = collection_name
log.debug('using collection name: %s' % collection)
if pymongo_driver:
log.debug('using pymongo driver for communications')
con = Connection(host)
log.debug('selecting database/collection: %s/%s' % (database,
collection))
col = con[database][collection]
else:
log.debug('using REST interface for communications')
col = 'http://%s/%s/%s/' % (host, database, collection)
return col
def update_config(collection, event=None, poll_interval=60):
"""
Update client configuration collection
"""
log.debug("in update_config(%s, %s, %s,)" % (collection,
event,
poll_interval))
assert type(poll_interval) == float or type(poll_interval) == int
assert type(event) == str or event is None
# sometimes, we get a string instead of a NoneType
if event == 'None':
event = None
collection.update(
{'_id': 'client_config'},
{"$set": {
'current_event': event,
'poll_interval': int(poll_interval)}},
upsert=True)
assert collection.find_one({'_id': 'client_config'})['current_event'] == event
assert collection.find_one({'_id': 'client_config'})['poll_interval'] == poll_interval
def get_config(collection):
"""
Pull the configuration from the client_config document in the database
"""
config = collection.find_one({'_id': 'client_config'})
assert type(config) == dict
return config
def main():
import argparse
cmd_parser = argparse.ArgumentParser(
description='Configure client_config for pimometer')
cmd_parser.add_argument(
'-g',
'--get',
dest='get_config',
action='store_true',
help='Returns the current configuration for client_config',
default=None)
cmd_parser.add_argument(
'-p',
'--poll-interval',
dest='poll_interval',
action='store',
help='Value to set the poll interval to.',
default=None)
cmd_parser.add_argument(
'-e',
'--event',
dest='event',
action='store',
help='Event of bbq/smoke out',
default=None)
cmd_parser.add_argument(
'-d',
'--debug',
dest='debug',
action='store_true',
help='Enable debugging during execution',
default=None)
args = cmd_parser.parse_args()
if args.debug:
log.setLevel(logging.DEBUG)
collection = configure()
if args.get_config:
print get_config(collection)
else:
if not args.event and not args.poll_interval:
print "ERROR: -e or -p not specified.\n"
cmd_parser.print_help()
sys.exit(1)
else:
if args.event and args.poll_interval:
update_config(collection, str(args.event), int(args.poll_interval))
elif args.event:
update_config(collection, event=str(args.event))
elif args.poll_interval:
update_config(collection, poll_interval=int(args.poll_interval))
if __name__ == "__main__":
main()
| gpl-2.0 | -256,066,831,566,277,950 | 29.966851 | 90 | 0.603211 | false |
SUSE/kiwi | kiwi/utils/sysconfig.py | 1 | 2856 | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import os
class SysConfig:
"""
**Read and Write sysconfig style files**
:param str source_file: source file path
"""
def __init__(self, source_file):
self.source_file = source_file
self.data_dict = {}
self.data_list = []
self._read()
def __setitem__(self, key, value):
if key not in self.data_dict:
self.data_list.append(key)
self.data_dict[key] = value
def __getitem__(self, key):
return self.data_dict[key]
def __contains__(self, key):
return key in self.data_dict
def get(self, key):
return self.data_dict.get(key)
def write(self):
"""
Write back source file with changed content but in same order
"""
with open(self.source_file, 'w') as source:
for line in self.data_list:
if line in self.data_dict:
key = line
value = self.data_dict[key]
source.write('{0}={1}'.format(key, value))
else:
source.write(line)
source.write(os.linesep)
def _read(self):
"""
Read file into a list and a key/value dictionary
Only lines which are not considered a comment and
containing the structure key=value are parsed into
the key/value dictionary. In order to keep the order
of lines a list is stored too. Those lines matching
the key/value format are stored with their key in
the list as a placeholder
"""
if os.path.exists(self.source_file):
with open(self.source_file) as source:
for line in source.readlines():
line = line.strip()
if '#' not in line and '=' in line:
elements = line.split('=')
key = elements.pop(0).strip()
value = '='.join(elements)
self.data_dict[key] = value
self.data_list.append(key)
else:
self.data_list.append(line)
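# Minimal usage sketch (illustrative only; the path and key below are placeholders
# and not part of this module). SysConfig preserves comment lines and key order,
# so a read-modify-write round trip only changes the values you touch:
#
#   cfg = SysConfig('/etc/sysconfig/bootloader')
#   if 'LOADER_TYPE' in cfg:
#       cfg['LOADER_TYPE'] = '"grub2"'
#   cfg.write()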
| gpl-3.0 | -2,067,535,882,005,381,400 | 33 | 70 | 0.572129 | false |
lucadealfaro/crowdranker | controllers/feedback.py | 1 | 10966 | # -*- coding: utf-8 -*-
import access
import util
@auth.requires_login()
def index():
"""Produces a list of the feedback obtained for a given venue,
or for all venues."""
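# Example URLs (illustrative, assuming the usual web2py routing for this controller):
# /feedback/index/all            -> feedback across all venues
# /feedback/index/<venue_id>     -> feedback for a single venue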
venue_id = request.args(0)
if venue_id == 'all':
q = (db.submission.user == get_user_email())
else:
q = ((db.submission.user == get_user_email())
& (db.submission.venue_id == venue_id))
db.submission.id.represent = lambda x, r: A(T('View'), _class='btn', _href=URL('submission', 'view_own_submission', args=['v', r.id]))
db.submission.id.label = T('Submission')
db.submission.id.readable = True
db.submission.venue_id.readable = True
grid = SQLFORM.grid(q,
fields=[db.submission.id, db.submission.venue_id,
db.submission.date_created, db.submission.date_updated, ],
csv=False, details=False, create=False, editable=False, deletable=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid)
@auth.requires_login()
def view_feedback():
"""Shows detailed feedback for a user in a venue.
This controller accepts various types of arguments:
* 's', submission_id
* 'u', venue_id, username
* 'v', venue_id (in which case, shows own submission to that venue)
"""
if len(request.args) == 0:
redirect(URL('default', 'index'))
if request.args(0) == 's':
# submission_id
n_args = 2
subm = db.submission(request.args(1)) or redirect(URL('default', 'index'))
c = db.venue(subm.venue_id) or redirect(URL('default', 'index'))
username = subm.user
elif request.args(0) == 'v':
# venue_id
n_args = 2
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = get_user_email()
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
else:
# venue_id, username
n_args = 3
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = request.args(2) or redirect(URL('default', 'index'))
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
# Checks permissions.
props = db(db.user_properties.user == get_user_email()).select().first()
if props == None:
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
is_author = (username == get_user_email())
can_view_feedback = access.can_view_feedback(c, props) or is_author
if (not can_view_feedback):
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
if not (access.can_view_feedback(c, props) or datetime.utcnow() > c.rate_close_date):
session.flash = T('The ratings are not yet available.')
redirect(URL('feedback', 'index', args=['all']))
# Produces the link to edit the feedback.
edit_feedback_link = None
if subm is not None and access.can_observe(c, props):
edit_feedback_link = A(T('Edit feedback'), _class='btn',
_href=URL('submission', 'edit_feedback', args=[subm.id]))
# Produces the download link.
download_link = None
if subm is not None and c.allow_file_upload and subm.content is not None:
if is_author:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_author', args=[subm.id, subm.content]))
else:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_manager', args=[subm.id, subm.content]))
venue_link = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
# Submission link.
subm_link = None
if subm is not None and c.allow_link_submission:
subm_link = A(subm.link, _href=subm.link)
# Submission content and feedback.
subm_comment = None
subm_feedback = None
if subm is not None:
raw_subm_comment = keystore_read(subm.comment)
if raw_subm_comment is not None and len(raw_subm_comment) > 0:
subm_comment = MARKMIN(keystore_read(subm.comment))
raw_feedback = keystore_read(subm.feedback)
if raw_feedback is not None and len(raw_feedback) > 0:
subm_feedback = MARKMIN(raw_feedback)
# Display settings.
db.submission.percentile.readable = True
db.submission.comment.readable = True
db.submission.feedback.readable = True
if access.can_observe(c, props):
db.submission.quality.readable = True
db.submission.error.readable = True
# Reads the grade information.
submission_grade = submission_percentile = None
review_grade = review_percentile = user_reputation = None
final_grade = final_percentile = None
assigned_grade = None
if c.grades_released:
grade_info = db((db.grades.user == username) & (db.grades.venue_id == c.id)).select().first()
if grade_info is not None:
submission_grade = represent_quality(grade_info.submission_grade, None)
submission_percentile = represent_percentage(grade_info.submission_percentile, None)
review_grade = represent_quality_10(grade_info.accuracy, None)
review_percentile = represent_percentage(grade_info.accuracy_percentile, None)
user_reputation = represent_01_as_percentage(grade_info.reputation, None)
final_grade = represent_quality(grade_info.grade, None)
final_percentile = represent_percentage(grade_info.percentile, None)
assigned_grade = represent_quality(grade_info.assigned_grade, None)
# Makes a grid of comments.
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.rejected.readable = True
db.task.helpfulness.readable = db.task.helpfulness.writable = True
# Prevent editing the comments; the only thing editable should be the "is bogus" field.
db.task.comments.writable = False
db.task.comments.readable = True
ranking_link = None
if access.can_observe(c, props):
db.task.user.readable = True
db.task.completed_date.readable = True
links = [
dict(header=T('Review details'), body= lambda r:
A(T('View'), _class='btn', _href=URL('ranking', 'view_comparison', args=[r.id]))),
]
details = False
if subm is not None:
ranking_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_submission', args=[subm.id]))
reviews_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_user', args=[username, c.id]))
db.task.user.represent = lambda v, r: A(v, _href=URL('ranking', 'view_comparisons_given_user',
args=[v, c.id], user_signature=True))
else:
user_reputation = None
links = [
dict(header=T('Review feedback'), body = lambda r:
A(T('Give feedback'), _class='btn',
_href=URL('feedback', 'reply_to_review', args=[r.id], user_signature=True))),
]
details = False
ranking_link = None
reviews_link = None
if subm is not None:
q = ((db.task.submission_id == subm.id) & (db.task.is_completed == True))
# q = (db.task.submission_id == subm.id)
else:
q = (db.task.id == -1)
grid = SQLFORM.grid(q,
fields=[db.task.id, db.task.user, db.task.rejected, db.task.comments, db.task.helpfulness, ],
details = details,
csv=False, create=False, editable=False, deletable=False, searchable=False,
links=links,
args=request.args[:n_args],
maxtextlength=24,
)
return dict(subm=subm, download_link=download_link, subm_link=subm_link, username=username,
subm_comment=subm_comment, subm_feedback=subm_feedback,
edit_feedback_link=edit_feedback_link,
is_admin=is_user_admin(),
submission_grade=submission_grade, submission_percentile=submission_percentile,
review_grade=review_grade, review_percentile=review_percentile,
user_reputation=user_reputation,
final_grade=final_grade, final_percentile=final_percentile,
assigned_grade=assigned_grade,
venue_link=venue_link, grid=grid, ranking_link=ranking_link,
reviews_link=reviews_link)
@auth.requires_signature()
def reply_to_review():
t = db.task(request.args(0)) or redirect(URL('default', 'index'))
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.comments.readable = False
db.task.helpfulness.readable = db.task.helpfulness.writable = True
db.task.feedback.readable = db.task.feedback.writable = True
form = SQLFORM(db.task, record=t)
form.vars.feedback = keystore_read(t.feedback)
if form.process(onvalidation=validate_review_feedback(t)).accepted:
session.flash = T('Updated.')
redirect(URL('feedback', 'view_feedback', args=['s', t.submission_id]))
link_to_submission = A(T('View submission'), _href=URL('submission', 'view_own_submission', args=['v', t.submission_id]))
review_comments = MARKMIN(keystore_read(t.comments))
return dict(form=form, link_to_submission=link_to_submission, review_comments=review_comments)
def validate_review_feedback(t):
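    """Return an onvalidation callback for the review feedback form: it writes
    the submitted feedback text to the keystore and replaces form.vars.feedback
    with the resulting keystore id before the task record is updated."""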
def f(form):
if not form.errors:
feedback_id = keystore_update(t.feedback, form.vars.feedback)
form.vars.feedback = feedback_id
return f
@auth.requires_login()
def view_my_reviews():
"""This controller displays the reviews a user has written for a venue, along with
the feedback they received."""
c = db.venue(request.args(0)) or redirect(URL('rating', 'review_index'))
link_to_venue = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
link_to_eval = A(T('My evaluation in this venue'), _class='btn',
_href=URL('feedback', 'view_feedback', args=['v', c.id]))
q = ((db.task.user == get_user_email()) & (db.task.venue_id == c.id))
db.task.rejected.readable = True
db.task.helpfulness.readable = True
db.task.comments.readable = True
db.task.feedback.readable = True
# To prevent chopping
db.task.submission_name.represent = represent_text_field
grid = SQLFORM.grid(q,
fields=[db.task.submission_name, db.task.rejected, db.task.helpfulness],
details=True,
editable=False, deletable=False, create=False, searchable=False,
csv=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid, link_to_venue=link_to_venue, link_to_eval=link_to_eval)
| bsd-3-clause | -9,091,172,184,251,331,000 | 45.66383 | 138 | 0.621922 | false |
gabrielmagno/nano-dlna | nanodlna/dlna.py | 1 | 1582 | #!/usr/bin/env python3
# encoding: UTF-8
import os
import pkgutil
import sys
from xml.sax.saxutils import escape as xmlescape
if sys.version_info.major == 3:
import urllib.request as urllibreq
else:
import urllib2 as urllibreq
def send_dlna_action(device, data, action):
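    """Render the SOAP template for the given action, fill it in with the
    request data, and POST it to the device's control URL with the
    appropriate SOAP headers."""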
action_data = pkgutil.get_data(
"nanodlna", "templates/action-{0}.xml".format(action)).decode("UTF-8")
action_data = action_data.format(**data).encode("UTF-8")
headers = {
"Content-Type": "text/xml; charset=\"utf-8\"",
"Content-Length": "{0}".format(len(action_data)),
"Connection": "close",
"SOAPACTION": "\"{0}#{1}\"".format(device["st"], action)
}
request = urllibreq.Request(device["action_url"], action_data, headers)
urllibreq.urlopen(request)
def play(files_urls, device):
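    """Build the URI and metadata payload for the video (and optional
    subtitle) and send SetAVTransportURI followed by Play to the device."""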
video_data = {
"uri_video": files_urls["file_video"],
"type_video": os.path.splitext(files_urls["file_video"])[1][1:],
}
if "file_subtitle" in files_urls and files_urls["file_subtitle"]:
video_data.update({
"uri_sub": files_urls["file_subtitle"],
"type_sub": os.path.splitext(files_urls["file_subtitle"])[1][1:]
})
metadata = pkgutil.get_data(
"nanodlna",
"templates/metadata-video_subtitle.xml").decode("UTF-8")
video_data["metadata"] = xmlescape(metadata.format(**video_data))
else:
video_data["metadata"] = ""
send_dlna_action(device, video_data, "SetAVTransportURI")
send_dlna_action(device, video_data, "Play")
| mit | 4,625,877,574,989,528,000 | 27.763636 | 78 | 0.616941 | false |
morevnaproject/RenderChan | renderchan/contrib/pencil2d.py | 1 | 5111 |
__author__ = 'Konstantin Dmitriev'
from renderchan.module import RenderChanModule
from renderchan.utils import is_true_string
from distutils.version import StrictVersion
import subprocess
import tempfile
import os
from xml.etree import ElementTree
class RenderChanPencil2dModule(RenderChanModule):
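    """RenderChan module that renders Pencil2D projects (.pcl/.pclx) by
    driving the pencil2d command-line interface; available options depend
    on the detected Pencil2D version."""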
def __init__(self):
RenderChanModule.__init__(self)
self.conf['binary']=self.findBinary("pencil2d")
self.conf["packetSize"]=0
# Extra params
self.extraParams["transparency"]="0"
self.extraParams["width"]="-1"
self.extraParams["height"]="-1"
self.extraParams["startFrame"]="1"
self.extraParams["endFrame"]="last"
self.version=StrictVersion('0.5.4') #default value
def checkRequirements(self):
RenderChanModule.checkRequirements(self)
if self.active:
# The CLI features depend on the version
with tempfile.TemporaryDirectory() as tmpPath:
# The exporting of a fake file is a workaround for older versions which just start the program when passed only -v
proc = subprocess.Popen([self.conf['binary'], "-v", "--export-sequence", os.path.join(tmpPath,"test")], stdout=subprocess.PIPE)
try:
outs, errs = proc.communicate(timeout=5)
                except subprocess.TimeoutExpired:
proc.kill()
outs, errs = proc.communicate()
rc = proc.poll()
if rc == 0:
try:
                        for line in outs.decode("utf-8").splitlines():
if line.startswith("Pencil2D "):
# Get the version from stdout. An example of the output: "Pencil2D 0.6.0\n"
self.version = line.rstrip().split(" ")[-1]
self.version = ".".join(self.version.split(".")[0:3])
self.version = StrictVersion(self.version)
except:
self.active = False
else:
self.active = False
if self.active == False:
print("WARNING: Failed to initialize Pencil2D module. The possible reasons for that could be: missing X connection, or the version of Pencil2D on your system is unsupported (too old?). In latter case please consider to get latest version at https://www.pencil2d.org/.")
return self.active
def analyze(self, filename):
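        """Collect the files a project depends on: for .pcl projects the XML
        is parsed and every element's 'src' attribute is resolved inside the
        companion '<filename>.data' directory."""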
info={ "dependencies":[] }
if filename.endswith(".pcl"):
with open(filename, 'r') as f:
tree = ElementTree.parse(f)
root = tree.getroot()
info["dependencies"].extend((os.path.join(filename + ".data", element.get("src")) for element in root.findall(".//*[@src]")))
else:
# We don't actually have to do anything here because there are no dependencies and the default values
# automatically update for changes in the internal width, height, camera etc.
# This is how we would open it if we needed to
"""with ZipFile(filename) as zipdir:
with zipdir.open('main.xml') as mainfile:
tree = ElementTree.parse(mainfile)
root = tree.getroot()"""
return info
def getInputFormats(self):
if self.version >= StrictVersion('0.6.0'):
return ["pcl", "pclx"]
else:
return ["pcl"]
def getOutputFormats(self):
if self.version > StrictVersion('0.6.0'):
return ["png", "jpg", "jpeg", "tif", "tiff", "bmp", "mp4", "avi", "gif", "webm"]
elif self.version == StrictVersion('0.6.0'):
return ["png", "jpg", "jpeg", "tif", "tiff", "bmp"]
else:
return ["png"]
def render(self, filename, outputPath, startFrame, endFrame, format, updateCompletion, extraParams={}):
comp = 0.0
updateCompletion(comp)
output = os.path.join(outputPath,"file")
if not os.path.exists(outputPath):
os.mkdir(outputPath)
if self.version > StrictVersion('0.6.0'):
commandline=[self.conf['binary'], filename, "-o", output, "--width", extraParams['width'], "--height", extraParams['height'], "--start", str(startFrame), "--end", str(endFrame)]
if is_true_string(extraParams['transparency']):
commandline.append("--transparency")
if ('camera' in extraParams) and (extraParams['camera']):
commandline.extend(["--camera", extraParams['camera']])
elif self.version == StrictVersion('0.6.0'):
commandline=[self.conf['binary'], filename, "--export-sequence", output, "--width", extraParams['width'], "--height", extraParams['height']]
if is_true_string(extraParams['transparency']):
commandline.append("--transparency")
else:
commandline=[self.conf['binary'], filename, "--export-sequence", output]
print(commandline)
subprocess.check_call(commandline)
updateCompletion(1.0)
| bsd-3-clause | -8,211,885,996,274,041,000 | 43.443478 | 285 | 0.576208 | false |
eggplantbren/NSwMCMC | python/straightline2.py | 1 | 2303 | import copy
import numpy as np
import numpy.random as rng
import scipy.special
from utils import randh
from numba import jit
# How many parameters are there?
num_params = 4
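# The parameter vector is [m, b, log_sigma, log_nu]: slope, intercept,
# log of the t-distribution scale and log of its degrees of freedom.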
# Some data
data = np.loadtxt("road.txt")
N = data.shape[0] # Number of data points
# Plot the data
import matplotlib.pyplot as plt
plt.plot(data[:,0], data[:,1], "o")
plt.xlabel("Age of person (years)")
plt.ylabel("Maximum vision distance (feet)")
plt.show()
# Some idea of how big the Metropolis proposals should be
jump_sizes = np.array([1000.0, 1000.0, 20.0, 5.0])
@jit
def from_prior():
"""
A function to generate parameter values from the prior.
Returns a numpy array of parameter values.
"""
m = 1000.0*rng.randn()
b = 1000.0*rng.randn()
log_sigma = -10.0 + 20.0*rng.rand()
log_nu = 5.0*rng.rand()
return np.array([m, b, log_sigma, log_nu])
@jit
def log_prior(params):
"""
Evaluate the (log of the) prior distribution
"""
# Rename the parameters
m, b, log_sigma, log_nu = params
logp = 0.0
# Normal prior for m and b
# Metropolis only needs the ratio, so I've left out the 2pi bits
logp += -0.5*(m/1000.0)**2
logp += -0.5*(b/1000.0)**2
if log_sigma < -10.0 or log_sigma > 10.0:
return -np.Inf
if log_nu < 0.0 or log_nu > 5.0:
return -np.Inf
return logp
@jit
def log_likelihood(params):
"""
Evaluate the (log of the) likelihood function
"""
# Rename the parameters
m, b, log_sigma, log_nu = params
# Get sigma and nu
sigma = np.exp(log_sigma)
nu = np.exp(log_nu)
# First calculate the straight line
line = m*data[:,0] + b
# t distribution (compare with the pdf on wikipedia, under
# Non-standardized Student's t-distribution)
return N*scipy.special.gammaln(0.5*(nu+1.0))\
-N*0.5*np.log(nu*np.pi) - N*scipy.special.gammaln(0.5*nu) - N*np.log(sigma)\
-0.5*(nu+1.0)*np.sum(np.log(1.0 + ((data[:,1] - line)**2/sigma**2)/nu))
@jit
def proposal(params):
"""
Generate new values for the parameters, for the Metropolis algorithm.
"""
# Copy the parameters
new = copy.deepcopy(params)
# Which one should we change?
which = rng.randint(num_params)
new[which] += jump_sizes[which]*randh()
return new
| gpl-2.0 | 5,760,810,357,822,276,000 | 22.989583 | 87 | 0.6231 | false |
kennedyshead/home-assistant | tests/helpers/test_entity_registry.py | 1 | 32162 | """Tests for the Entity Registry."""
from unittest.mock import patch
import pytest
from homeassistant import config_entries
from homeassistant.const import EVENT_HOMEASSISTANT_START, STATE_UNAVAILABLE
from homeassistant.core import CoreState, callback, valid_entity_id
from homeassistant.exceptions import MaxLengthExceeded
from homeassistant.helpers import device_registry as dr, entity_registry as er
from tests.common import (
MockConfigEntry,
flush_store,
mock_device_registry,
mock_registry,
)
YAML__OPEN_PATH = "homeassistant.util.yaml.loader.open"
@pytest.fixture
def registry(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def update_events(hass):
"""Capture update events."""
events = []
@callback
def async_capture(event):
events.append(event.data)
hass.bus.async_listen(er.EVENT_ENTITY_REGISTRY_UPDATED, async_capture)
return events
async def test_get_or_create_returns_same_entry(hass, registry, update_events):
"""Make sure we do not duplicate entries."""
entry = registry.async_get_or_create("light", "hue", "1234")
entry2 = registry.async_get_or_create("light", "hue", "1234")
await hass.async_block_till_done()
assert len(registry.entities) == 1
assert entry is entry2
assert entry.entity_id == "light.hue_1234"
assert len(update_events) == 1
assert update_events[0]["action"] == "create"
assert update_events[0]["entity_id"] == entry.entity_id
def test_get_or_create_suggested_object_id(registry):
"""Test that suggested_object_id works."""
entry = registry.async_get_or_create(
"light", "hue", "1234", suggested_object_id="beer"
)
assert entry.entity_id == "light.beer"
def test_get_or_create_updates_data(registry):
"""Test that we update data in get_or_create."""
orig_config_entry = MockConfigEntry(domain="light")
orig_entry = registry.async_get_or_create(
"light",
"hue",
"5678",
config_entry=orig_config_entry,
device_id="mock-dev-id",
capabilities={"max": 100},
supported_features=5,
device_class="mock-device-class",
disabled_by=er.DISABLED_HASS,
unit_of_measurement="initial-unit_of_measurement",
original_name="initial-original_name",
original_icon="initial-original_icon",
)
assert orig_entry.config_entry_id == orig_config_entry.entry_id
assert orig_entry.device_id == "mock-dev-id"
assert orig_entry.capabilities == {"max": 100}
assert orig_entry.supported_features == 5
assert orig_entry.device_class == "mock-device-class"
assert orig_entry.disabled_by == er.DISABLED_HASS
assert orig_entry.unit_of_measurement == "initial-unit_of_measurement"
assert orig_entry.original_name == "initial-original_name"
assert orig_entry.original_icon == "initial-original_icon"
new_config_entry = MockConfigEntry(domain="light")
new_entry = registry.async_get_or_create(
"light",
"hue",
"5678",
config_entry=new_config_entry,
device_id="new-mock-dev-id",
capabilities={"new-max": 100},
supported_features=10,
device_class="new-mock-device-class",
disabled_by=er.DISABLED_USER,
unit_of_measurement="updated-unit_of_measurement",
original_name="updated-original_name",
original_icon="updated-original_icon",
)
assert new_entry.config_entry_id == new_config_entry.entry_id
assert new_entry.device_id == "new-mock-dev-id"
assert new_entry.capabilities == {"new-max": 100}
assert new_entry.supported_features == 10
assert new_entry.device_class == "new-mock-device-class"
assert new_entry.unit_of_measurement == "updated-unit_of_measurement"
assert new_entry.original_name == "updated-original_name"
assert new_entry.original_icon == "updated-original_icon"
# Should not be updated
assert new_entry.disabled_by == er.DISABLED_HASS
def test_get_or_create_suggested_object_id_conflict_register(registry):
"""Test that we don't generate an entity id that is already registered."""
entry = registry.async_get_or_create(
"light", "hue", "1234", suggested_object_id="beer"
)
entry2 = registry.async_get_or_create(
"light", "hue", "5678", suggested_object_id="beer"
)
assert entry.entity_id == "light.beer"
assert entry2.entity_id == "light.beer_2"
def test_get_or_create_suggested_object_id_conflict_existing(hass, registry):
"""Test that we don't generate an entity id that currently exists."""
hass.states.async_set("light.hue_1234", "on")
entry = registry.async_get_or_create("light", "hue", "1234")
assert entry.entity_id == "light.hue_1234_2"
def test_create_triggers_save(hass, registry):
"""Test that registering entry triggers a save."""
with patch.object(registry, "async_schedule_save") as mock_schedule_save:
registry.async_get_or_create("light", "hue", "1234")
assert len(mock_schedule_save.mock_calls) == 1
async def test_loading_saving_data(hass, registry):
"""Test that we load/save data correctly."""
mock_config = MockConfigEntry(domain="light")
orig_entry1 = registry.async_get_or_create("light", "hue", "1234")
orig_entry2 = registry.async_get_or_create(
"light",
"hue",
"5678",
device_id="mock-dev-id",
area_id="mock-area-id",
config_entry=mock_config,
capabilities={"max": 100},
supported_features=5,
device_class="mock-device-class",
disabled_by=er.DISABLED_HASS,
original_name="Original Name",
original_icon="hass:original-icon",
)
orig_entry2 = registry.async_update_entity(
orig_entry2.entity_id, name="User Name", icon="hass:user-icon"
)
assert len(registry.entities) == 2
# Now load written data in new registry
registry2 = er.EntityRegistry(hass)
await flush_store(registry._store)
await registry2.async_load()
# Ensure same order
assert list(registry.entities) == list(registry2.entities)
new_entry1 = registry.async_get_or_create("light", "hue", "1234")
new_entry2 = registry.async_get_or_create("light", "hue", "5678")
assert orig_entry1 == new_entry1
assert orig_entry2 == new_entry2
assert new_entry2.device_id == "mock-dev-id"
assert new_entry2.area_id == "mock-area-id"
assert new_entry2.disabled_by == er.DISABLED_HASS
assert new_entry2.capabilities == {"max": 100}
assert new_entry2.supported_features == 5
assert new_entry2.device_class == "mock-device-class"
assert new_entry2.name == "User Name"
assert new_entry2.icon == "hass:user-icon"
assert new_entry2.original_name == "Original Name"
assert new_entry2.original_icon == "hass:original-icon"
def test_generate_entity_considers_registered_entities(registry):
"""Test that we don't create entity id that are already registered."""
entry = registry.async_get_or_create("light", "hue", "1234")
assert entry.entity_id == "light.hue_1234"
assert registry.async_generate_entity_id("light", "hue_1234") == "light.hue_1234_2"
def test_generate_entity_considers_existing_entities(hass, registry):
"""Test that we don't create entity id that currently exists."""
hass.states.async_set("light.kitchen", "on")
assert registry.async_generate_entity_id("light", "kitchen") == "light.kitchen_2"
def test_is_registered(registry):
"""Test that is_registered works."""
entry = registry.async_get_or_create("light", "hue", "1234")
assert registry.async_is_registered(entry.entity_id)
assert not registry.async_is_registered("light.non_existing")
@pytest.mark.parametrize("load_registries", [False])
async def test_loading_extra_values(hass, hass_storage):
"""Test we load extra data from the registry."""
hass_storage[er.STORAGE_KEY] = {
"version": er.STORAGE_VERSION,
"data": {
"entities": [
{
"entity_id": "test.named",
"platform": "super_platform",
"unique_id": "with-name",
"name": "registry override",
},
{
"entity_id": "test.no_name",
"platform": "super_platform",
"unique_id": "without-name",
},
{
"entity_id": "test.disabled_user",
"platform": "super_platform",
"unique_id": "disabled-user",
"disabled_by": er.DISABLED_USER,
},
{
"entity_id": "test.disabled_hass",
"platform": "super_platform",
"unique_id": "disabled-hass",
"disabled_by": er.DISABLED_HASS,
},
{
"entity_id": "test.invalid__entity",
"platform": "super_platform",
"unique_id": "invalid-hass",
"disabled_by": er.DISABLED_HASS,
},
]
},
}
await er.async_load(hass)
registry = er.async_get(hass)
assert len(registry.entities) == 4
entry_with_name = registry.async_get_or_create(
"test", "super_platform", "with-name"
)
entry_without_name = registry.async_get_or_create(
"test", "super_platform", "without-name"
)
assert entry_with_name.name == "registry override"
assert entry_without_name.name is None
assert not entry_with_name.disabled
entry_disabled_hass = registry.async_get_or_create(
"test", "super_platform", "disabled-hass"
)
entry_disabled_user = registry.async_get_or_create(
"test", "super_platform", "disabled-user"
)
assert entry_disabled_hass.disabled
assert entry_disabled_hass.disabled_by == er.DISABLED_HASS
assert entry_disabled_user.disabled
assert entry_disabled_user.disabled_by == er.DISABLED_USER
def test_async_get_entity_id(registry):
"""Test that entity_id is returned."""
entry = registry.async_get_or_create("light", "hue", "1234")
assert entry.entity_id == "light.hue_1234"
assert registry.async_get_entity_id("light", "hue", "1234") == "light.hue_1234"
assert registry.async_get_entity_id("light", "hue", "123") is None
async def test_updating_config_entry_id(hass, registry, update_events):
"""Test that we update config entry id in registry."""
mock_config_1 = MockConfigEntry(domain="light", entry_id="mock-id-1")
entry = registry.async_get_or_create(
"light", "hue", "5678", config_entry=mock_config_1
)
mock_config_2 = MockConfigEntry(domain="light", entry_id="mock-id-2")
entry2 = registry.async_get_or_create(
"light", "hue", "5678", config_entry=mock_config_2
)
assert entry.entity_id == entry2.entity_id
assert entry2.config_entry_id == "mock-id-2"
await hass.async_block_till_done()
assert len(update_events) == 2
assert update_events[0]["action"] == "create"
assert update_events[0]["entity_id"] == entry.entity_id
assert update_events[1]["action"] == "update"
assert update_events[1]["entity_id"] == entry.entity_id
assert update_events[1]["changes"] == {"config_entry_id": "mock-id-1"}
async def test_removing_config_entry_id(hass, registry, update_events):
"""Test that we update config entry id in registry."""
mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1")
entry = registry.async_get_or_create(
"light", "hue", "5678", config_entry=mock_config
)
assert entry.config_entry_id == "mock-id-1"
registry.async_clear_config_entry("mock-id-1")
assert not registry.entities
await hass.async_block_till_done()
assert len(update_events) == 2
assert update_events[0]["action"] == "create"
assert update_events[0]["entity_id"] == entry.entity_id
assert update_events[1]["action"] == "remove"
assert update_events[1]["entity_id"] == entry.entity_id
async def test_removing_area_id(registry):
"""Make sure we can clear area id."""
entry = registry.async_get_or_create("light", "hue", "5678")
entry_w_area = registry.async_update_entity(entry.entity_id, area_id="12345A")
registry.async_clear_area_id("12345A")
entry_wo_area = registry.async_get(entry.entity_id)
assert not entry_wo_area.area_id
assert entry_w_area != entry_wo_area
@pytest.mark.parametrize("load_registries", [False])
async def test_migration(hass):
"""Test migration from old data to new."""
mock_config = MockConfigEntry(domain="test-platform", entry_id="test-config-id")
old_conf = {
"light.kitchen": {
"config_entry_id": "test-config-id",
"unique_id": "test-unique",
"platform": "test-platform",
"name": "Test Name",
"disabled_by": er.DISABLED_HASS,
}
}
with patch("os.path.isfile", return_value=True), patch("os.remove"), patch(
"homeassistant.helpers.entity_registry.load_yaml", return_value=old_conf
):
await er.async_load(hass)
registry = er.async_get(hass)
assert registry.async_is_registered("light.kitchen")
entry = registry.async_get_or_create(
domain="light",
platform="test-platform",
unique_id="test-unique",
config_entry=mock_config,
)
assert entry.name == "Test Name"
assert entry.disabled_by == er.DISABLED_HASS
assert entry.config_entry_id == "test-config-id"
async def test_loading_invalid_entity_id(hass, hass_storage):
"""Test we autofix invalid entity IDs."""
hass_storage[er.STORAGE_KEY] = {
"version": er.STORAGE_VERSION,
"data": {
"entities": [
{
"entity_id": "test.invalid__middle",
"platform": "super_platform",
"unique_id": "id-invalid-middle",
"name": "registry override",
},
{
"entity_id": "test.invalid_end_",
"platform": "super_platform",
"unique_id": "id-invalid-end",
},
{
"entity_id": "test._invalid_start",
"platform": "super_platform",
"unique_id": "id-invalid-start",
},
]
},
}
registry = er.async_get(hass)
entity_invalid_middle = registry.async_get_or_create(
"test", "super_platform", "id-invalid-middle"
)
assert valid_entity_id(entity_invalid_middle.entity_id)
entity_invalid_end = registry.async_get_or_create(
"test", "super_platform", "id-invalid-end"
)
assert valid_entity_id(entity_invalid_end.entity_id)
entity_invalid_start = registry.async_get_or_create(
"test", "super_platform", "id-invalid-start"
)
assert valid_entity_id(entity_invalid_start.entity_id)
async def test_update_entity_unique_id(registry):
"""Test entity's unique_id is updated."""
mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1")
entry = registry.async_get_or_create(
"light", "hue", "5678", config_entry=mock_config
)
assert registry.async_get_entity_id("light", "hue", "5678") == entry.entity_id
new_unique_id = "1234"
with patch.object(registry, "async_schedule_save") as mock_schedule_save:
updated_entry = registry.async_update_entity(
entry.entity_id, new_unique_id=new_unique_id
)
assert updated_entry != entry
assert updated_entry.unique_id == new_unique_id
assert mock_schedule_save.call_count == 1
assert registry.async_get_entity_id("light", "hue", "5678") is None
assert registry.async_get_entity_id("light", "hue", "1234") == entry.entity_id
async def test_update_entity_unique_id_conflict(registry):
"""Test migration raises when unique_id already in use."""
mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1")
entry = registry.async_get_or_create(
"light", "hue", "5678", config_entry=mock_config
)
entry2 = registry.async_get_or_create(
"light", "hue", "1234", config_entry=mock_config
)
with patch.object(
registry, "async_schedule_save"
) as mock_schedule_save, pytest.raises(ValueError):
registry.async_update_entity(entry.entity_id, new_unique_id=entry2.unique_id)
assert mock_schedule_save.call_count == 0
assert registry.async_get_entity_id("light", "hue", "5678") == entry.entity_id
assert registry.async_get_entity_id("light", "hue", "1234") == entry2.entity_id
async def test_update_entity(registry):
"""Test updating entity."""
mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1")
entry = registry.async_get_or_create(
"light", "hue", "5678", config_entry=mock_config
)
for attr_name, new_value in (
("name", "new name"),
("icon", "new icon"),
("disabled_by", er.DISABLED_USER),
):
changes = {attr_name: new_value}
updated_entry = registry.async_update_entity(entry.entity_id, **changes)
assert updated_entry != entry
assert getattr(updated_entry, attr_name) == new_value
assert getattr(updated_entry, attr_name) != getattr(entry, attr_name)
assert (
registry.async_get_entity_id("light", "hue", "5678")
== updated_entry.entity_id
)
entry = updated_entry
async def test_disabled_by(registry):
"""Test that we can disable an entry when we create it."""
entry = registry.async_get_or_create(
"light", "hue", "5678", disabled_by=er.DISABLED_HASS
)
assert entry.disabled_by == er.DISABLED_HASS
entry = registry.async_get_or_create(
"light", "hue", "5678", disabled_by=er.DISABLED_INTEGRATION
)
assert entry.disabled_by == er.DISABLED_HASS
entry2 = registry.async_get_or_create("light", "hue", "1234")
assert entry2.disabled_by is None
async def test_disabled_by_config_entry_pref(registry):
"""Test config entry preference setting disabled_by."""
mock_config = MockConfigEntry(
domain="light",
entry_id="mock-id-1",
pref_disable_new_entities=True,
)
entry = registry.async_get_or_create(
"light", "hue", "AAAA", config_entry=mock_config
)
assert entry.disabled_by == er.DISABLED_INTEGRATION
entry2 = registry.async_get_or_create(
"light",
"hue",
"BBBB",
config_entry=mock_config,
disabled_by=er.DISABLED_USER,
)
assert entry2.disabled_by == er.DISABLED_USER
async def test_restore_states(hass):
"""Test restoring states."""
hass.state = CoreState.not_running
registry = er.async_get(hass)
registry.async_get_or_create(
"light",
"hue",
"1234",
suggested_object_id="simple",
)
# Should not be created
registry.async_get_or_create(
"light",
"hue",
"5678",
suggested_object_id="disabled",
disabled_by=er.DISABLED_HASS,
)
registry.async_get_or_create(
"light",
"hue",
"9012",
suggested_object_id="all_info_set",
capabilities={"max": 100},
supported_features=5,
device_class="mock-device-class",
original_name="Mock Original Name",
original_icon="hass:original-icon",
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START, {})
await hass.async_block_till_done()
simple = hass.states.get("light.simple")
assert simple is not None
assert simple.state == STATE_UNAVAILABLE
assert simple.attributes == {"restored": True, "supported_features": 0}
disabled = hass.states.get("light.disabled")
assert disabled is None
all_info_set = hass.states.get("light.all_info_set")
assert all_info_set is not None
assert all_info_set.state == STATE_UNAVAILABLE
assert all_info_set.attributes == {
"max": 100,
"supported_features": 5,
"device_class": "mock-device-class",
"restored": True,
"friendly_name": "Mock Original Name",
"icon": "hass:original-icon",
}
registry.async_remove("light.disabled")
registry.async_remove("light.simple")
registry.async_remove("light.all_info_set")
await hass.async_block_till_done()
assert hass.states.get("light.simple") is None
assert hass.states.get("light.disabled") is None
assert hass.states.get("light.all_info_set") is None
async def test_async_get_device_class_lookup(hass):
"""Test registry device class lookup."""
hass.state = CoreState.not_running
ent_reg = er.async_get(hass)
ent_reg.async_get_or_create(
"binary_sensor",
"light",
"battery_charging",
device_id="light_device_entry_id",
device_class="battery_charging",
)
ent_reg.async_get_or_create(
"sensor",
"light",
"battery",
device_id="light_device_entry_id",
device_class="battery",
)
ent_reg.async_get_or_create(
"light", "light", "demo", device_id="light_device_entry_id"
)
ent_reg.async_get_or_create(
"binary_sensor",
"vacuum",
"battery_charging",
device_id="vacuum_device_entry_id",
device_class="battery_charging",
)
ent_reg.async_get_or_create(
"sensor",
"vacuum",
"battery",
device_id="vacuum_device_entry_id",
device_class="battery",
)
ent_reg.async_get_or_create(
"vacuum", "vacuum", "demo", device_id="vacuum_device_entry_id"
)
ent_reg.async_get_or_create(
"binary_sensor",
"remote",
"battery_charging",
device_id="remote_device_entry_id",
device_class="battery_charging",
)
ent_reg.async_get_or_create(
"remote", "remote", "demo", device_id="remote_device_entry_id"
)
device_lookup = ent_reg.async_get_device_class_lookup(
{("binary_sensor", "battery_charging"), ("sensor", "battery")}
)
assert device_lookup == {
"remote_device_entry_id": {
(
"binary_sensor",
"battery_charging",
): "binary_sensor.remote_battery_charging"
},
"light_device_entry_id": {
(
"binary_sensor",
"battery_charging",
): "binary_sensor.light_battery_charging",
("sensor", "battery"): "sensor.light_battery",
},
"vacuum_device_entry_id": {
(
"binary_sensor",
"battery_charging",
): "binary_sensor.vacuum_battery_charging",
("sensor", "battery"): "sensor.vacuum_battery",
},
}
async def test_remove_device_removes_entities(hass, registry):
"""Test that we remove entities tied to a device."""
device_registry = mock_device_registry(hass)
config_entry = MockConfigEntry(domain="light")
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entry = registry.async_get_or_create(
"light",
"hue",
"5678",
config_entry=config_entry,
device_id=device_entry.id,
)
assert registry.async_is_registered(entry.entity_id)
device_registry.async_remove_device(device_entry.id)
await hass.async_block_till_done()
assert not registry.async_is_registered(entry.entity_id)
async def test_update_device_race(hass, registry):
"""Test race when a device is created, updated and removed."""
device_registry = mock_device_registry(hass)
config_entry = MockConfigEntry(domain="light")
# Create device
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
# Update it
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
identifiers={("bridgeid", "0123")},
connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
# Add entity to the device
entry = registry.async_get_or_create(
"light",
"hue",
"5678",
config_entry=config_entry,
device_id=device_entry.id,
)
assert registry.async_is_registered(entry.entity_id)
device_registry.async_remove_device(device_entry.id)
await hass.async_block_till_done()
assert not registry.async_is_registered(entry.entity_id)
async def test_disable_device_disables_entities(hass, registry):
"""Test that we disable entities tied to a device."""
device_registry = mock_device_registry(hass)
config_entry = MockConfigEntry(domain="light")
config_entry.add_to_hass(hass)
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entry1 = registry.async_get_or_create(
"light",
"hue",
"5678",
config_entry=config_entry,
device_id=device_entry.id,
)
entry2 = registry.async_get_or_create(
"light",
"hue",
"ABCD",
config_entry=config_entry,
device_id=device_entry.id,
disabled_by=er.DISABLED_USER,
)
entry3 = registry.async_get_or_create(
"light",
"hue",
"EFGH",
config_entry=config_entry,
device_id=device_entry.id,
disabled_by=er.DISABLED_CONFIG_ENTRY,
)
assert not entry1.disabled
assert entry2.disabled
assert entry3.disabled
device_registry.async_update_device(device_entry.id, disabled_by=er.DISABLED_USER)
await hass.async_block_till_done()
entry1 = registry.async_get(entry1.entity_id)
assert entry1.disabled
assert entry1.disabled_by == er.DISABLED_DEVICE
entry2 = registry.async_get(entry2.entity_id)
assert entry2.disabled
assert entry2.disabled_by == er.DISABLED_USER
entry3 = registry.async_get(entry3.entity_id)
assert entry3.disabled
assert entry3.disabled_by == er.DISABLED_CONFIG_ENTRY
device_registry.async_update_device(device_entry.id, disabled_by=None)
await hass.async_block_till_done()
entry1 = registry.async_get(entry1.entity_id)
assert not entry1.disabled
entry2 = registry.async_get(entry2.entity_id)
assert entry2.disabled
assert entry2.disabled_by == er.DISABLED_USER
entry3 = registry.async_get(entry3.entity_id)
assert entry3.disabled
assert entry3.disabled_by == er.DISABLED_CONFIG_ENTRY
async def test_disable_config_entry_disables_entities(hass, registry):
"""Test that we disable entities tied to a config entry."""
device_registry = mock_device_registry(hass)
config_entry = MockConfigEntry(domain="light")
config_entry.add_to_hass(hass)
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entry1 = registry.async_get_or_create(
"light",
"hue",
"5678",
config_entry=config_entry,
device_id=device_entry.id,
)
entry2 = registry.async_get_or_create(
"light",
"hue",
"ABCD",
config_entry=config_entry,
device_id=device_entry.id,
disabled_by=er.DISABLED_USER,
)
entry3 = registry.async_get_or_create(
"light",
"hue",
"EFGH",
config_entry=config_entry,
device_id=device_entry.id,
disabled_by=er.DISABLED_DEVICE,
)
assert not entry1.disabled
assert entry2.disabled
assert entry3.disabled
await hass.config_entries.async_set_disabled_by(
config_entry.entry_id, config_entries.DISABLED_USER
)
await hass.async_block_till_done()
entry1 = registry.async_get(entry1.entity_id)
assert entry1.disabled
assert entry1.disabled_by == er.DISABLED_CONFIG_ENTRY
entry2 = registry.async_get(entry2.entity_id)
assert entry2.disabled
assert entry2.disabled_by == er.DISABLED_USER
entry3 = registry.async_get(entry3.entity_id)
assert entry3.disabled
assert entry3.disabled_by == er.DISABLED_DEVICE
await hass.config_entries.async_set_disabled_by(config_entry.entry_id, None)
await hass.async_block_till_done()
entry1 = registry.async_get(entry1.entity_id)
assert not entry1.disabled
entry2 = registry.async_get(entry2.entity_id)
assert entry2.disabled
assert entry2.disabled_by == er.DISABLED_USER
# The device was re-enabled, so entity disabled by the device will be re-enabled too
entry3 = registry.async_get(entry3.entity_id)
assert not entry3.disabled_by
async def test_disabled_entities_excluded_from_entity_list(hass, registry):
"""Test that disabled entities are excluded from async_entries_for_device."""
device_registry = mock_device_registry(hass)
config_entry = MockConfigEntry(domain="light")
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entry1 = registry.async_get_or_create(
"light",
"hue",
"5678",
config_entry=config_entry,
device_id=device_entry.id,
)
entry2 = registry.async_get_or_create(
"light",
"hue",
"ABCD",
config_entry=config_entry,
device_id=device_entry.id,
disabled_by=er.DISABLED_USER,
)
entries = er.async_entries_for_device(registry, device_entry.id)
assert entries == [entry1]
entries = er.async_entries_for_device(
registry, device_entry.id, include_disabled_entities=True
)
assert entries == [entry1, entry2]
async def test_entity_max_length_exceeded(hass, registry):
"""Test that an exception is raised when the max character length is exceeded."""
long_entity_id_name = (
"1234567890123456789012345678901234567890123456789012345678901234567890"
"1234567890123456789012345678901234567890123456789012345678901234567890"
"1234567890123456789012345678901234567890123456789012345678901234567890"
"1234567890123456789012345678901234567890123456789012345678901234567890"
)
with pytest.raises(MaxLengthExceeded) as exc_info:
registry.async_generate_entity_id("sensor", long_entity_id_name)
assert exc_info.value.property_name == "generated_entity_id"
assert exc_info.value.max_length == 255
assert exc_info.value.value == f"sensor.{long_entity_id_name}"
# Try again but against the domain
long_domain_name = long_entity_id_name
with pytest.raises(MaxLengthExceeded) as exc_info:
registry.async_generate_entity_id(long_domain_name, "sensor")
assert exc_info.value.property_name == "domain"
assert exc_info.value.max_length == 64
assert exc_info.value.value == long_domain_name
# Try again but force a number to get added to the entity ID
long_entity_id_name = (
"1234567890123456789012345678901234567890123456789012345678901234567890"
"1234567890123456789012345678901234567890123456789012345678901234567890"
"1234567890123456789012345678901234567890123456789012345678901234567890"
"1234567890123456789012345678901234567"
)
with pytest.raises(MaxLengthExceeded) as exc_info:
registry.async_generate_entity_id(
"sensor", long_entity_id_name, [f"sensor.{long_entity_id_name}"]
)
assert exc_info.value.property_name == "generated_entity_id"
assert exc_info.value.max_length == 255
assert exc_info.value.value == f"sensor.{long_entity_id_name}_2"
| apache-2.0 | 2,473,947,555,476,688,400 | 32.819138 | 88 | 0.631864 | false |
ActiveState/code | recipes/Python/286240_Python_portscanners/recipe-286240.py | 1 | 3523 | # a simple portscanner with multithreading
import socket as sk
import sys
import threading
MAX_THREADS = 50
def usage():
print "\npyScan 0.1"
print "usage: pyScan <host> [start port] [end port]"
class Scanner(threading.Thread):
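    """Thread that attempts a TCP connection to host:port and prints the port if it is open."""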
def __init__(self, host, port):
threading.Thread.__init__(self)
# host and port
self.host = host
self.port = port
# build up the socket obj
self.sd = sk.socket(sk.AF_INET, sk.SOCK_STREAM)
def run(self):
try:
# connect to the given host:port
self.sd.connect((self.host, self.port))
print "%s:%d OPEN" % (self.host, self.port)
self.sd.close()
except: pass
class pyScan:
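    """Parse the command-line style argument vector and launch the scan."""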
def __init__(self, args=[]):
# arguments vector
self.args = args
# start port and end port
self.start, self.stop = 1, 1024
# host name
self.host = ""
# check the arguments
if len(self.args) == 4:
self.host = self.args[1]
try:
self.start = int(self.args[2])
self.stop = int(self.args[3])
except ValueError:
usage()
return
if self.start > self.stop:
usage()
return
elif len(self.args) == 2:
self.host = self.args[1]
else:
usage()
return
try:
sk.gethostbyname(self.host)
except:
print "hostname '%s' unknown" % self.host
self.scan(self.host, self.start, self.stop)
def scan(self, host, start, stop):
self.port = start
while self.port <= stop:
            while self.port <= stop and threading.activeCount() < MAX_THREADS:
Scanner(host, self.port).start()
self.port += 1
if __name__ == "__main__":
pyScan(sys.argv)
#############################################################
# a simple portscanner with multithreading
# QUEUE BASED VERSION
import socket
import sys
import threading, Queue
MAX_THREADS = 50
class Scanner(threading.Thread):
def __init__(self, inq, outq):
threading.Thread.__init__(self)
self.setDaemon(1)
# queues for (host, port)
self.inq = inq
self.outq = outq
def run(self):
while 1:
host, port = self.inq.get()
sd = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
# connect to the given host:port
sd.connect((host, port))
except socket.error:
# set the CLOSED flag
self.outq.put((host, port, 'CLOSED'))
else:
self.outq.put((host, port, 'OPEN'))
sd.close()
def scan(host, start, stop, nthreads=MAX_THREADS):
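    """Scan ports [start, stop] on host with a pool of worker threads fed from
    an input queue; results are collected from an output queue and open ports
    are printed in port order."""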
toscan = Queue.Queue()
scanned = Queue.Queue()
scanners = [Scanner(toscan, scanned) for i in range(nthreads)]
for scanner in scanners:
scanner.start()
hostports = [(host, port) for port in xrange(start, stop+1)]
for hostport in hostports:
toscan.put(hostport)
results = {}
for host, port in hostports:
while (host, port) not in results:
nhost, nport, nstatus = scanned.get()
results[(nhost, nport)] = nstatus
status = results[(host, port)]
if status <> 'CLOSED':
print '%s:%d %s' % (host, port, status)
if __name__ == '__main__':
scan('localhost', 0, 1024)
| mit | 749,350,839,647,121,400 | 26.523438 | 74 | 0.518024 | false |
bacaldwell/ironic | ironic/tests/unit/common/test_image_service.py | 1 | 16700 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import os
import shutil
import mock
from oslo_config import cfg
import requests
import sendfile
import six
import six.moves.builtins as __builtin__
from six.moves import http_client
from ironic.common import exception
from ironic.common.glance_service.v1 import image_service as glance_v1_service
from ironic.common import image_service
from ironic.common import keystone
from ironic.tests import base
if six.PY3:
import io
file = io.BytesIO
class HttpImageServiceTestCase(base.TestCase):
def setUp(self):
super(HttpImageServiceTestCase, self).setUp()
self.service = image_service.HttpImageService()
self.href = 'http://127.0.0.1:12345/fedora.qcow2'
@mock.patch.object(requests, 'head', autospec=True)
def test_validate_href(self, head_mock):
response = head_mock.return_value
response.status_code = http_client.OK
self.service.validate_href(self.href)
head_mock.assert_called_once_with(self.href)
response.status_code = http_client.NO_CONTENT
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href,
self.href)
response.status_code = http_client.BAD_REQUEST
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href,
self.href)
@mock.patch.object(requests, 'head', autospec=True)
def test_validate_href_error_code(self, head_mock):
head_mock.return_value.status_code = http_client.BAD_REQUEST
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href, self.href)
head_mock.assert_called_once_with(self.href)
@mock.patch.object(requests, 'head', autospec=True)
def test_validate_href_error(self, head_mock):
head_mock.side_effect = requests.ConnectionError()
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href, self.href)
head_mock.assert_called_once_with(self.href)
@mock.patch.object(requests, 'head', autospec=True)
def _test_show(self, head_mock, mtime, mtime_date):
head_mock.return_value.status_code = http_client.OK
head_mock.return_value.headers = {
'Content-Length': 100,
'Last-Modified': mtime
}
result = self.service.show(self.href)
head_mock.assert_called_once_with(self.href)
self.assertEqual({'size': 100, 'updated_at': mtime_date,
'properties': {}}, result)
def test_show_rfc_822(self):
self._test_show(mtime='Tue, 15 Nov 2014 08:12:31 GMT',
mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))
def test_show_rfc_850(self):
self._test_show(mtime='Tuesday, 15-Nov-14 08:12:31 GMT',
mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))
def test_show_ansi_c(self):
self._test_show(mtime='Tue Nov 15 08:12:31 2014',
mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))
@mock.patch.object(requests, 'head', autospec=True)
def test_show_no_content_length(self, head_mock):
head_mock.return_value.status_code = http_client.OK
head_mock.return_value.headers = {}
self.assertRaises(exception.ImageRefValidationFailed,
self.service.show, self.href)
head_mock.assert_called_with(self.href)
@mock.patch.object(shutil, 'copyfileobj', autospec=True)
@mock.patch.object(requests, 'get', autospec=True)
def test_download_success(self, req_get_mock, shutil_mock):
response_mock = req_get_mock.return_value
response_mock.status_code = http_client.OK
response_mock.raw = mock.MagicMock(spec=file)
file_mock = mock.Mock(spec=file)
self.service.download(self.href, file_mock)
shutil_mock.assert_called_once_with(
response_mock.raw.__enter__(), file_mock,
image_service.IMAGE_CHUNK_SIZE
)
req_get_mock.assert_called_once_with(self.href, stream=True)
@mock.patch.object(requests, 'get', autospec=True)
def test_download_fail_connerror(self, req_get_mock):
req_get_mock.side_effect = requests.ConnectionError()
file_mock = mock.Mock(spec=file)
self.assertRaises(exception.ImageDownloadFailed,
self.service.download, self.href, file_mock)
@mock.patch.object(shutil, 'copyfileobj', autospec=True)
@mock.patch.object(requests, 'get', autospec=True)
def test_download_fail_ioerror(self, req_get_mock, shutil_mock):
response_mock = req_get_mock.return_value
response_mock.status_code = http_client.OK
response_mock.raw = mock.MagicMock(spec=file)
file_mock = mock.Mock(spec=file)
shutil_mock.side_effect = IOError
self.assertRaises(exception.ImageDownloadFailed,
self.service.download, self.href, file_mock)
req_get_mock.assert_called_once_with(self.href, stream=True)
class FileImageServiceTestCase(base.TestCase):
def setUp(self):
super(FileImageServiceTestCase, self).setUp()
self.service = image_service.FileImageService()
self.href = 'file:///home/user/image.qcow2'
self.href_path = '/home/user/image.qcow2'
@mock.patch.object(os.path, 'isfile', return_value=True, autospec=True)
def test_validate_href(self, path_exists_mock):
self.service.validate_href(self.href)
path_exists_mock.assert_called_once_with(self.href_path)
@mock.patch.object(os.path, 'isfile', return_value=False, autospec=True)
def test_validate_href_path_not_found_or_not_file(self, path_exists_mock):
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href, self.href)
path_exists_mock.assert_called_once_with(self.href_path)
@mock.patch.object(os.path, 'getmtime', return_value=1431087909.1641912,
autospec=True)
@mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_show(self, _validate_mock, getsize_mock, getmtime_mock):
_validate_mock.return_value = self.href_path
result = self.service.show(self.href)
getsize_mock.assert_called_once_with(self.href_path)
getmtime_mock.assert_called_once_with(self.href_path)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual({'size': 42,
'updated_at': datetime.datetime(2015, 5, 8,
12, 25, 9, 164191),
'properties': {}}, result)
@mock.patch.object(os, 'link', autospec=True)
@mock.patch.object(os, 'remove', autospec=True)
@mock.patch.object(os, 'access', return_value=True, autospec=True)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_download_hard_link(self, _validate_mock, stat_mock, access_mock,
remove_mock, link_mock):
_validate_mock.return_value = self.href_path
stat_mock.return_value.st_dev = 'dev1'
file_mock = mock.Mock(spec=file)
file_mock.name = 'file'
self.service.download(self.href, file_mock)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual(2, stat_mock.call_count)
access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
remove_mock.assert_called_once_with('file')
link_mock.assert_called_once_with(self.href_path, 'file')
@mock.patch.object(sendfile, 'sendfile', autospec=True)
@mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
@mock.patch.object(__builtin__, 'open', autospec=True)
@mock.patch.object(os, 'access', return_value=False, autospec=True)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_download_copy(self, _validate_mock, stat_mock, access_mock,
open_mock, size_mock, copy_mock):
_validate_mock.return_value = self.href_path
stat_mock.return_value.st_dev = 'dev1'
file_mock = mock.MagicMock(spec=file)
file_mock.name = 'file'
input_mock = mock.MagicMock(spec=file)
open_mock.return_value = input_mock
self.service.download(self.href, file_mock)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual(2, stat_mock.call_count)
access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
copy_mock.assert_called_once_with(file_mock.fileno(),
input_mock.__enter__().fileno(),
0, 42)
size_mock.assert_called_once_with(self.href_path)
@mock.patch.object(os, 'remove', side_effect=OSError, autospec=True)
@mock.patch.object(os, 'access', return_value=True, autospec=True)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_download_hard_link_fail(self, _validate_mock, stat_mock,
access_mock, remove_mock):
_validate_mock.return_value = self.href_path
stat_mock.return_value.st_dev = 'dev1'
file_mock = mock.MagicMock(spec=file)
file_mock.name = 'file'
self.assertRaises(exception.ImageDownloadFailed,
self.service.download, self.href, file_mock)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual(2, stat_mock.call_count)
access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
@mock.patch.object(sendfile, 'sendfile', side_effect=OSError,
autospec=True)
@mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
@mock.patch.object(__builtin__, 'open', autospec=True)
@mock.patch.object(os, 'access', return_value=False, autospec=True)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_download_copy_fail(self, _validate_mock, stat_mock, access_mock,
open_mock, size_mock, copy_mock):
_validate_mock.return_value = self.href_path
stat_mock.return_value.st_dev = 'dev1'
file_mock = mock.MagicMock(spec=file)
file_mock.name = 'file'
input_mock = mock.MagicMock(spec=file)
open_mock.return_value = input_mock
self.assertRaises(exception.ImageDownloadFailed,
self.service.download, self.href, file_mock)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual(2, stat_mock.call_count)
access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
size_mock.assert_called_once_with(self.href_path)
class ServiceGetterTestCase(base.TestCase):
@mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
@mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
return_value=None, autospec=True)
def test_get_glance_image_service(self, glance_service_mock, token_mock):
image_href = 'image-uuid'
self.context.auth_token = 'fake'
image_service.get_image_service(image_href, context=self.context)
glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
self.context)
self.assertFalse(token_mock.called)
@mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
@mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
return_value=None, autospec=True)
def test_get_glance_image_service_url(self, glance_service_mock,
token_mock):
image_href = 'glance://image-uuid'
self.context.auth_token = 'fake'
image_service.get_image_service(image_href, context=self.context)
glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
self.context)
self.assertFalse(token_mock.called)
@mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
@mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
return_value=None, autospec=True)
def test_get_glance_image_service_no_token(self, glance_service_mock,
token_mock):
image_href = 'image-uuid'
self.context.auth_token = None
token_mock.return_value = 'admin-token'
image_service.get_image_service(image_href, context=self.context)
glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
self.context)
token_mock.assert_called_once_with()
self.assertEqual('admin-token', self.context.auth_token)
@mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
@mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
return_value=None, autospec=True)
def test_get_glance_image_service_token_not_needed(self,
glance_service_mock,
token_mock):
image_href = 'image-uuid'
self.context.auth_token = None
self.config(auth_strategy='noauth', group='glance')
image_service.get_image_service(image_href, context=self.context)
glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
self.context)
self.assertFalse(token_mock.called)
self.assertIsNone(self.context.auth_token)
@mock.patch.object(image_service.HttpImageService, '__init__',
return_value=None, autospec=True)
def test_get_http_image_service(self, http_service_mock):
image_href = 'http://127.0.0.1/image.qcow2'
image_service.get_image_service(image_href)
http_service_mock.assert_called_once_with()
@mock.patch.object(image_service.HttpImageService, '__init__',
return_value=None, autospec=True)
def test_get_https_image_service(self, http_service_mock):
image_href = 'https://127.0.0.1/image.qcow2'
image_service.get_image_service(image_href)
http_service_mock.assert_called_once_with()
@mock.patch.object(image_service.FileImageService, '__init__',
return_value=None, autospec=True)
def test_get_file_image_service(self, local_service_mock):
image_href = 'file:///home/user/image.qcow2'
image_service.get_image_service(image_href)
local_service_mock.assert_called_once_with()
def test_get_image_service_unknown_protocol(self):
image_href = 'usenet://alt.binaries.dvd/image.qcow2'
self.assertRaises(exception.ImageRefValidationFailed,
image_service.get_image_service, image_href)
def test_out_range_auth_strategy(self):
self.assertRaises(ValueError, cfg.CONF.set_override,
'auth_strategy', 'fake', 'glance',
enforce_type=True)
def test_out_range_glance_protocol(self):
self.assertRaises(ValueError, cfg.CONF.set_override,
'glance_protocol', 'fake', 'glance',
enforce_type=True)
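# Summary of the dispatch rules exercised by ServiceGetterTestCase above.
# This is an illustrative note only -- the mapping is inferred from these
# tests, not copied from the production get_image_service() implementation:
#
#   'image-uuid'                  -> glance_v1_service.GlanceImageService
#   'glance://image-uuid'         -> glance_v1_service.GlanceImageService
#   'http://host/image.qcow2'     -> image_service.HttpImageService
#   'https://host/image.qcow2'    -> image_service.HttpImageService
#   'file:///path/image.qcow2'    -> image_service.FileImageService
#   any other scheme              -> exception.ImageRefValidationFailed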
| apache-2.0 | 4,742,086,651,356,532,000 | 47.688047 | 78 | 0.627365 | false |
CommonsCloud/Core-API | CommonsCloudAPI/models/user.py | 1 | 8946 | """
For CommonsCloud copyright information please see the LICENSE document
(the "License") included with this software package. This file may not
be used in any manner except in compliance with the License
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import hashlib
"""
Import Flask Dependencies
"""
from flask.ext.security import current_user
from flask.ext.security import UserMixin
from flask.ext.security import RoleMixin
from flask.ext.security import SQLAlchemyUserDatastore
from werkzeug import generate_password_hash
from werkzeug import check_password_hash
"""
Import Commons Cloud Dependencies
"""
from CommonsCloudAPI.extensions import db
from CommonsCloudAPI.extensions import logger
from CommonsCloudAPI.extensions import sanitize
from CommonsCloudAPI.extensions import status as status_
from CommonsCloudAPI.models.base import CommonsModel
from CommonsCloudAPI.models.template import UserTemplates
from CommonsCloudAPI.models.application import UserApplications
user_roles = db.Table('user_roles',
db.Column('user', db.Integer(), db.ForeignKey('user.id')),
db.Column('role', db.Integer(), db.ForeignKey('role.id')),
extend_existing = True
)
"""
This defines our basic Role model. We have to have this because of the
Flask-Security module. If you remove it, Flask-Security gets fussy.
"""
class Role(db.Model, RoleMixin):
__tablename__ = 'role'
__table_args__ = {
'extend_existing': True
}
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
"""
This defines our basic User model. We have to have this because of the
Flask-Security module. If you remove it, Flask-Security gets fussy.
"""
class User(db.Model, UserMixin, CommonsModel):
__public__ = {'default': ['id', 'name', 'email', 'active', 'confirmed_at']}
__tablename__ = 'user'
__table_args__ = {
'extend_existing': True
}
"""
Define the fields that we will use to create the User table in our
database for use with our SQLAlchemy model
"""
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
email = db.Column(db.String(255))
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=user_roles, backref=db.backref('users'))
applications = db.relationship('UserApplications', backref=db.backref('users'))
templates = db.relationship('UserTemplates', backref=db.backref('users'))
fields = db.relationship('UserFields', backref=db.backref('users'))
def __init__(self, name="", email="", password="", active=True, roles=[], permissions=[]):
self.name = name
self.email = email
self.password = password
self.active = active
self.roles = roles
self.permissions = permissions
"""
Set the user password using the pbkdf2:sha1 method and a salt_length of 64
"""
def set_password(self, password):
self.password = generate_password_hash(password, method='pbkdf2:sha1', salt_length=64)
"""
Check to see if the password entered by the user matches the password saved
in the database associated with the acting user
@param (object) self
@param (string) password
The password to check against the database
@return (bool)
The boolean of whether or not the passwords match
"""
def check_password(self, password):
return check_password_hash(self.password, password)
def user_create(self, new_user):
new_user_ = User(**new_user)
db.session.add(new_user_)
db.session.commit()
return new_user_
"""
Get the SQLAlchemy User object for the current_user
@param (object) self
@return (object) user_
The object of the current user, not to be confused with current_user
"""
def user_get(self, user_id):
user_ = User.query.get(user_id)
if not user_:
return status_.status_404('We couldn\'t find the user you were looking for.'), 404
return {
'active': user_.active,
'member_since': user_.confirmed_at.strftime('%b %d, %Y'),
'picture': self.user_picture(user_.email),
'email': user_.email,
'id': user_.id,
'name': user_.name
}
"""
Get a list of User objects for the entire system
@return (array) users_
The array of objects for all users in system
"""
def user_list(self):
users_ = User.query.all()
return users_
"""
Get a list of User objects limited to a specific Application
@return (array) users_
The array of objects for all Application Users in system
"""
def application_user_list(self, application_id):
user_list = []
ApplicationUsers = UserApplications.query.filter_by(application_id=application_id).all()
for user in ApplicationUsers:
user_list.append(user.user_id)
return User.query.filter(User.id.in_(user_list)).all()
"""
Get a list of User objects limited to a specific Template
@return (array) users_
The array of objects for all Template Users in system
"""
def template_user_list(self, template_id):
user_list = []
TemplateUsers = UserTemplates.query.filter_by(template_id=template_id).all()
for user in TemplateUsers:
user_list.append(user.user_id)
return User.query.filter(User.id.in_(user_list)).all()
def user_update(self, user_object_):
"""
Before updating any information we first have to load the User object for the
user we wish to act upon. To make extra sure that one user doesn't update
another by sending an alternative 'id' through with the POST request, we only
act on the `current_user` as defined by the security module.
"""
user_ = User.query.get(current_user.id)
"""
Content that needs to be sanitized
"""
user_.name = sanitize.sanitize_string(user_object_.get('name', current_user.name))
user_.email = sanitize.sanitize_string(user_object_.get('email', current_user.email))
"""
Booleans and Arrays are not sanitized right now ... they probably should be
"""
# user_.active = user_object_.get('active', current_user.active)
# user_.roles = user_object_.get('roles', current_user.roles)
# user_.permissions = user_object_.get('permissions', current_user.permissions)
"""
Save all of our updates to the database
"""
db.session.commit()
return user_
"""
Remove a user entirely from our system
This should be a multiple step process:
1. User arrives at the "Remove account" page
2. Message is displayed warning the user of ramifications of account removal
3. User must type in their current password
"""
def user_remove(self):
pass
def user_picture(self, email):
user_email = email.lower()
user_hash = hashlib.md5(user_email).hexdigest()
picture_url = '//www.gravatar.com/avatar/' + user_hash
return picture_url
"""
Get a list of Users that have access to the Application requested by
the user, but make sure the User requesting this information is logged
in already and has `is_admin` permission to the requested Application
"""
def application_users(self, application_id):
allowed_applications = self.allowed_applications('is_admin')
if not application_id in allowed_applications:
logger.warning('User %d with Applications %s tried to access Users for Application %d', \
self.current_user.id, allowed_applications, application_id)
return status_.status_401('You are not allowed to view the Users of this Application because you do not have the permission to do so'), 401
return self.application_user_list(application_id)
"""
Get a list of Users that have access to the Template requested by
the user, but make sure the User requesting this information is logged
in already and has `is_admin` permission to the requested Template
"""
def template_users(self, template_id):
allowed_templates = self.allowed_templates('is_admin')
if not template_id in allowed_templates:
logger.warning('User %d tried to access Users for Template %d', \
self.current_user.id, template_id)
return status_.status_401('You are not allowed to view the Users of this Template because you do not have the permission to do so'), 401
return self.template_user_list(template_id)
"""
The last thing we need to do is actually hook these things up to the
User Datastore provided by SQLAlchemy's Engine's datastore that provides
Flask-Security with User/Role information so we can lock down access
to the system and its resources.
"""
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
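# Illustrative usage of the password helpers defined on User above. The name,
# e-mail address and plain-text password are made-up values, not data from
# this project:
#
#     user = User(name=u'Jane', email=u'jane@example.org')
#     user.set_password(u'correct horse battery staple')
#     user.check_password(u'correct horse battery staple')   # -> True
#     user.check_password(u'not the password')               # -> False
#
# set_password() only ever stores the pbkdf2:sha1 hash (with a salt of length
# 64), so the plain-text password is never written to the database.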
| agpl-3.0 | 2,422,971,609,378,438,700 | 28.331148 | 145 | 0.705455 | false |
mjschultz/redlock | tests/test_lock.py | 1 | 1801 | from redlock import RedLock
import time
def test_default_connection_details_value():
"""
Test that RedLock instance could be created with
default value of `connection_details` argument.
"""
lock = RedLock("test_simple_lock")
def test_simple_lock():
"""
Test a RedLock can be acquired.
"""
lock = RedLock("test_simple_lock", [{"host": "localhost"}], ttl=1000)
locked = lock.acquire()
lock.release()
assert locked == True
def test_context_manager():
"""
Test a RedLock can be released by the context manager automically.
"""
with RedLock("test_context_manager", [{"host": "localhost"}], ttl=1000):
lock = RedLock("test_context_manager", [{"host": "localhost"}], ttl=1000)
locked = lock.acquire()
assert locked == False
lock = RedLock("test_context_manager", [{"host": "localhost"}], ttl=1000)
locked = lock.acquire()
assert locked == True
lock.release()
def test_fail_to_lock_acquired():
lock1 = RedLock("test_fail_to_lock_acquired", [{"host": "localhost"}], ttl=1000)
lock2 = RedLock("test_fail_to_lock_acquired", [{"host": "localhost"}], ttl=1000)
lock1_locked = lock1.acquire()
lock2_locked = lock2.acquire()
lock1.release()
assert lock1_locked == True
assert lock2_locked == False
def test_lock_expire():
lock1 = RedLock("test_lock_expire", [{"host": "localhost"}], ttl=500)
lock1.acquire()
time.sleep(1)
# Now lock1 has expired, we can accquire a lock
lock2 = RedLock("test_lock_expire", [{"host": "localhost"}], ttl=1000)
locked = lock2.acquire()
assert locked == True
lock1.release()
lock3 = RedLock("test_lock_expire", [{"host": "localhost"}], ttl=1000)
locked = lock3.acquire()
assert locked == False
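# Additional illustrative test, not part of the original suite. It assumes the
# release() semantics demonstrated above: releasing a lock makes the same
# resource name immediately lockable again.
def test_reacquire_after_release():
    lock = RedLock("test_reacquire_after_release",
                   [{"host": "localhost"}], ttl=1000)
    assert lock.acquire() == True
    lock.release()
    assert lock.acquire() == True
    lock.release()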
| mit | 3,412,506,755,901,172,000 | 26.707692 | 84 | 0.62965 | false |
chme/plugin.audio.mpdclient2 | mpdclient/Navigation.py | 1 | 19566 | #
# Copyright (c) chme
#
# This file is part of the mpdclient kodi plugin
#
# This plugin is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of
# the License, or (at your option) any later version.
#
# This plugin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
import urllib
import xbmcgui
import xbmcplugin
from lib import mpd
from mpdclient.Env import Env
from mpdclient.Language import Language
class NavUrl:
__KEY_NAVID = "navid"
__KEY_PARAM = "param"
__SEPARATOR = "###"
@staticmethod
def build_url(env, navid, params=[]):
return env.base_url() + "?" + urllib.urlencode({NavUrl.__KEY_NAVID: navid}) + "&" + urllib.urlencode({NavUrl.__KEY_PARAM: NavUrl.__SEPARATOR.join(params)})
@staticmethod
def get_navid(env):
return env.param_string(NavUrl.__KEY_NAVID)
@staticmethod
def get_params(env):
return env.param_string(NavUrl.__KEY_PARAM).split(NavUrl.__SEPARATOR)
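# Illustrative example of the URLs NavUrl produces. The base URL shown here is
# hypothetical; the real one comes from Env.base_url() at runtime:
#
#     NavUrl.build_url(env, Nav.NAV_FIND, [u"album", u"Foo"])
#     -> "plugin://plugin.audio.mpdclient2/?navid=find&param=album%23%23%23Foo"
#
# The navigation id is carried in the "navid" query argument and the parameter
# list is joined with the "###" separator into the single "param" argument,
# which get_navid()/get_params() above decode again on the receiving side.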
class Nav:
NAV_FILE = "file"
NAV_PL = "playlist"
NAV_LIST = "list"
NAV_FIND = "find"
NAV_QUEUE = "queue"
NAV_PLAYLISTS = "playlists"
ACTION_ADD = "add"
ACTION_LOAD = "load"
ACTION_FINDADD = "findadd"
ACTION_REMOVE = "remove"
ACTION_CLEAR = "clear"
ACTION_PLAY = "play"
ACTION_PAUSE = "pause"
ACTION_PREV = "prev"
ACTION_NEXT = "next"
ACTION_STOP = "stop"
ACTION_OUTPUTS = "outputs"
def __init__(self):
self.__env = Env()
self.__mpc = mpd.MPDClient()
return
def handle(self):
self.__connect_mpd()
params = NavUrl.get_params(self.__env)
navid = NavUrl.get_navid(self.__env)
if navid == Nav.NAV_FILE:
xbmcplugin.setContent(self.__env.addon_handle(), "files")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_TITLE)
self.__nav_file(self.__env, self.__mpc, params)
elif navid == Nav.NAV_PL:
xbmcplugin.setContent(self.__env.addon_handle(), "songs")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_NONE)
self.__nav_pl(self.__env, self.__mpc, params)
elif navid == Nav.NAV_PLAYLISTS:
xbmcplugin.setContent(self.__env.addon_handle(), "files")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_TRACKNUM)
self.__nav_playlists(self.__env, self.__mpc, params)
elif navid == Nav.NAV_LIST:
if "albumartist" == params[0]:
xbmcplugin.setContent(self.__env.addon_handle(), "artists")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_ARTIST)
elif "album" == params[0]:
xbmcplugin.setContent(self.__env.addon_handle(), "albums")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_ALBUM)
elif "genre" == params[0]:
xbmcplugin.setContent(self.__env.addon_handle(), "files")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_GENRE)
self.__nav_list(self.__env, self.__mpc, params)
elif navid == Nav.NAV_FIND:
xbmcplugin.setContent(self.__env.addon_handle(), "songs")
#xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_TITLE)
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_TRACKNUM)
self.__nav_find(self.__env, self.__mpc, params)
elif navid == Nav.NAV_QUEUE:
xbmcplugin.setContent(self.__env.addon_handle(), "songs")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_NONE)
self.__nav_queue(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_ADD:
self.__action_add(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_LOAD:
self.__action_load(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_FINDADD:
self.__action_findadd(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_REMOVE:
self.__action_remove(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_CLEAR:
self.__action_clear(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_PLAY:
self.__action_play(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_PAUSE:
self.__action_pause(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_PREV:
self.__action_prev(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_NEXT:
self.__action_next(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_STOP:
self.__action_stop(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_OUTPUTS:
self.__action_outputs(self.__env, self.__mpc, params)
else:
xbmcplugin.setContent(self.__env.addon_handle(), "files")
self.__nav_root(self.__env, self.__mpc, params)
self.__deinit()
def __connect_mpd(self):
self.__mpc.connect(
self.__env.setting("host"), self.__env.setting("port"))
return
def __deinit(self):
self.__mpc.disconnect()
xbmcplugin.endOfDirectory(self.__env.addon_handle())
def __nav_root(self, env, mpc, params=[]):
item = ItemRoot()
item.add(env, Nav.NAV_QUEUE, env.localized(Language.QUEUE), [], "DefaultMusicPlaylists.png")
item.add(env, Nav.NAV_FILE, env.localized(Language.FILES), ["/"], "")
item.add(env, Nav.NAV_LIST, env.localized(
Language.ARTISTS), ["albumartist"], "DefaultMusicArtists.png")
item.add(env, Nav.NAV_LIST, env.localized(
Language.ALBUMS), ["album"], "DefaultMusicAlbums.png")
item.add(env, Nav.NAV_LIST, env.localized(
Language.GENRE), ["genre"], "DefaultMusicGenres.png")
item.add(env, Nav.NAV_PLAYLISTS, env.localized(Language.PLAYLISTS), [], "DefaultMusicPlaylists.png")
return
def __nav_file(self, env, mpc, params=[]):
path = params[0]
item = ItemFile()
for metadata in mpc.lsinfo(path):
item.add(env, metadata)
return
def __nav_playlists(self, env, mpc, params=[]):
item = ItemFile()
for metadata in mpc.listplaylists():
item.add(env, metadata)
return
def __nav_pl(self, env, mpc, params=[]):
path = params[0]
item = ItemFile()
for metadata in mpc.listplaylistinfo(path):
item.add(env, metadata)
return
def __nav_list(self, env, mpc, params=[]):
item = ItemTag()
for tag in mpc.list(*params):
item.add(env, mpc, params[0], tag, params[1:])
return
def __nav_find(self, env, mpc, params=[]):
item = ItemFile()
for metadata in mpc.find(*params):
item.add(env, metadata)
return
def __nav_queue(self, env, mpc, params=[]):
item = ItemFile()
for metadata in mpc.playlistinfo():
item.add(env, metadata)
return
def __action_add(self, env, mpc, params=[]):
mpc.add(params[0])
xbmcgui.Dialog().notification(
"MPD", self.__env.localized(Language.SONGS_ADDED), xbmcgui.NOTIFICATION_INFO, 5000)
# mpc.play()
return
def __action_load(self, env, mpc, params=[]):
mpc.load(params[0])
xbmcgui.Dialog().notification(
"MPD", self.__env.localized(Language.SONGS_ADDED), xbmcgui.NOTIFICATION_INFO, 5000)
# mpc.play()
return
def __action_findadd(self, env, mpc, params=[]):
mpc.findadd(*params)
xbmcgui.Dialog().notification(
"MPD", self.__env.localized(Language.SONGS_ADDED), xbmcgui.NOTIFICATION_INFO, 5000)
# mpc.play()
return
def __action_play(self, env, mpc, params=[]):
        if int(params[0]) >= 0:
mpc.play(int(params[0]))
else:
mpc.play()
return
def __action_pause(self, env, mpc, params=[]):
mpc.pause()
return
def __action_stop(self, env, mpc, params=[]):
mpc.stop()
return
def __action_prev(self, env, mpc, params=[]):
mpc.previous()
return
def __action_next(self, env, mpc, params=[]):
mpc.next()
return
def __action_remove(self, env, mpc, params=[]):
mpc.delete(params[0])
return
def __action_clear(self, env, mpc, params=[]):
mpc.clear()
return
def __action_outputs(self, env, mpc, params=[]):
outputs = []
outputids = []
for output in mpc.outputs():
if output["outputenabled"] == "1":
enabled = " [enabled]"
else:
enabled = " [disabled]"
outputs.append(output["outputname"] + enabled)
outputids.append(output["outputid"])
ret = xbmcgui.Dialog().select("Toggle outputs", outputs, False)
if ret >= 0:
mpc.toggleoutput(outputids[ret])
# xbmcgui.Dialog().notification("MPD",
# self.__env.localized(Language.SONGS_ADDED),
# xbmcgui.NOTIFICATION_INFO,
# 2000)
return
class Item:
def global_contextmenu(self, env, pospl=-1):
return [(env.localized(Language.PLAY), "RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_PLAY, [str(pospl)]) + ")"),
(env.localized(Language.PAUSE),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_PAUSE) + ")"),
(env.localized(Language.STOP),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_STOP) + ")"),
(env.localized(Language.PREVIOUS),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_PREV) + ")"),
(env.localized(Language.NEXT),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_NEXT) + ")"),
(env.localized(Language.CLEAR),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_CLEAR) + ")"),
(env.localized(Language.OUTPUTS), "RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_OUTPUTS) + ")"), ]
class ItemRoot(Item):
def add(self, env, navid, name, param, icon="DefaultFolder.png"):
li = xbmcgui.ListItem(name, iconImage=icon)
li.addContextMenuItems(self.global_contextmenu(env), True)
url = NavUrl.build_url(env, navid, param)
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
class ItemTag(Item):
def add(self, env, mpc, tag, val, what):
#t = [tag, val] + what + ["0:1"]
#print t
#mpc.find(*t)
if "albumartist" == tag:
self.__add_artist(env, val, what)
elif "album" == tag:
self.__add_album(env, val, what)
elif "genre" == tag:
self.__add_genre(env, val, what)
return
def __add_artist(self, env, artist, what):
li = xbmcgui.ListItem(artist, iconImage="DefaultMusicArtists.png")
li.setInfo("music", {#"genre": metadata.get("genre", env.localized(Language.UNKNOWN)),
#"year": metadata.get("date", None),
#"title": metadata.get("title", ""),
#"album": metadata.get("album", env.localized(Language.UNKNOWN)),
"artist": artist,
#"duration": metadata.get("time", 0),
#"tracknumber": metadata.get("track", None),
# "rating": "0", # TODO
# "playcount": 0, # TODO
# "lastplayed": "", # TODO
# "lyrics": "", # TODO
}
)
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" + NavUrl.build_url(env,
Nav.ACTION_FINDADD, ["albumartist", artist] + what) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(
env, Nav.NAV_LIST, ["album", "albumartist", artist] + what)
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
def __add_album(self, env, album, what):
li = xbmcgui.ListItem(album, iconImage="DefaultMusicAlbums.png")
li.setInfo("music", {#"genre": metadata.get("genre", env.localized(Language.UNKNOWN)),
#"year": metadata.get("date", None),
#"title": metadata.get("title", ""),
"album": album,
#"artist": artist,
#"duration": metadata.get("time", 0),
#"tracknumber": metadata.get("track", None),
# "rating": "0", # TODO
# "playcount": 0, # TODO
# "lastplayed": "", # TODO
# "lyrics": "", # TODO
}
)
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" + NavUrl.build_url(env,
Nav.ACTION_FINDADD, ["album", album] + what) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(env, Nav.NAV_FIND, ["album", album] + what)
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
def __add_genre(self, env, genre, what):
li = xbmcgui.ListItem(genre, iconImage="DefaultMusicGenres.png")
li.setInfo("music", {"genre": genre,
#"year": metadata.get("date", None),
#"title": metadata.get("title", ""),
#"album": album,
#"artist": artist,
#"duration": metadata.get("time", 0),
#"tracknumber": metadata.get("track", None),
# "rating": "0", # TODO
# "playcount": 0, # TODO
# "lastplayed": "", # TODO
# "lyrics": "", # TODO
}
)
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" + NavUrl.build_url(env,
Nav.ACTION_FINDADD, ["genre", genre] + what) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(
env, Nav.NAV_LIST, ["albumartist", "genre", genre] + what)
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
class ItemFile(Item):
def add(self, env, metadata):
if "directory" in metadata:
self.__add_dir(env, metadata)
elif "playlist" in metadata:
self.__add_playlist(env, metadata)
elif "file" in metadata:
self.__add_song(env, metadata)
return
def __add_dir(self, env, metadata):
path = metadata["directory"]
name = path[path.rfind("/") + 1:]
li = xbmcgui.ListItem(name, iconImage="DefaultFolder.png")
li.addContextMenuItems(
[(env.localized(Language.ADD),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_ADD, [path]) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(env, Nav.NAV_FILE, [path])
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
def __add_playlist(self, env, metadata):
path = metadata["playlist"]
name = path[path.rfind("/") + 1:]
li = xbmcgui.ListItem(name, iconImage="DefaultMusicPlaylists.png")
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" +
NavUrl.build_url(env, Nav.ACTION_LOAD, [path]) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(env, Nav.NAV_PL, [path])
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
def __add_song(self, env, metadata):
path = metadata["file"]
name = path[path.rfind("/") + 1:]
# If pos is given, this lists the current playlist and tracknumber
# is the position in the playlist instead of the album.
is_queue = "pos" in metadata
if is_queue:
pospl = int(metadata.get("pos", "-1"))
tracknumber = int(metadata.get("pos", "-1")) + 1
else:
pospl = -1
tracknumber = metadata.get("track", None)
li = xbmcgui.ListItem(name, iconImage="DefaultMusicSongs.png")
li.setInfo("music", {"genre": metadata.get("genre", env.localized(Language.UNKNOWN)),
"year": metadata.get("date", None),
"title": metadata.get("title", ""),
"album": metadata.get("album", env.localized(Language.UNKNOWN)),
"artist": metadata.get("artist", env.localized(Language.UNKNOWN)),
"duration": metadata.get("time", 0),
"tracknumber": tracknumber,
# "rating": "0", # TODO
# "playcount": 0, # TODO
# "lastplayed": "", # TODO
# "lyrics": "", # TODO
}
)
if is_queue:
li.addContextMenuItems(
[(env.localized(Language.REMOVE), "RunPlugin(" +
NavUrl.build_url(env, Nav.ACTION_REMOVE, [metadata.get("pos", "-1")]) + ")"), ]
+ self.global_contextmenu(env, pospl), True)
url = NavUrl.build_url(env, Nav.ACTION_PLAY, [str(pospl)])
else:
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" +
NavUrl.build_url(env, Nav.ACTION_ADD, [path]) + ")"), ]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(env, Nav.ACTION_ADD, [path])
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(), url=url, listitem=li)
return
| gpl-3.0 | -9,181,020,625,052,426,000 | 39.342268 | 163 | 0.525759 | false |
Psycojoker/hackeragenda | hackeragenda/settings.py | 1 | 5897 | # Django settings for hackeragenda project.
import os
from collections import OrderedDict
PROJECT_PATH = os.path.abspath(os.path.split(__file__)[0])
SUBPROJECT_PATH = os.path.split(PROJECT_PATH)[0]
BASE_DIR = PROJECT_PATH # to avoid stupid warning from django 1.6
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
ALLOWED_HOSTS = ['*']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db.sqlite',
}
}
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
TIME_ZONE = 'Europe/Brussels'
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
USE_TZ = False
MEDIA_ROOT = ''
MEDIA_URL = '/medias/'
STATIC_ROOT = SUBPROJECT_PATH + '/static_deploy/static/'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(SUBPROJECT_PATH, "static"),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
SECRET_KEY = 't)^bq6!v8!vj$+t+!4x1+uj100d73_8pt5d1(gh=py=lz7$^vm'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(SUBPROJECT_PATH, "templates"),],
# 'APP_DIRS': True,
'OPTIONS': {
'loaders': [
'hamlpy.template.loaders.HamlPyFilesystemLoader',
'hamlpy.template.loaders.HamlPyAppDirectoriesLoader',
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
PREDEFINED_FILTERS = OrderedDict()
PREDEFINED_FILTERS["default"] = {
"source": [
'afpyro',
'agenda_du_libre_be',
'belgian_blender_user_group',
'belgium_python_meetup',
'bhackspace',
'blender_brussels',
'brixel',
'bxlug',
'constantvzw',
'F_LAT',
'foam',
'hsbxl',
'incubhacker',
'jeudi_du_libre_mons',
'ko_lab',
'makilab',
'neutrinet',
'npbbxl',
'okfnbe',
'okno',
'opengarage',
'opengarage_meetings',
'openstreetmap_belgium',
'opentechschool',
'owaspbe',
'realize',
'source',
'syn2cat',
'urlab',
'voidwarranties',
'whitespace',
'wolfplex',
],
"exclude_source": [],
"tag": [],
"exclude_tag": ["meeting", "on_reservation"],
}
PREDEFINED_FILTERS["all"] = {
"source": [],
"exclude_source": [],
"tag": [],
"exclude_tag": [],
}
PREDEFINED_FILTERS["hackerspaces"] = {
"source": [
"brixel",
"bhackspace",
"hsbxl",
"incubhacker",
"opengarage",
"syn2cat",
"urlab",
"voidwarranties",
"whitespace",
"wolfplex",
"ko_lab"
],
"exclude_source": [],
"tag": [],
"exclude_tag": [],
}
PREDEFINED_FILTERS["*lab"] = {
"source": [],
"exclude_source": [],
"tag": ["fablab"],
"exclude_tag": [],
}
PREDEFINED_FILTERS["art"] = {
"source": [],
"exclude_source": [],
"tag": ["art"],
"exclude_tag": [],
}
PREDEFINED_FILTERS["code"] = {
"source": [],
"exclude_source": [],
"tag": ["code"],
"exclude_tag": [],
}
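# Sketch of how the filter entries above are intended to be read. The
# consuming view lives in the events app and is not shown here, so the
# attribute names (event.source, event.tags) and the helper below are
# assumptions, not the actual implementation:
#
#     def keep(event, f):
#         return ((not f["source"] or event.source in f["source"])
#                 and event.source not in f["exclude_source"]
#                 and (not f["tag"] or set(f["tag"]) & set(event.tags))
#                 and not set(f["exclude_tag"]) & set(event.tags))
#
# Under that reading, the "default" filter keeps events from the listed
# agendas while hiding anything tagged "meeting" or "on_reservation".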
if DEBUG:
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
# Needed for the admin interface
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
INTERNAL_IPS = ('127.0.0.1',)
ROOT_URLCONF = 'hackeragenda.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'hackeragenda.wsgi.application'
LEAFLET_CONFIG = {
'DEFAULT_CENTER': (50.6407351, 4.66696),
'DEFAULT_ZOOM': 7,
'MIN_ZOOM': 2,
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'authentication',
'administration',
'events',
'taggit',
'gunicorn',
'leaflet',
)
AGENDA = "be"
if DEBUG:
INSTALLED_APPS += (
'debug_toolbar',
'django_pdb',
'django_extensions',
)
FIXTURE_DIRS = (
'fixtures',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
}
LOGIN_REDIRECT_URL = '/administration/'
try:
from settings_local import *
except ImportError:
pass
| gpl-3.0 | -9,104,221,644,388,092,000 | 21.76834 | 72 | 0.575886 | false |
mbr/flatland0 | tests/schema/test_dicts.py | 1 | 17601 | from flatland import (
Dict,
Integer,
String,
SparseDict,
Unset,
)
from flatland.util import Unspecified, keyslice_pairs
from tests._util import (
asciistr,
udict,
unicode_coercion_available,
)
import six
import pytest
def test_dict():
with pytest.raises(TypeError):
Dict()
def test_dict_immutable_keys():
schema = Dict.of(Integer.named(u'x'), Integer.named(u'y'))
el = schema()
with pytest.raises(TypeError):
el[u'z'] = 123
with pytest.raises(TypeError):
del el[u'x']
with pytest.raises(KeyError):
del el[u'z']
with pytest.raises(TypeError):
el.setdefault(u'x', 123)
with pytest.raises(TypeError):
el.setdefault(u'z', 123)
with pytest.raises(TypeError):
el.pop(u'x')
with pytest.raises(KeyError):
el.pop(u'z')
with pytest.raises(TypeError):
el.popitem()
with pytest.raises(TypeError):
el.clear()
def test_dict_reads():
schema = Dict.of(Integer.named(u'x'), Integer.named(u'y'))
el = schema()
el[u'x'].set(u'10')
el[u'y'].set(u'20')
assert el[u'x'].value == 10
assert el[u'y'].value == 20
# the values are unhashable Elements, so this is a little painful
assert set(el.keys()) == set(u'xy')
assert (set([(u'x', 10), (u'y', 20)]) ==
set([(_[0], _[1].value) for _ in el.items()]))
assert set([10, 20]) == set([_.value for _ in el.values()])
assert el.get(u'x').value == 10
el[u'x'] = None
assert el.get(u'x').value is None
assert el.get(u'x', 'default is never used').value is None
with pytest.raises(KeyError):
el.get(u'z')
with pytest.raises(KeyError):
el.get(u'z', 'even with a default')
def test_dict_update():
schema = Dict.of(Integer.named(u'x'), Integer.named(u'y'))
el = schema()
def value_dict(element):
return dict((k, v.value) for k, v in six.iteritems(element))
try:
el.update(x=20, y=30)
except UnicodeError:
assert not unicode_coercion_available()
el.update(udict(x=20, y=30))
assert udict(x=20, y=30) == el.value
el.update({u'y': 40})
assert udict(x=20, y=40) == el.value
el.update()
assert udict(x=20, y=40) == el.value
el.update((_, 100) for _ in u'xy')
assert udict(x=100, y=100) == el.value
try:
el.update([(u'x', 1)], y=2)
assert udict(x=1, y=2) == el.value
except UnicodeError:
assert not unicode_coercion_available()
try:
el.update([(u'x', 10), (u'y', 10)], x=20, y=20)
assert udict(x=20, y=20) == el.value
except UnicodeError:
assert not unicode_coercion_available()
if unicode_coercion_available():
with pytest.raises(TypeError):
el.update(z=1)
with pytest.raises(TypeError):
el.update(x=1, z=1)
with pytest.raises(TypeError):
el.update({u'z': 1})
with pytest.raises(TypeError):
el.update({u'x': 1, u'z': 1})
with pytest.raises(TypeError):
el.update({u'z': 1})
with pytest.raises(TypeError):
el.update({u'x': 1, u'z': 1})
class DictSetTest(object):
schema = Dict
policy = Unspecified
x_default = Unspecified
y_default = Unspecified
def new_schema(self):
dictkw, x_kw, y_kw = {}, {}, {}
if self.policy is not Unspecified:
dictkw['policy'] = self.policy
if self.x_default is not Unspecified:
x_kw['default'] = self.x_default
if self.y_default is not Unspecified:
y_kw['default'] = self.y_default
return self.schema.named(u's').using(**dictkw).of(
Integer.named(u'x').using(**x_kw),
Integer.named(u'y').using(**y_kw))
def new_element(self, schema=Unspecified, **kw):
if schema is Unspecified:
schema = self.new_schema()
return schema(**kw)
def test_empty_sets(self):
wanted = {u'x': None, u'y': None}
el = self.new_element()
assert el.value == wanted
el.set({})
assert el.value == wanted
el = self.new_element(value={})
assert el.value == wanted
el = self.new_element(value=iter(()))
assert el.value == wanted
el = self.new_element(value=())
assert el.value == wanted
def test_empty_set_flat(self):
el = self.new_element()
el.set_flat(())
assert el.value == {u'x': None, u'y': None}
def test_half_set(self):
wanted = {u'x': 123, u'y': None}
el = self.new_element()
el.set({u'x': 123})
assert el.value == wanted
el = self.new_element()
el.set([(u'x', 123)])
assert el.value == wanted
def test_half_set_flat(self):
wanted = {u'x': 123, u'y': None}
pairs = ((u's_x', u'123'),)
el = self.new_element()
el.set_flat(pairs)
assert el.value == wanted
def test_full_set(self):
wanted = {u'x': 101, u'y': 102}
el = self.new_element()
el.set(wanted)
assert el.value == wanted
el = self.new_element()
el.set(udict(x=101, y=102))
assert el.value == wanted
el = self.new_element()
el.set([(u'x', 101), (u'y', 102)])
assert el.value == wanted
el = self.new_element(value=wanted)
assert el.value == wanted
def test_full_set_flat(self):
wanted = {u'x': 101, u'y': 102}
pairs = ((u's_x', u'101'), (u's_y', u'102'))
el = self.new_element()
el.set_flat(pairs)
assert el.value == wanted
def test_scalar_set_flat(self):
wanted = {u'x': None, u'y': None}
pairs = ((u's', u'xxx'),)
el = self.new_element()
canary = []
def setter(self, value):
canary.append(value)
return type(el).set(self, value)
el.set = setter.__get__(el, type(el))
el.set_flat(pairs)
assert el.value == wanted
assert canary == []
def test_over_set(self):
too_much = {u'x': 1, u'y': 2, u'z': 3}
el = self.new_element()
with pytest.raises(KeyError):
el.set(too_much)
with pytest.raises(KeyError):
self.new_element(value=too_much)
def test_over_set_flat(self):
wanted = {u'x': 123, u'y': None}
pairs = ((u's_x', u'123'), (u's_z', u'nope'))
el = self.new_element()
el.set_flat(pairs)
assert el.value == wanted
def test_total_miss(self):
miss = {u'z': 3}
el = self.new_element()
with pytest.raises(KeyError):
el.set(miss)
with pytest.raises(KeyError):
self.new_element(value=miss)
def test_total_miss_flat(self):
pairs = ((u'miss', u'10'),)
el = self.new_element()
el.set_flat(pairs)
assert el.value == {u'x': None, u'y': None}
def test_set_return(self):
el = self.new_element()
assert el.set({u'x': 1, u'y': 2})
el = self.new_element()
assert not el.set({u'x': u'i am the new', u'y': u'number two'})
def test_set_default(self):
wanted = {u'x': 11, u'y': 12}
schema = self.new_schema()
schema.default = wanted
el = schema()
el.set_default()
assert el.value == wanted
def test_set_default_from_children(self):
el = self.new_element()
el.set_default()
wanted = {
u'x': self.x_default if self.x_default is not Unspecified
else None,
u'y': self.y_default if self.y_default is not Unspecified
else None,
}
assert el.value == wanted
class TestEmptyDictSet(DictSetTest):
pass
class TestDefaultDictSet(DictSetTest):
x_default = 10
y_default = 20
class TestEmptySparseDictRequiredSet(DictSetTest):
schema = SparseDict.using(minimum_fields='required')
def test_dict_valid_policies():
schema = Dict.of(Integer)
el = schema()
with pytest.raises(AssertionError):
el.set({}, policy='bogus')
def test_dict_strict():
# a mini test, this policy thing may get whacked
schema = Dict.using(policy='strict').of(Integer.named(u'x'),
Integer.named(u'y'))
el = schema({u'x': 123, u'y': 456})
el = schema()
with pytest.raises(TypeError):
el.set({u'x': 123})
el = schema()
with pytest.raises(KeyError):
el.set({u'x': 123, u'y': 456, u'z': 7})
def test_dict_raw():
schema = Dict.of(Integer.named('x').using(optional=False))
el = schema()
assert el.raw is Unset
el = schema({u'x': u'bar'})
assert el.raw == {u'x': u'bar'}
el = schema([(u'x', u'bar')])
assert el.raw == [(u'x', u'bar')]
el.set_flat([(u'x', u'123')])
assert el.raw is Unset
el = schema.from_flat([(u'x', u'123')])
assert el.raw is Unset
assert el[u'x'].raw == u'123'
def test_dict_as_unicode():
schema = Dict.of(Integer.named(u'x'), Integer.named(u'y'))
el = schema({u'x': 1, u'y': 2})
assert el.u in (u"{u'x': u'1', u'y': u'2'}", u"{u'y': u'2', u'x': u'1'}")
def test_nested_dict_as_unicode():
schema = Dict.of(Dict.named(u'd').of(
Integer.named(u'x').using(default=10)))
el = schema.from_defaults()
assert el.value == {u'd': {u'x': 10}}
assert el.u == u"{u'd': {u'x': u'10'}}"
def test_nested_unicode_dict_as_unicode():
schema = Dict.of(Dict.named(u'd').of(
String.named(u'x').using(default=u'\u2308\u2309')))
el = schema.from_defaults()
assert el.value == {u'd': {u'x': u'\u2308\u2309'}}
assert el.u == u"{u'd': {u'x': u'\u2308\u2309'}}"
def test_dict_el():
# stub
schema = Dict.named(u's').of(Integer.named(u'x'), Integer.named(u'y'))
element = schema()
assert element.el(u'x').name == u'x'
with pytest.raises(KeyError):
element.el(u'not_x')
def test_update_object():
class Obj(object):
def __init__(self, **kw):
for (k, v) in kw.items():
setattr(self, k, v)
schema = Dict.of(String.named(u'x'), String.named(u'y'))
o = Obj()
assert not hasattr(o, 'x')
assert not hasattr(o, 'y')
def updated_(obj_factory, initial_value, wanted=None, **update_kw):
el = schema(initial_value)
obj = obj_factory()
keyfunc = lambda x: x if six.PY3 else asciistr(x)
update_kw.setdefault('key', keyfunc)
el.update_object(obj, **update_kw)
if wanted is None:
wanted = dict((keyfunc(k), v) for k, v in initial_value.items())
have = dict(obj.__dict__)
assert have == wanted
updated_(Obj, {u'x': u'X', u'y': u'Y'})
updated_(Obj, {u'x': u'X'}, {'x': u'X', 'y': None})
updated_(lambda: Obj(y=u'Y'), {u'x': u'X'}, {'x': u'X', 'y': None})
updated_(lambda: Obj(y=u'Y'), {u'x': u'X'}, {'x': u'X', 'y': u'Y'},
omit=('y',))
updated_(lambda: Obj(y=u'Y'), {u'x': u'X'}, {'y': u'Y'},
include=(u'z',))
updated_(Obj, {u'x': u'X'}, {'y': None, 'z': u'X'},
rename=(('x', 'z'),))
def test_slice():
schema = Dict.of(String.named(u'x'), String.named(u'y'))
def same_(source, kw):
el = schema(source)
sliced = el.slice(**kw)
wanted = dict(keyslice_pairs(el.value.items(), **kw))
assert sliced == wanted
        assert (set(type(_) for _ in sliced.keys()) ==
                set(type(_) for _ in wanted.keys()))
yield same_, {u'x': u'X', u'y': u'Y'}, {}
keyfunc = lambda x: x if six.PY3 else asciistr(x)
yield same_, {u'x': u'X', u'y': u'Y'}, dict(key=keyfunc)
yield same_, {u'x': u'X', u'y': u'Y'}, dict(include=[u'x'])
yield same_, {u'x': u'X', u'y': u'Y'}, dict(omit=[u'x'])
yield same_, {u'x': u'X', u'y': u'Y'}, dict(omit=[u'x'],
rename={u'y': u'z'})
def test_sparsedict_key_mutability():
schema = SparseDict.of(Integer.named(u'x'), Integer.named(u'y'))
el = schema()
ok, bogus = u'x', u'z'
el[ok] = 123
assert el[ok].value == 123
with pytest.raises(TypeError):
el.__setitem__(bogus, 123)
del el[ok]
assert ok not in el
with pytest.raises(TypeError):
el.__delitem__(bogus)
assert el.setdefault(ok, 456)
with pytest.raises(TypeError):
el.setdefault(bogus, 456)
el[ok] = 123
assert el.pop(ok)
with pytest.raises(KeyError):
el.pop(bogus)
with pytest.raises(NotImplementedError):
el.popitem()
el.clear()
assert not el
def test_sparsedict_operations():
schema = SparseDict.of(Integer.named(u'x'), Integer.named(u'y'))
el = schema()
el[u'x'] = 123
del el[u'x']
with pytest.raises(KeyError):
el.__delitem__(u'x')
assert el.setdefault(u'x', 123) == 123
assert el.setdefault(u'x', 456) == 123
assert el.setdefault(u'y', 123) == 123
assert el.setdefault(u'y', 456) == 123
assert schema().is_empty
assert not schema().validate()
opt_schema = schema.using(optional=True)
assert opt_schema().validate()
def test_sparsedict_required_operations():
schema = (SparseDict.using(minimum_fields='required').
of(Integer.named(u'opt').using(optional=True),
Integer.named(u'req')))
el = schema({u'opt': 123, u'req': 456})
del el[u'opt']
with pytest.raises(KeyError):
el.__delitem__(u'opt')
with pytest.raises(TypeError):
el.__delitem__(u'req')
el = schema()
assert el.setdefault(u'opt', 123) == 123
assert el.setdefault(u'opt', 456) == 123
assert el.setdefault(u'req', 123) == 123
assert el.setdefault(u'req', 456) == 123
assert not schema().is_empty
assert not schema().validate()
def test_sparsedict_set_default():
schema = SparseDict.of(Integer.named(u'x').using(default=123),
Integer.named(u'y'))
el = schema()
el.set_default()
assert el.value == {}
def test_sparsedict_required_set_default():
schema = (SparseDict.using(minimum_fields='required').
of(Integer.named(u'x').using(default=123),
Integer.named(u'y').using(default=456, optional=True),
Integer.named(u'z').using(optional=True)))
el = schema()
el.set_default()
assert el.value == {u'x': 123}
def test_sparsedict_bogus_set_default():
schema = (SparseDict.using(minimum_fields='bogus').
of(Integer.named(u'x')))
el = schema()
with pytest.raises(RuntimeError):
el.set_default()
def test_sparsedict_required_key_mutability():
schema = (SparseDict.of(Integer.named(u'x').using(optional=True),
Integer.named(u'y')).
using(minimum_fields='required'))
el = schema()
ok, required, bogus = u'x', u'y', u'z'
assert ok not in el
assert required in el
assert bogus not in el
el[ok] = 123
assert el[ok].value == 123
el[required] = 456
assert el[required].value == 456
with pytest.raises(TypeError):
el.__setitem__(bogus, 123)
del el[ok]
assert ok not in el
with pytest.raises(TypeError):
el.__delitem__(required)
with pytest.raises(TypeError):
el.__delitem__(bogus)
assert el.setdefault(ok, 456)
assert el.setdefault(required, 789)
with pytest.raises(TypeError):
el.setdefault(bogus, 456)
el[ok] = 123
assert el.pop(ok)
el[required] = 456
with pytest.raises(TypeError):
el.pop(required)
with pytest.raises(KeyError):
el.pop(bogus)
with pytest.raises(NotImplementedError):
el.popitem()
el.clear()
assert list(el.keys()) == [required]
def test_sparsedict_from_flat():
schema = SparseDict.of(Integer.named(u'x'),
Integer.named(u'y'))
el = schema.from_flat([])
assert list(el.items()) == []
el = schema.from_flat([(u'x', u'123')])
assert el.value == {u'x': 123}
el = schema.from_flat([(u'x', u'123'), (u'z', u'456')])
assert el.value == {u'x': 123}
def test_sparsedict_required_from_flat():
schema = (SparseDict.of(Integer.named(u'x'),
Integer.named(u'y').using(optional=True)).
using(minimum_fields='required'))
el = schema.from_flat([])
assert el.value == {u'x': None}
el = schema.from_flat([(u'x', u'123')])
assert el.value == {u'x': 123}
el = schema.from_flat([(u'y', u'456'), (u'z', u'789')])
assert el.value == {u'x': None, u'y': 456}
def test_sparsedict_required_validation():
schema = (SparseDict.of(Integer.named(u'x'),
Integer.named(u'y').using(optional=True)).
using(minimum_fields='required'))
el = schema()
assert not el.validate()
el = schema({u'y': 456})
assert not el.validate()
el = schema({u'x': 123, u'y': 456})
assert el.validate()
def test_sparsedict_flattening():
schema = (SparseDict.named(u'top').
of(Integer.named(u'x'), Integer.named(u'y')))
els = [
schema({'x': 123, 'y': 456}),
schema(),
schema(),
schema(),
]
els[1].set({'x': 123, 'y': 456})
els[2]['x'] = 123
els[2]['y'] = 456
els[3]['x'] = Integer(123)
els[3]['y'] = Integer(456)
wanted = [(u'top_x', u'123'), (u'top_y', u'456')]
for el in els:
got = sorted(el.flatten())
assert wanted == got
| mit | -5,521,041,941,222,855,000 | 25.075556 | 77 | 0.546276 | false |
wooga/airflow | tests/providers/microsoft/azure/operators/test_file_to_wasb.py | 1 | 2779 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import datetime
import unittest
import mock
from airflow.models.dag import DAG
from airflow.providers.microsoft.azure.operators.file_to_wasb import FileToWasbOperator
class TestFileToWasbOperator(unittest.TestCase):
_config = {
'file_path': 'file',
'container_name': 'container',
'blob_name': 'blob',
'wasb_conn_id': 'wasb_default',
'retries': 3,
}
def setUp(self):
args = {
'owner': 'airflow',
'start_date': datetime.datetime(2017, 1, 1)
}
self.dag = DAG('test_dag_id', default_args=args)
def test_init(self):
operator = FileToWasbOperator(
task_id='wasb_operator_1',
dag=self.dag,
**self._config
)
self.assertEqual(operator.file_path, self._config['file_path'])
self.assertEqual(operator.container_name,
self._config['container_name'])
self.assertEqual(operator.blob_name, self._config['blob_name'])
self.assertEqual(operator.wasb_conn_id, self._config['wasb_conn_id'])
self.assertEqual(operator.load_options, {})
self.assertEqual(operator.retries, self._config['retries'])
operator = FileToWasbOperator(
task_id='wasb_operator_2',
dag=self.dag,
load_options={'timeout': 2},
**self._config
)
self.assertEqual(operator.load_options, {'timeout': 2})
@mock.patch('airflow.providers.microsoft.azure.operators.file_to_wasb.WasbHook',
autospec=True)
def test_execute(self, mock_hook):
mock_instance = mock_hook.return_value
operator = FileToWasbOperator(
task_id='wasb_sensor',
dag=self.dag,
load_options={'timeout': 2},
**self._config
)
operator.execute(None)
mock_instance.load_file.assert_called_once_with(
'file', 'container', 'blob', timeout=2
)
| apache-2.0 | -3,118,337,514,945,456,000 | 33.308642 | 87 | 0.635121 | false |
lizardsystem/lizard-map | lizard_map/daterange.py | 1 | 2259 | """Handle the date range setting and remembering
"""
import datetime
import logging
from django.conf import settings
import pytz
# NOTE: this module is obsolete as date ranges are entirely handled in
# javascript and should be passed as request parameter
logger = logging.getLogger(__name__)
# Session data postfixed with '_3' as the meaning changed between versions.
SESSION_DT_RANGETYPE = 'dt_rangetype_3'
SESSION_DT_START = 'dt_start_3'
SESSION_DT_END = 'dt_end_3'
default_start_days = getattr(settings, 'DEFAULT_START_DAYS', -2)
default_end_days = getattr(settings, 'DEFAULT_END_DAYS', 0)
def default_start(now):
"""Return default start date when period is PERIOD_OTHER."""
return now + datetime.timedelta(days=default_start_days)
def default_end(now):
"""Return default end date when period is PERIOD_OTHER."""
return now + datetime.timedelta(days=default_end_days)
def current_period(request):
"""
Return the current period, either default or from session.
    TODO: mix together with current_start_end_dates (but it has a lot
of impact)
"""
default_period = getattr(settings, 'DEFAULT_RANGE_TYPE', 'week_plus_one')
if request is None:
return default_period
else:
return request.session.get(SESSION_DT_RANGETYPE, default_period)
def current_start_end_dates(request, for_form=False, today=None,
retrieve_period_function=current_period):
"""Return the current start datetime and end datetime.
    If for_form is True, this function returns the datetimes as a dictionary
    so the client can pass that directly into a form class. If for_form is not
    True, this function returns them as a tuple.
    Other parameters:
*today*
datetime to initialize the current datetime (for testing purposes)
*retrieve_period_function*
function to retrieve the period type (for testing purposes)
"""
    if today is None:
        today = datetime.datetime.now(tz=pytz.UTC)
session = request.session
dt_start = session.get(SESSION_DT_START, default_start(today))
dt_end = session.get(SESSION_DT_END, default_end(today))
if for_form:
return dict(dt_start=dt_start, dt_end=dt_end)
else:
return (dt_start, dt_end)
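# Illustrative return shapes of current_start_end_dates(); the concrete
# datetimes depend on the session contents and on the DEFAULT_START_DAYS /
# DEFAULT_END_DAYS settings (here -2 and 0 days around "now"):
#
#     current_start_end_dates(request)
#     -> (datetime(2016, 1, 1, tzinfo=<UTC>), datetime(2016, 1, 3, tzinfo=<UTC>))
#
#     current_start_end_dates(request, for_form=True)
#     -> {'dt_start': datetime(2016, 1, 1, tzinfo=<UTC>),
#         'dt_end': datetime(2016, 1, 3, tzinfo=<UTC>)}
#
# so a view can pass the for_form variant straight into a form, e.g.
# SomeDateRangeForm(initial=current_start_end_dates(request, for_form=True))
# (the form class name here is hypothetical).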
| lgpl-3.0 | -1,085,294,113,683,170,700 | 29.527027 | 78 | 0.698539 | false |
mlperf/training_results_v0.5 | v0.5.0/nvidia/submission/code/object_detection/pytorch/maskrcnn_benchmark/config/defaults.py | 1 | 11814 |
#
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# Copyright (c) 2017-2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from yacs.config import CfgNode as CN
# -----------------------------------------------------------------------------
# Convention about Training / Test specific parameters
# -----------------------------------------------------------------------------
# Whenever an argument can be either used for training or for testing, the
# corresponding name will be post-fixed by a _TRAIN for a training parameter,
# or _TEST for a test-specific parameter.
# For example, the number of images during training will be
# IMAGES_PER_BATCH_TRAIN, while the number of images for testing will be
# IMAGES_PER_BATCH_TEST
# -----------------------------------------------------------------------------
# Config definition
# -----------------------------------------------------------------------------
_C = CN()
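# Typical downstream usage of this default config (a sketch; the YAML path and
# the override value are examples, not requirements). Consumers import the
# node, merge an experiment config over it, and optionally override single
# keys -- which is where the _TRAIN/_TEST naming convention described above
# comes into play:
#
#     from maskrcnn_benchmark.config import cfg   # cfg is _C re-exported
#     cfg.merge_from_file("configs/e2e_mask_rcnn_R_50_FPN_1x.yaml")
#     cfg.merge_from_list(["DATALOADER.IMAGES_PER_BATCH_TRAIN", 4])
#     cfg.freeze()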
_C.MODEL = CN()
_C.MODEL.RPN_ONLY = False
_C.MODEL.MASK_ON = False
_C.MODEL.DEVICE = "cuda"
_C.MODEL.META_ARCHITECTURE = "GeneralizedRCNN"
# If the WEIGHT starts with a catalog://, like :R-50, the code will look for
# the path in paths_catalog. Else, it will use it as the specified absolute
# path
_C.MODEL.WEIGHT = ""
# -----------------------------------------------------------------------------
# Load pre-trained models from C2 Detectron
# -----------------------------------------------------------------------------
_C.MODEL.C2_COMPAT = CN()
# Weight file from C2 Detectron. Should be in .pkl format
_C.MODEL.C2_COMPAT.WEIGHTS = ""
# Name of the function that loads the C2 weights into our PyTorch model
_C.MODEL.C2_COMPAT.WEIGHT_LOADER = ""
# Load from C2 Detectron or not
_C.MODEL.C2_COMPAT.ENABLED = False
# -----------------------------------------------------------------------------
# INPUT
# -----------------------------------------------------------------------------
_C.INPUT = CN()
# Size of the smallest side of the image during training
_C.INPUT.MIN_SIZE_TRAIN = 800 # (800,)
# Maximum size of the side of the image during training
_C.INPUT.MAX_SIZE_TRAIN = 1333
# Size of the smallest side of the image during testing
_C.INPUT.MIN_SIZE_TEST = 800
# Maximum size of the side of the image during testing
_C.INPUT.MAX_SIZE_TEST = 1333
# Values to be used for image normalization
_C.INPUT.PIXEL_MEAN = [102.9801, 115.9465, 122.7717]
# Values to be used for image normalization
_C.INPUT.PIXEL_STD = [1., 1., 1.]
# -----------------------------------------------------------------------------
# Dataset
# -----------------------------------------------------------------------------
_C.DATASETS = CN()
# List of the dataset names for training, as present in paths_catalog.py
_C.DATASETS.TRAIN = ()
# List of the dataset names for testing, as present in paths_catalog.py
_C.DATASETS.TEST = ()
# -----------------------------------------------------------------------------
# DataLoader
# -----------------------------------------------------------------------------
_C.DATALOADER = CN()
# Number of data loading threads
_C.DATALOADER.NUM_WORKERS = 4
# If > 0, this enforces that each collated batch should have a size divisible
# by SIZE_DIVISIBILITY
_C.DATALOADER.SIZE_DIVISIBILITY = 0
# Number of images per batch
_C.DATALOADER.IMAGES_PER_BATCH_TRAIN = 2
_C.DATALOADER.IMAGES_PER_BATCH_TEST = 1
# If True, each batch should contain only images for which the aspect ratio
# is compatible. This groups portrait images together, and landscape images
# are not batched with portrait images.
_C.DATALOADER.ASPECT_RATIO_GROUPING = True
# ---------------------------------------------------------------------------- #
# Backbone options
# ---------------------------------------------------------------------------- #
_C.MODEL.BACKBONE = CN()
# The backbone conv body to use
# The string must match a function that is imported in modeling.model_builder
# (e.g., 'FPN.add_fpn_ResNet101_conv5_body' to specify a ResNet-101-FPN
# backbone)
_C.MODEL.BACKBONE.CONV_BODY = "R-50-C4"
# Add StopGrad at a specified stage so the bottom layers are frozen
_C.MODEL.BACKBONE.FREEZE_CONV_BODY_AT = 2
_C.MODEL.BACKBONE.OUT_CHANNELS = 256 * 4
# ---------------------------------------------------------------------------- #
# RPN options
# ---------------------------------------------------------------------------- #
_C.MODEL.RPN = CN()
_C.MODEL.RPN.USE_FPN = False
# RPN anchor sizes given in relative size w.r.t. BASE_ANCHOR_SIZE
_C.MODEL.RPN.SCALES = (0.125, 0.25, 0.5, 1., 2.)
# Base RPN anchor size given in absolute pixels w.r.t. the scaled network input
_C.MODEL.RPN.BASE_ANCHOR_SIZE = 256
# Stride of the feature map that RPN is attached.
# For FPN, number of strides should match number of scales
_C.MODEL.RPN.ANCHOR_STRIDE = (16,)
# RPN anchor aspect ratios
_C.MODEL.RPN.ASPECT_RATIOS = (0.5, 1.0, 2.0)
# Remove RPN anchors that go outside the image by RPN_STRADDLE_THRESH pixels
# Set to -1 or a large value, e.g. 100000, to disable pruning anchors
_C.MODEL.RPN.STRADDLE_THRESH = 0
# Minimum overlap required between an anchor and ground-truth box for the
# (anchor, gt box) pair to be a positive example (IoU >= FG_IOU_THRESHOLD
# ==> positive RPN example)
_C.MODEL.RPN.FG_IOU_THRESHOLD = 0.7
# Maximum overlap allowed between an anchor and ground-truth box for the
# (anchor, gt box) pair to be a negative examples (IoU < BG_IOU_THRESHOLD
# ==> negative RPN example)
_C.MODEL.RPN.BG_IOU_THRESHOLD = 0.3
# Total number of RPN examples per image
_C.MODEL.RPN.BATCH_SIZE_PER_IMAGE = 256
# Target fraction of foreground (positive) examples per RPN minibatch
_C.MODEL.RPN.POSITIVE_FRACTION = 0.5
# Number of top scoring RPN proposals to keep before applying NMS
# When FPN is used, this is *per FPN level* (not total)
_C.MODEL.RPN.PRE_NMS_TOP_N_TRAIN = 12000
_C.MODEL.RPN.PRE_NMS_TOP_N_TEST = 6000
# Number of top scoring RPN proposals to keep after applying NMS
_C.MODEL.RPN.POST_NMS_TOP_N_TRAIN = 2000
_C.MODEL.RPN.POST_NMS_TOP_N_TEST = 1000
# NMS threshold used on RPN proposals
_C.MODEL.RPN.NMS_THRESH = 0.7
# Proposal height and width both need to be greater than RPN_MIN_SIZE
# (at the scale used during training or inference)
_C.MODEL.RPN.MIN_SIZE = 0
# Number of top scoring RPN proposals to keep after combining proposals from
# all FPN levels
_C.MODEL.RPN.FPN_POST_NMS_TOP_N_TRAIN = 2000
_C.MODEL.RPN.FPN_POST_NMS_TOP_N_TEST = 2000
# ---------------------------------------------------------------------------- #
# ROI HEADS options
# ---------------------------------------------------------------------------- #
_C.MODEL.ROI_HEADS = CN()
_C.MODEL.ROI_HEADS.USE_FPN = False
# Overlap threshold for an RoI to be considered foreground (if >= FG_IOU_THRESHOLD)
_C.MODEL.ROI_HEADS.FG_IOU_THRESHOLD = 0.5
# Overlap threshold for an RoI to be considered background
# (class = 0 if overlap in [0, BG_IOU_THRESHOLD))
_C.MODEL.ROI_HEADS.BG_IOU_THRESHOLD = 0.5
# Default weights on (dx, dy, dw, dh) for normalizing bbox regression targets
# These are empirically chosen to approximately lead to unit variance targets
_C.MODEL.ROI_HEADS.BBOX_REG_WEIGHTS = (10., 10., 5., 5.)
# RoI minibatch size *per image* (number of regions of interest [ROIs])
# Total number of RoIs per training minibatch =
# TRAIN.BATCH_SIZE_PER_IM * TRAIN.IMS_PER_BATCH * NUM_GPUS
# E.g., a common configuration is: 512 * 2 * 8 = 8192
_C.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 512
# Target fraction of RoI minibatch that is labeled foreground (i.e. class > 0)
_C.MODEL.ROI_HEADS.POSITIVE_FRACTION = 0.25
# Only used on test mode
# Minimum score threshold (assuming scores in a [0, 1] range); a value chosen to
# balance obtaining high recall with not having too many low precision
# detections that will slow down inference post processing steps (like NMS)
_C.MODEL.ROI_HEADS.SCORE_THRESH = 0.05
# Overlap threshold used for non-maximum suppression (suppress boxes with
# IoU >= this threshold)
_C.MODEL.ROI_HEADS.NMS = 0.5
# Maximum number of detections to return per image (100 is based on the limit
# established for the COCO dataset)
_C.MODEL.ROI_HEADS.DETECTIONS_PER_IMG = 100
_C.MODEL.ROI_BOX_HEAD = CN()
_C.MODEL.ROI_BOX_HEAD.FEATURE_EXTRACTOR = "ResNet50Conv5ROIFeatureExtractor"
_C.MODEL.ROI_BOX_HEAD.PREDICTOR = "FastRCNNPredictor"
_C.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION = 14
_C.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO = 0
_C.MODEL.ROI_BOX_HEAD.POOLER_SCALES = (1.0 / 16,)
_C.MODEL.ROI_BOX_HEAD.NUM_CLASSES = 81
# Hidden layer dimension when using an MLP for the RoI box head
_C.MODEL.ROI_BOX_HEAD.MLP_HEAD_DIM = 1024
_C.MODEL.ROI_MASK_HEAD = CN()
_C.MODEL.ROI_MASK_HEAD.FEATURE_EXTRACTOR = "ResNet50Conv5ROIFeatureExtractor"
_C.MODEL.ROI_MASK_HEAD.PREDICTOR = "MaskRCNNC4Predictor"
_C.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION = 14
_C.MODEL.ROI_MASK_HEAD.POOLER_SAMPLING_RATIO = 0
_C.MODEL.ROI_MASK_HEAD.POOLER_SCALES = (1.0 / 16,)
_C.MODEL.ROI_MASK_HEAD.MLP_HEAD_DIM = 1024
_C.MODEL.ROI_MASK_HEAD.CONV_LAYERS = (256, 256, 256, 256)
_C.MODEL.ROI_MASK_HEAD.RESOLUTION = 14
_C.MODEL.SHARE_FEATURES_DURING_TRAINING = True
# ---------------------------------------------------------------------------- #
# ResNe[X]t options (ResNets = {ResNet, ResNeXt})
# Note that parts of a resnet may be used for both the backbone and the head
# These options apply to both
# ---------------------------------------------------------------------------- #
_C.MODEL.RESNETS = CN()
# Number of groups to use; 1 ==> ResNet; > 1 ==> ResNeXt
_C.MODEL.RESNETS.NUM_GROUPS = 1
# Baseline width of each group
_C.MODEL.RESNETS.WIDTH_PER_GROUP = 64
# Place the stride 2 conv on the 1x1 filter
# Use True only for the original MSRA ResNet; use False for C2 and Torch models
_C.MODEL.RESNETS.STRIDE_IN_1X1 = True
# Residual transformation function
_C.MODEL.RESNETS.TRANS_FUNC = "BottleneckWithFixedBatchNorm"
# ResNet's stem function (conv1 and pool1)
_C.MODEL.RESNETS.STEM_FUNC = "StemWithFixedBatchNorm"
# Apply dilation in stage "res5"
_C.MODEL.RESNETS.RES5_DILATION = 1
# ---------------------------------------------------------------------------- #
# Solver
# ---------------------------------------------------------------------------- #
_C.SOLVER = CN()
_C.SOLVER.MAX_ITER = 40000
_C.SOLVER.BASE_LR = 0.001
_C.SOLVER.BIAS_LR_FACTOR = 2
_C.SOLVER.MOMENTUM = 0.9
_C.SOLVER.WEIGHT_DECAY = 0.0005
_C.SOLVER.WEIGHT_DECAY_BIAS = 0
_C.SOLVER.GAMMA = 0.1
_C.SOLVER.STEPS = (30000,)
_C.SOLVER.WARMUP_FACTOR = 1.0 / 3
_C.SOLVER.WARMUP_ITERS = 500
_C.SOLVER.WARMUP_METHOD = "linear"
# Parameters for gradient accumulation across steps
_C.SOLVER.ACCUMULATE_STEPS = 1
_C.SOLVER.ACCUMULATE_GRAD = False
# ---------------------------------------------------------------------------- #
# Specific test options
# ---------------------------------------------------------------------------- #
_C.TEST = CN()
_C.TEST.EXPECTED_RESULTS = []
_C.TEST.EXPECTED_RESULTS_SIGMA_TOL = 4
# ---------------------------------------------------------------------------- #
# Misc options
# ---------------------------------------------------------------------------- #
_C.OUTPUT_DIR = "/results"
_C.CHECKPOINT = ""
_C.SAVE_CHECKPOINTS = False
_C.DO_ONLINE_MAP_EVAL = True
_C.PATHS_CATALOG = os.path.join(os.path.dirname(__file__), "paths_catalog.py")
| apache-2.0 | -2,606,254,840,766,577,000 | 38.777778 | 83 | 0.604706 | false |
ajaygarg84/sugar | src/jarabe/frame/activitiestray.py | 1 | 30803 | # Copyright (C) 2006-2007 Red Hat, Inc.
# Copyright (C) 2008 One Laptop Per Child
# Copyright (C) 2010 Collabora Ltd. <http://www.collabora.co.uk/>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from gettext import gettext as _
import tempfile
import os
from gi.repository import GObject
from gi.repository import GConf
from gi.repository import Gio
from gi.repository import GLib
from gi.repository import Gtk
from sugar3.graphics import style
from sugar3.graphics.tray import HTray
from sugar3.graphics.xocolor import XoColor
from sugar3.graphics.radiotoolbutton import RadioToolButton
from sugar3.graphics.toolbutton import ToolButton
from sugar3.graphics.icon import Icon, get_icon_file_name
from sugar3.graphics.palette import Palette
from sugar3.graphics.menuitem import MenuItem
from sugar3.graphics.palettemenu import PaletteMenuBox
from sugar3.graphics.palettemenu import PaletteMenuItem
from sugar3.graphics.palettemenu import PaletteMenuItemSeparator
from sugar3.datastore import datastore
from sugar3 import mime
from sugar3 import env
from jarabe.model import shell
from jarabe.model import invites
from jarabe.model import bundleregistry
from jarabe.model import filetransfer
from jarabe.view.palettes import JournalPalette, CurrentActivityPalette
from jarabe.view.pulsingicon import PulsingIcon
from jarabe.frame.frameinvoker import FrameWidgetInvoker
from jarabe.frame.notification import NotificationIcon
import jarabe.frame
class ActivityButton(RadioToolButton):
def __init__(self, home_activity, group):
RadioToolButton.__init__(self, group=group)
self.set_palette_invoker(FrameWidgetInvoker(self))
self.palette_invoker.cache_palette = False
self._home_activity = home_activity
self._notify_launch_hid = None
self._icon = PulsingIcon()
self._icon.props.base_color = home_activity.get_icon_color()
self._icon.props.pulse_color = \
XoColor('%s,%s' % (style.COLOR_BUTTON_GREY.get_svg(),
style.COLOR_TOOLBAR_GREY.get_svg()))
if home_activity.get_icon_path():
self._icon.props.file = home_activity.get_icon_path()
else:
self._icon.props.icon_name = 'image-missing'
self.set_icon_widget(self._icon)
self._icon.show()
if home_activity.props.launch_status == shell.Activity.LAUNCHING:
self._icon.props.pulsing = True
self._notify_launch_hid = home_activity.connect( \
'notify::launch-status', self.__notify_launch_status_cb)
elif home_activity.props.launch_status == shell.Activity.LAUNCH_FAILED:
self._on_failed_launch()
def create_palette(self):
if self._home_activity.is_journal():
palette = JournalPalette(self._home_activity)
else:
palette = CurrentActivityPalette(self._home_activity)
palette.connect('done', self.__palette_item_selected_cb)
palette.set_group_id('frame')
self.set_palette(palette)
def __palette_item_selected_cb(self, widget):
frame = jarabe.frame.get_view()
frame.hide()
def _on_failed_launch(self):
# TODO http://bugs.sugarlabs.org/ticket/2007
pass
def __notify_launch_status_cb(self, home_activity, pspec):
home_activity.disconnect(self._notify_launch_hid)
self._notify_launch_hid = None
if home_activity.props.launch_status == shell.Activity.LAUNCH_FAILED:
self._on_failed_launch()
else:
self._icon.props.pulsing = False
class InviteButton(ToolButton):
"""Invite to shared activity"""
__gsignals__ = {
'remove-invite': (GObject.SignalFlags.RUN_FIRST, None, ([])),
}
def __init__(self, invite):
ToolButton.__init__(self)
self._invite = invite
self.connect('clicked', self.__clicked_cb)
self.connect('destroy', self.__destroy_cb)
bundle_registry = bundleregistry.get_registry()
bundle = bundle_registry.get_bundle(invite.get_bundle_id())
self._icon = Icon()
self._icon.props.xo_color = invite.get_color()
if bundle is not None:
self._icon.props.file = bundle.get_icon()
else:
self._icon.props.icon_name = 'image-missing'
self.set_icon_widget(self._icon)
self._icon.show()
palette = InvitePalette(invite)
palette.props.invoker = FrameWidgetInvoker(self)
palette.set_group_id('frame')
palette.connect('remove-invite', self.__remove_invite_cb)
self.set_palette(palette)
self._notif_icon = NotificationIcon()
self._notif_icon.connect('button-release-event',
self.__button_release_event_cb)
self._notif_icon.props.xo_color = invite.get_color()
if bundle is not None:
self._notif_icon.props.icon_filename = bundle.get_icon()
else:
self._notif_icon.props.icon_name = 'image-missing'
frame = jarabe.frame.get_view()
frame.add_notification(self._notif_icon, Gtk.CornerType.TOP_LEFT)
def __button_release_event_cb(self, icon, event):
if self._notif_icon is not None:
frame = jarabe.frame.get_view()
frame.remove_notification(self._notif_icon)
self._notif_icon = None
self._invite.join()
self.emit('remove-invite')
def __clicked_cb(self, button):
self.palette.popup(immediate=True, state=Palette.SECONDARY)
def __remove_invite_cb(self, palette):
self.emit('remove-invite')
def __destroy_cb(self, button):
if self._notif_icon is not None:
frame = jarabe.frame.get_view()
frame.remove_notification(self._notif_icon)
self._notif_icon = None
class InvitePalette(Palette):
"""Palette for frame or notification icon for invites."""
__gsignals__ = {
'remove-invite': (GObject.SignalFlags.RUN_FIRST, None, ([])),
}
def __init__(self, invite):
Palette.__init__(self, '')
self._invite = invite
menu_item = MenuItem(_('Join'), icon_name='dialog-ok')
menu_item.connect('activate', self.__join_activate_cb)
self.menu.append(menu_item)
menu_item.show()
menu_item = MenuItem(_('Decline'), icon_name='dialog-cancel')
menu_item.connect('activate', self.__decline_activate_cb)
self.menu.append(menu_item)
menu_item.show()
bundle_id = invite.get_bundle_id()
registry = bundleregistry.get_registry()
self._bundle = registry.get_bundle(bundle_id)
if self._bundle:
name = self._bundle.get_name()
else:
name = bundle_id
self.set_primary_text(GLib.markup_escape_text(name))
def __join_activate_cb(self, menu_item):
self._invite.join()
self.emit('remove-invite')
def __decline_activate_cb(self, menu_item):
self.emit('remove-invite')
class ActivitiesTray(HTray):
def __init__(self):
HTray.__init__(self)
self._buttons = {}
self._invite_to_item = {}
self._freeze_button_clicks = False
self._home_model = shell.get_model()
self._home_model.connect('activity-added', self.__activity_added_cb)
self._home_model.connect('activity-removed',
self.__activity_removed_cb)
self._home_model.connect('active-activity-changed',
self.__activity_changed_cb)
self._home_model.connect('tabbing-activity-changed',
self.__tabbing_activity_changed_cb)
self._invites = invites.get_instance()
for invite in self._invites:
self._add_invite(invite)
self._invites.connect('invite-added', self.__invite_added_cb)
self._invites.connect('invite-removed', self.__invite_removed_cb)
filetransfer.new_file_transfer.connect(self.__new_file_transfer_cb)
def __activity_added_cb(self, home_model, home_activity):
logging.debug('__activity_added_cb: %r', home_activity)
if self.get_children():
group = self.get_children()[0]
else:
group = None
button = ActivityButton(home_activity, group)
self.add_item(button)
self._buttons[home_activity] = button
button.connect('clicked', self.__activity_clicked_cb, home_activity)
button.show()
def __activity_removed_cb(self, home_model, home_activity):
logging.debug('__activity_removed_cb: %r', home_activity)
button = self._buttons[home_activity]
self.remove_item(button)
del self._buttons[home_activity]
def _activate_activity(self, home_activity):
button = self._buttons[home_activity]
self._freeze_button_clicks = True
button.props.active = True
self._freeze_button_clicks = False
self.scroll_to_item(button)
# Redraw immediately.
# The widget may not be realized yet, and then there is no window.
x11_window = self.get_window()
if x11_window:
x11_window.process_updates(True)
def __activity_changed_cb(self, home_model, home_activity):
logging.debug('__activity_changed_cb: %r', home_activity)
# Only select the new activity, if there is no tabbing activity.
if home_model.get_tabbing_activity() is None:
self._activate_activity(home_activity)
def __tabbing_activity_changed_cb(self, home_model, home_activity):
logging.debug('__tabbing_activity_changed_cb: %r', home_activity)
# If the tabbing_activity is set to None just do nothing.
# The active activity will be updated a bit later (and it will
# be set to the activity that is currently selected).
if home_activity is None:
return
self._activate_activity(home_activity)
def __activity_clicked_cb(self, button, home_activity):
if not self._freeze_button_clicks and button.props.active:
logging.debug('ActivitiesTray.__activity_clicked_cb')
window = home_activity.get_window()
if window:
window.activate(Gtk.get_current_event_time())
frame = jarabe.frame.get_view()
frame.hide()
def __remove_invite_cb(self, icon, invite):
self._invites.remove_invite(invite)
def __invite_added_cb(self, invites_model, invite):
self._add_invite(invite)
def __invite_removed_cb(self, invites_model, invite):
self._remove_invite(invite)
def _add_invite(self, invite):
"""Add an invite"""
item = InviteButton(invite)
item.connect('remove-invite', self.__remove_invite_cb, invite)
self.add_item(item)
item.show()
self._invite_to_item[invite] = item
def _remove_invite(self, invite):
self.remove_item(self._invite_to_item[invite])
self._invite_to_item[invite].destroy()
del self._invite_to_item[invite]
def __new_file_transfer_cb(self, **kwargs):
file_transfer = kwargs['file_transfer']
logging.debug('__new_file_transfer_cb %r', file_transfer)
if isinstance(file_transfer, filetransfer.IncomingFileTransfer):
button = IncomingTransferButton(file_transfer)
elif isinstance(file_transfer, filetransfer.OutgoingFileTransfer):
button = OutgoingTransferButton(file_transfer)
self.add_item(button)
button.show()
class BaseTransferButton(ToolButton):
"""Button with a notification attached
"""
def __init__(self, file_transfer):
ToolButton.__init__(self)
self.file_transfer = file_transfer
file_transfer.connect('notify::state', self.__notify_state_cb)
icon = Icon()
self.props.icon_widget = icon
icon.show()
self.notif_icon = NotificationIcon()
self.notif_icon.connect('button-release-event',
self.__button_release_event_cb)
self.connect('clicked', self.__button_clicked_cb)
def __button_release_event_cb(self, icon, event):
if self.notif_icon is not None:
frame = jarabe.frame.get_view()
frame.remove_notification(self.notif_icon)
self.notif_icon = None
def __button_clicked_cb(self, button):
self.palette.popup(immediate=True, state=Palette.SECONDARY)
def remove(self):
frame = jarabe.frame.get_view()
frame.remove_notification(self.notif_icon)
self.props.parent.remove(self)
def __notify_state_cb(self, file_transfer, pspec):
logging.debug('_update state: %r %r', file_transfer.props.state,
file_transfer.reason_last_change)
if file_transfer.props.state == filetransfer.FT_STATE_CANCELLED:
if file_transfer.reason_last_change == \
filetransfer.FT_REASON_LOCAL_STOPPED:
self.remove()
class IncomingTransferButton(BaseTransferButton):
"""UI element representing an ongoing incoming file transfer
"""
def __init__(self, file_transfer):
BaseTransferButton.__init__(self, file_transfer)
self._ds_object = datastore.create()
file_transfer.connect('notify::state', self.__notify_state_cb)
file_transfer.connect('notify::transferred-bytes',
self.__notify_transferred_bytes_cb)
icons = Gio.content_type_get_icon(file_transfer.mime_type).props.names
icons.append('application-octet-stream')
for icon_name in icons:
icon_name = 'transfer-from-%s' % icon_name
file_name = get_icon_file_name(icon_name)
if file_name is not None:
self.props.icon_widget.props.icon_name = icon_name
self.notif_icon.props.icon_name = icon_name
break
icon_color = file_transfer.buddy.props.color
self.props.icon_widget.props.xo_color = icon_color
self.notif_icon.props.xo_color = icon_color
frame = jarabe.frame.get_view()
frame.add_notification(self.notif_icon,
Gtk.CornerType.TOP_LEFT)
def create_palette(self):
palette = IncomingTransferPalette(self.file_transfer)
palette.connect('dismiss-clicked', self.__dismiss_clicked_cb)
palette.props.invoker = FrameWidgetInvoker(self)
palette.set_group_id('frame')
return palette
def __notify_state_cb(self, file_transfer, pspec):
if file_transfer.props.state == filetransfer.FT_STATE_OPEN:
logging.debug('__notify_state_cb OPEN')
self._ds_object.metadata['title'] = file_transfer.title
self._ds_object.metadata['description'] = file_transfer.description
self._ds_object.metadata['progress'] = '0'
self._ds_object.metadata['keep'] = '0'
self._ds_object.metadata['buddies'] = ''
self._ds_object.metadata['preview'] = ''
self._ds_object.metadata['icon-color'] = \
file_transfer.buddy.props.color.to_string()
self._ds_object.metadata['mime_type'] = file_transfer.mime_type
elif file_transfer.props.state == filetransfer.FT_STATE_COMPLETED:
logging.debug('__notify_state_cb COMPLETED')
self._ds_object.metadata['progress'] = '100'
self._ds_object.file_path = file_transfer.destination_path
datastore.write(self._ds_object, transfer_ownership=True,
reply_handler=self.__reply_handler_cb,
error_handler=self.__error_handler_cb)
elif file_transfer.props.state == filetransfer.FT_STATE_CANCELLED:
logging.debug('__notify_state_cb CANCELLED')
object_id = self._ds_object.object_id
if object_id is not None:
self._ds_object.destroy()
datastore.delete(object_id)
self._ds_object = None
def __notify_transferred_bytes_cb(self, file_transfer, pspec):
        progress = file_transfer.props.transferred_bytes / \
            float(file_transfer.file_size)
        self._ds_object.metadata['progress'] = str(int(progress * 100))
datastore.write(self._ds_object, update_mtime=False)
def __reply_handler_cb(self):
logging.debug('__reply_handler_cb %r', self._ds_object.object_id)
def __error_handler_cb(self, error):
logging.debug('__error_handler_cb %r %s', self._ds_object.object_id,
error)
def __dismiss_clicked_cb(self, palette):
self.remove()
class OutgoingTransferButton(BaseTransferButton):
"""UI element representing an ongoing outgoing file transfer
"""
def __init__(self, file_transfer):
BaseTransferButton.__init__(self, file_transfer)
icons = Gio.content_type_get_icon(file_transfer.mime_type).props.names
icons.append('application-octet-stream')
for icon_name in icons:
icon_name = 'transfer-to-%s' % icon_name
file_name = get_icon_file_name(icon_name)
if file_name is not None:
self.props.icon_widget.props.icon_name = icon_name
self.notif_icon.props.icon_name = icon_name
break
client = GConf.Client.get_default()
icon_color = XoColor(client.get_string('/desktop/sugar/user/color'))
self.props.icon_widget.props.xo_color = icon_color
self.notif_icon.props.xo_color = icon_color
frame = jarabe.frame.get_view()
frame.add_notification(self.notif_icon,
Gtk.CornerType.TOP_LEFT)
def create_palette(self):
palette = OutgoingTransferPalette(self.file_transfer)
palette.connect('dismiss-clicked', self.__dismiss_clicked_cb)
palette.props.invoker = FrameWidgetInvoker(self)
palette.set_group_id('frame')
return palette
def __dismiss_clicked_cb(self, palette):
self.remove()
class BaseTransferPalette(Palette):
"""Base palette class for frame or notification icon for file transfers
"""
__gtype_name__ = 'SugarBaseTransferPalette'
__gsignals__ = {
'dismiss-clicked': (GObject.SignalFlags.RUN_FIRST, None, ([])),
}
def __init__(self, file_transfer):
Palette.__init__(self, GLib.markup_escape_text(file_transfer.title))
self.file_transfer = file_transfer
self.progress_bar = None
self.progress_label = None
self._notify_transferred_bytes_handler = None
self.connect('popup', self.__popup_cb)
self.connect('popdown', self.__popdown_cb)
def __popup_cb(self, palette):
self.update_progress()
self._notify_transferred_bytes_handler = \
self.file_transfer.connect('notify::transferred_bytes',
self.__notify_transferred_bytes_cb)
def __popdown_cb(self, palette):
if self._notify_transferred_bytes_handler is not None:
self.file_transfer.disconnect(
self._notify_transferred_bytes_handler)
self._notify_transferred_bytes_handler = None
def __notify_transferred_bytes_cb(self, file_transfer, pspec):
self.update_progress()
def _format_size(self, size):
if size < 1024:
return _('%dB') % size
elif size < 1048576:
return _('%dKB') % (size / 1024)
else:
return _('%dMB') % (size / 1048576)
def update_progress(self):
logging.debug('update_progress: %r',
self.file_transfer.props.transferred_bytes)
if self.progress_bar is None:
return
self.progress_bar.props.fraction = \
self.file_transfer.props.transferred_bytes / \
float(self.file_transfer.file_size)
logging.debug('update_progress: %r', self.progress_bar.props.fraction)
transferred = self._format_size(
self.file_transfer.props.transferred_bytes)
total = self._format_size(self.file_transfer.file_size)
# TRANS: file transfer, bytes transferred, e.g. 128 of 1024
self.progress_label.props.label = _('%s of %s') % (transferred, total)
class IncomingTransferPalette(BaseTransferPalette):
"""Palette for frame or notification icon for incoming file transfers
"""
__gtype_name__ = 'SugarIncomingTransferPalette'
def __init__(self, file_transfer):
BaseTransferPalette.__init__(self, file_transfer)
self.file_transfer.connect('notify::state', self.__notify_state_cb)
nick = str(self.file_transfer.buddy.props.nick)
label = GLib.markup_escape_text(_('Transfer from %s') % (nick,))
self.props.secondary_text = label
self._update()
def __notify_state_cb(self, file_transfer, pspec):
self._update()
def _update(self):
box = PaletteMenuBox()
self.set_content(box)
box.show()
logging.debug('_update state: %r', self.file_transfer.props.state)
if self.file_transfer.props.state == filetransfer.FT_STATE_PENDING:
menu_item = PaletteMenuItem(_('Accept'))
icon = Icon(icon_name='dialog-ok', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__accept_activate_cb)
box.append_item(menu_item)
menu_item.show()
menu_item = PaletteMenuItem(_('Decline'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__decline_activate_cb)
box.append_item(menu_item)
menu_item.show()
separator = PaletteMenuItemSeparator()
box.append_item(separator)
separator.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
if self.file_transfer.description:
label = Gtk.Label(label=self.file_transfer.description)
inner_box.add(label)
label.show()
mime_type = self.file_transfer.mime_type
type_description = mime.get_mime_description(mime_type)
size = self._format_size(self.file_transfer.file_size)
label = Gtk.Label(label='%s (%s)' % (size, type_description))
inner_box.add(label)
label.show()
elif self.file_transfer.props.state in \
[filetransfer.FT_STATE_ACCEPTED, filetransfer.FT_STATE_OPEN]:
menu_item = PaletteMenuItem(_('Cancel'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__cancel_activate_cb)
box.append_item(menu_item)
menu_item.show()
separator = PaletteMenuItemSeparator()
box.append_item(separator)
separator.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
self.progress_bar = Gtk.ProgressBar()
inner_box.add(self.progress_bar)
self.progress_bar.show()
self.progress_label = Gtk.Label(label='')
inner_box.add(self.progress_label)
self.progress_label.show()
self.update_progress()
elif self.file_transfer.props.state == filetransfer.FT_STATE_COMPLETED:
menu_item = PaletteMenuItem(_('Dismiss'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__dismiss_activate_cb)
box.append_item(menu_item)
menu_item.show()
self.update_progress()
elif self.file_transfer.props.state == filetransfer.FT_STATE_CANCELLED:
if self.file_transfer.reason_last_change == \
filetransfer.FT_REASON_REMOTE_STOPPED:
menu_item = PaletteMenuItem(_('Dismiss'))
icon = Icon(icon_name='dialog-cancel',
icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__dismiss_activate_cb)
box.append_item(menu_item)
menu_item.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
text = _('The other participant canceled the file transfer')
label = Gtk.Label(label=text)
inner_box.add(label)
label.show()
def __accept_activate_cb(self, menu_item):
#TODO: figure out the best place to get rid of that temp file
extension = mime.get_primary_extension(self.file_transfer.mime_type)
if extension is None:
extension = '.bin'
fd, file_path = tempfile.mkstemp(suffix=extension,
prefix=self._sanitize(self.file_transfer.title),
dir=os.path.join(env.get_profile_path(), 'data'))
os.close(fd)
os.unlink(file_path)
self.file_transfer.accept(file_path)
def _sanitize(self, file_name):
file_name = file_name.replace('/', '_')
file_name = file_name.replace('.', '_')
file_name = file_name.replace('?', '_')
return file_name
def __decline_activate_cb(self, menu_item):
self.file_transfer.cancel()
def __cancel_activate_cb(self, menu_item):
self.file_transfer.cancel()
def __dismiss_activate_cb(self, menu_item):
self.emit('dismiss-clicked')
class OutgoingTransferPalette(BaseTransferPalette):
"""Palette for frame or notification icon for outgoing file transfers
"""
__gtype_name__ = 'SugarOutgoingTransferPalette'
def __init__(self, file_transfer):
BaseTransferPalette.__init__(self, file_transfer)
self.progress_bar = None
self.progress_label = None
self.file_transfer.connect('notify::state', self.__notify_state_cb)
nick = str(file_transfer.buddy.props.nick)
label = GLib.markup_escape_text(_('Transfer to %s') % (nick,))
self.props.secondary_text = label
self._update()
def __notify_state_cb(self, file_transfer, pspec):
self._update()
def _update(self):
new_state = self.file_transfer.props.state
logging.debug('_update state: %r', new_state)
box = PaletteMenuBox()
self.set_content(box)
box.show()
if new_state == filetransfer.FT_STATE_PENDING:
menu_item = PaletteMenuItem(_('Cancel'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__cancel_activate_cb)
box.append_item(menu_item)
menu_item.show()
separator = PaletteMenuItemSeparator()
box.append_item(separator)
separator.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
if self.file_transfer.description:
label = Gtk.Label(label=self.file_transfer.description)
inner_box.add(label)
label.show()
mime_type = self.file_transfer.mime_type
type_description = mime.get_mime_description(mime_type)
size = self._format_size(self.file_transfer.file_size)
label = Gtk.Label(label='%s (%s)' % (size, type_description))
inner_box.add(label)
label.show()
elif new_state in [filetransfer.FT_STATE_ACCEPTED,
filetransfer.FT_STATE_OPEN]:
menu_item = PaletteMenuItem(_('Cancel'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__cancel_activate_cb)
box.append_item(menu_item)
menu_item.show()
separator = PaletteMenuItemSeparator()
box.append_item(separator)
separator.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
self.progress_bar = Gtk.ProgressBar()
inner_box.add(self.progress_bar)
self.progress_bar.show()
self.progress_label = Gtk.Label(label='')
inner_box.add(self.progress_label)
self.progress_label.show()
self.update_progress()
elif new_state in [filetransfer.FT_STATE_COMPLETED,
filetransfer.FT_STATE_CANCELLED]:
menu_item = PaletteMenuItem(_('Dismiss'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__dismiss_activate_cb)
box.append_item(menu_item)
menu_item.show()
self.update_progress()
def __cancel_activate_cb(self, menu_item):
self.file_transfer.cancel()
def __dismiss_activate_cb(self, menu_item):
self.emit('dismiss-clicked')
| gpl-2.0 | -8,059,462,500,647,446,000 | 36.610501 | 79 | 0.610103 | false |
explora26/zephyr | doc/conf.py | 1 | 14566 | # -*- coding: utf-8 -*-
#
# Zephyr documentation build configuration file, created by
# sphinx-quickstart on Fri May 8 11:43:01 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from subprocess import CalledProcessError, check_output, DEVNULL
if "ZEPHYR_BASE" not in os.environ:
sys.exit("$ZEPHYR_BASE environment variable undefined.")
ZEPHYR_BASE = os.path.abspath(os.environ["ZEPHYR_BASE"])
if "ZEPHYR_BUILD" not in os.environ:
sys.exit("$ZEPHYR_BUILD environment variable undefined.")
ZEPHYR_BUILD = os.path.abspath(os.environ["ZEPHYR_BUILD"])
# Add the 'extensions' directory to sys.path, to enable finding Sphinx
# extensions within.
sys.path.insert(0, os.path.join(ZEPHYR_BASE, 'doc', 'extensions'))
# Add the directory which contains the runners package as well,
# for autodoc directives on runners.xyz.
sys.path.insert(0, os.path.join(ZEPHYR_BASE, 'scripts', 'west_commands'))
west_found = False
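# Try to locate an installed west; if found, add its sources to sys.path so
# the west API pages can be built, otherwise those pages are excluded below.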
try:
desc = check_output(['west', 'list', '-f{abspath}', 'west'],
stderr=DEVNULL,
cwd=os.path.dirname(__file__))
west_path = desc.decode(sys.getdefaultencoding()).strip()
# Add west, to be able to pull in its API docs.
sys.path.append(os.path.join(west_path, 'src'))
west_found = True
except FileNotFoundError as e:
# west not installed
pass
except CalledProcessError as e:
# west not able to list itself
pass
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'breathe', 'sphinx.ext.todo',
'sphinx.ext.extlinks',
'sphinx.ext.autodoc',
'zephyr.application',
'zephyr.html_redirects',
'only.eager_only',
'zephyr.link-roles'
]
# Only use SVG converter when it is really needed, e.g. LaTeX.
if tags.has("svgconvert"):
extensions.append('sphinxcontrib.rsvgconverter')
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Zephyr Project'
copyright = u'2015-2019 Zephyr Project members and individual contributors'
author = u'The Zephyr Project'
# The following code tries to extract the version information by reading the
# VERSION file, when Sphinx is run directly (e.g. by Read the Docs).
try:
version_major = None
version_minor = None
patchlevel = None
extraversion = None
for line in open(os.path.join(ZEPHYR_BASE, 'VERSION')):
key, val = [x.strip() for x in line.split('=', 2)]
if key == 'VERSION_MAJOR':
version_major = val
if key == 'VERSION_MINOR':
version_minor = val
elif key == 'PATCHLEVEL':
patchlevel = val
elif key == 'EXTRAVERSION':
extraversion = val
if version_major and version_minor and patchlevel and extraversion:
break
except:
pass
finally:
    if version_major and version_minor and patchlevel and extraversion is not None:
version = release = version_major + '.' + version_minor + '.' + patchlevel
if extraversion != '':
version = release = version + '-' + extraversion
else:
sys.stderr.write('Warning: Could not extract kernel version\n')
version = release = "unknown version"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
if not west_found:
exclude_patterns.append('**/*west-apis*')
else:
exclude_patterns.append('**/*west-not-found*')
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Additional lexer for Pygments (syntax highlighting)
from lexer.DtsLexer import DtsLexer
from sphinx.highlighting import lexers
lexers['DTS'] = DtsLexer()
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
rst_epilog = """
.. include:: /substitutions.txt
"""
# -- Options for HTML output ----------------------------------------------
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
if tags.has('release'):
is_release = True
docs_title = 'Docs / %s' %(version)
else:
is_release = False
docs_title = 'Docs / Latest'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Zephyr Project Documentation"
# This value determines the text for the permalink; it defaults to "¶".
# Set it to None or the empty string to disable permalinks.
#html_add_permalinks = ""
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'images/Zephyr-Kite-logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'images/zp_favicon.png'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['{}/doc/static'.format(ZEPHYR_BASE)]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
html_split_index = True
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = tags.has('development')
# If true, license is shown in the HTML footer. Default is True.
html_show_license = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
sourcelink_suffix = '.txt'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'zephyrdoc'
# Custom added feature to allow redirecting old URLs
#
# list of tuples (old_url, new_url) for pages to redirect
# (URLs should be relative to document root, only)
html_redirect_pages = [
('contribute/contribute_guidelines', 'contribute/index'),
('application/application', 'application/index.rst'),
('security/security', 'security/index'),
('boards/boards', 'boards/index'),
('samples/samples', 'samples/index'),
('releases/release-notes', 'releases/index'),
('getting_started/getting_starting', 'getting_started/index'),
('introduction/introducing_zephyr', 'introduction/index'),
('api/index', 'reference/index'),
('api/api', 'reference/index'),
('subsystems/subsystems', 'reference/index'),
('kernel/kernel', 'reference/kernel/index'),
]
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
    'preamble': r'\setcounter{tocdepth}{2}',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'zephyr.tex', u'Zephyr Project Documentation',
u'many', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'zephyr', u'Zephyr Project Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'zephyr', u'Zephyr Project Documentation',
author, 'Zephyr', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
breathe_projects = {
"Zephyr": "{}/doxygen/xml".format(ZEPHYR_BUILD),
"doc-examples": "{}/doxygen/xml".format(ZEPHYR_BUILD)
}
breathe_default_project = "Zephyr"
# Qualifiers to a function are causing Sphinx/Breathe to warn about
# Error when parsing function declaration and more. This is a list
# of strings that the parser additionally should accept as
# attributes.
cpp_id_attributes = ['__syscall', '__syscall_inline', '__deprecated',
'__may_alias', '__used', '__unused', '__weak',
'__DEPRECATED_MACRO', 'FUNC_NORETURN' ]
# docs_title is used in the breadcrumb title in the zephyr docs theme
html_context = {
'show_license': html_show_license,
'docs_title': docs_title,
'is_release': is_release,
'theme_logo_only': False,
'current_version': version,
'versions': ( ("latest", "/"),
("1.13.0", "/1.13.0/"),
("1.12.0", "/1.12.0/"),
("1.11.0", "/1.11.0/"),
("1.10.0", "/1.10.0/"),
("1.9.2", "/1.9.0/"),
)
}
extlinks = {'jira': ('https://jira.zephyrproject.org/browse/%s', ''),
'github': ('https://github.com/zephyrproject-rtos/zephyr/issues/%s', '')
}
# some configuration for linkcheck builder
# noticed that we're getting false-positive link errors on JIRA, I suspect
# because it's taking too long for the server to respond so bump up the
# timeout (default=5) and turn off anchor checks (so only a HEAD request is
# done - much faster) Leave the ignore commented in case we want to remove
# jira link checks later...
linkcheck_timeout = 30
linkcheck_workers = 10
# linkcheck_ignore = [r'https://jira\.zephyrproject\.org/']
linkcheck_anchors = False
def setup(app):
app.add_stylesheet("zephyr-custom.css")
app.add_javascript("zephyr-custom.js")
| apache-2.0 | -3,894,528,861,728,211,000 | 32.405963 | 84 | 0.682595 | false |
0xbc/pyvex | setup.py | 1 | 5029 | # pylint: disable=no-name-in-module,import-error
import os
import urllib2
import subprocess
import sys
import shutil
import glob
import tarfile
import multiprocessing
import platform
try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
from distutils.util import get_platform
from distutils.errors import LibError
from distutils.command.build import build as _build
if sys.platform in ('win32', 'cygwin'):
LIBRARY_FILE = 'pyvex.dll'
STATIC_LIBRARY_FILE = 'pyvex.lib'
elif sys.platform == 'darwin':
LIBRARY_FILE = "libpyvex.dylib"
STATIC_LIBRARY_FILE = 'libpyvex.a'
else:
LIBRARY_FILE = "libpyvex.so"
STATIC_LIBRARY_FILE = 'libpyvex.a'
VEX_LIB_NAME = "vex" # can also be vex-amd64-linux
VEX_PATH = os.path.join('..', 'vex')
if not os.path.exists(VEX_PATH):
VEX_URL = 'https://github.com/angr/vex/archive/master.tar.gz'
with open('master.tar.gz', 'wb') as v:
v.write(urllib2.urlopen(VEX_URL).read())
with tarfile.open('master.tar.gz') as tar:
tar.extractall()
VEX_PATH='vex-master'
def _build_vex():
e = os.environ.copy()
e['MULTIARCH'] = '1'
e['DEBUG'] = '1'
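    # Try the MSVC makefile first, then fall back to the GNU makefile, so the
    # build works with whichever toolchain is available on this platform.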
cmd1 = ['nmake', '/f', 'Makefile-msvc', 'all']
cmd2 = ['make', '-f', 'Makefile-gcc', '-j', str(multiprocessing.cpu_count()), 'all']
for cmd in (cmd1, cmd2):
try:
if subprocess.call(cmd, cwd=VEX_PATH, env=e) == 0:
break
except OSError:
continue
else:
raise LibError("Unable to build libVEX.")
def _build_pyvex():
e = os.environ.copy()
e['VEX_LIB_PATH'] = os.path.join('..', VEX_PATH)
e['VEX_INCLUDE_PATH'] = os.path.join('..', VEX_PATH, 'pub')
e['VEX_LIB_FILE'] = os.path.join('..', VEX_PATH, 'libvex.lib')
cmd1 = ['nmake', '/f', 'Makefile-msvc']
cmd2 = ['make', '-j', str(multiprocessing.cpu_count())]
for cmd in (cmd1, cmd2):
try:
if subprocess.call(cmd, cwd='pyvex_c', env=e) == 0:
break
except OSError as err:
continue
else:
raise LibError("Unable to build libpyvex.")
def _shuffle_files():
shutil.rmtree('pyvex/lib', ignore_errors=True)
shutil.rmtree('pyvex/include', ignore_errors=True)
os.mkdir('pyvex/lib')
os.mkdir('pyvex/include')
shutil.copy(os.path.join('pyvex_c', LIBRARY_FILE), 'pyvex/lib')
shutil.copy(os.path.join('pyvex_c', STATIC_LIBRARY_FILE), 'pyvex/lib')
shutil.copy('pyvex_c/pyvex.h', 'pyvex/include')
for f in glob.glob(os.path.join(VEX_PATH, 'pub', '*')):
shutil.copy(f, 'pyvex/include')
def _build_ffi():
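    # Build the CFFI interface definitions from the public VEX headers.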
import make_ffi
try:
make_ffi.doit(os.path.join(VEX_PATH,'pub'))
except Exception as e:
print repr(e)
raise
class build(_build):
def run(self):
self.execute(_build_vex, (), msg="Building libVEX")
self.execute(_build_pyvex, (), msg="Building libpyvex")
self.execute(_shuffle_files, (), msg="Copying libraries and headers")
self.execute(_build_ffi, (), msg="Creating CFFI defs file")
_build.run(self)
cmdclass = { 'build': build }
try:
from setuptools.command.develop import develop as _develop
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class develop(_develop):
def run(self):
self.execute(_build_vex, (), msg="Building libVEX")
self.execute(_build_pyvex, (), msg="Building libpyvex")
self.execute(_shuffle_files, (), msg="Copying libraries and headers")
self.execute(_build_ffi, (), msg="Creating CFFI defs file")
_develop.run(self)
cmdclass['develop'] = develop
class bdist_egg(_bdist_egg):
def run(self):
self.run_command('build')
_bdist_egg.run(self)
cmdclass['bdist_egg'] = bdist_egg
except ImportError:
print "Proper 'develop' support unavailable."
if 'bdist_wheel' in sys.argv and '--plat-name' not in sys.argv:
sys.argv.append('--plat-name')
name = get_platform()
if 'linux' in name:
# linux_* platform tags are disallowed because the python ecosystem is fubar
# linux builds should be built in the centos 5 vm for maximum compatibility
sys.argv.append('manylinux1_' + platform.machine())
else:
# https://www.python.org/dev/peps/pep-0425/
sys.argv.append(name.replace('.', '_').replace('-', '_'))
setup(
name="pyvex", version='6.7.3.26', description="A Python interface to libVEX and VEX IR.",
packages=['pyvex', 'pyvex.lift', 'pyvex.lift.util'],
cmdclass=cmdclass,
install_requires=[ 'pycparser', 'cffi>=1.0.3', 'archinfo' ],
setup_requires=[ 'pycparser', 'cffi>=1.0.3' ],
include_package_data=True,
package_data={
'pyvex': ['lib/*', 'include/*']
}
)
| bsd-2-clause | 5,826,168,124,604,355,000 | 32.751678 | 140 | 0.616624 | false |
eonpatapon/contrail-controller | src/config/fabric-ansible/job_manager/job_messages.py | 1 | 9479 | class MsgBundle(object):
JOB_TEMPLATE_MISSING = 1,
JOB_EXECUTION_ID_MISSING = 2,
JOB_SUMMARY_MESSAGE_HDR = 3,
JOB_RESULT_STATUS_NONE = 4,
JOB_MULTI_DEVICE_FAILED_MESSAGE_HDR = 5,
JOB_SINGLE_DEVICE_FAILED_MESSAGE_HDR = 6,
PLAYBOOK_RESULTS_MESSAGE = 7,
PLAYBOOK_EXIT_WITH_ERROR = 8,
PLAYBOOK_RETURN_WITH_ERROR = 9,
NO_PLAYBOOK_INPUT_DATA = 10,
SANDESH_INITIALIZATION_TIMEOUT_ERROR = 11,
INPUT_SCHEMA_INPUT_NOT_FOUND = 12,
DEVICE_JSON_NOT_FOUND = 13,
NO_DEVICE_DATA_FOUND = 14,
NO_CREDENTIALS_FOUND = 15,
INVALID_SCHEMA = 16,
SEND_JOB_LOG_ERROR = 17,
SEND_JOB_EXC_UVE_ERROR = 18,
PLAYBOOK_INPUT_PARSING_ERROR = 19,
PLAYBOOK_EXECUTE_ERROR = 20,
CREATE_JOB_SUMMARY_ERROR = 21,
DEVICE_VENDOR_FAMILY_MISSING = 22,
READ_JOB_TEMPLATE_ERROR = 23,
GET_PLAYBOOK_INFO_ERROR = 24,
PLAYBOOK_NOT_FOUND = 25,
PLAYBOOK_INFO_DEVICE_MISMATCH = 26,
RUN_PLAYBOOK_PROCESS_ERROR = 27,
RUN_PLAYBOOK_ERROR = 28,
SEND_PROUTER_OBJECT_LOG_ERROR = 29,
CLOSE_SANDESH_EXCEPTION = 30,
RUN_PLAYBOOK_PROCESS_TIMEOUT = 31,
PLAYBOOK_EXECUTION_COMPLETE = 32,
START_JOB_MESSAGE = 33,
VNC_INITIALIZATION_ERROR = 34,
JOB_ERROR = 35,
JOB_EXECUTION_COMPLETE = 36,
START_EXE_PB_MSG = 37,
STOP_EXE_PB_MSG = 38,
JOB_EXC_REC_HDR = 39,
EXC_JOB_ERR_HDR = 40,
PLAYBOOK_STATUS_FAILED = 41,
PLAYBOOK_OUTPUT_MISSING = 42,
EMPTY_DEVICE_LIST = 43,
PRODUCT_NAME_MISSING = 44
_msgs = {
'en': {
JOB_TEMPLATE_MISSING: 'job_template_id is missing '
'in the job input',
JOB_EXECUTION_ID_MISSING: 'job_execution_id is missing'
' in the job input',
JOB_SUMMARY_MESSAGE_HDR: 'Job summary: ',
JOB_RESULT_STATUS_NONE: 'Error in getting the '
'job completion '
'status after job execution. \n',
            JOB_MULTI_DEVICE_FAILED_MESSAGE_HDR: 'Job failed '
                                                 'for devices: ',
JOB_SINGLE_DEVICE_FAILED_MESSAGE_HDR: 'Job failed. \n',
PLAYBOOK_RESULTS_MESSAGE: 'Detailed job results: \n',
PLAYBOOK_EXIT_WITH_ERROR: 'Playbook "{playbook_uri}" exited'
' with error.',
PLAYBOOK_RETURN_WITH_ERROR: 'Playbook returned '
'with error',
PLAYBOOK_STATUS_FAILED: 'Playbook completed with status Failure.',
            PLAYBOOK_OUTPUT_MISSING: 'Playbook completed without sending the '
                                     'output with status details.',
NO_PLAYBOOK_INPUT_DATA: 'Playbook input data'
' is not passed. '
'Aborting execution.',
SANDESH_INITIALIZATION_TIMEOUT_ERROR: 'Sandesh '
'initialization '
'timeout after 15s',
            INPUT_SCHEMA_INPUT_NOT_FOUND: 'Required: input parameter'
' in execute-job',
DEVICE_JSON_NOT_FOUND: 'No Device details found for'
' any device',
NO_DEVICE_DATA_FOUND: 'Device details for the device '
'"{device_id}" not found',
NO_CREDENTIALS_FOUND: 'Discovered device "{device_id}" '
'does not have credentials',
INVALID_SCHEMA: 'Error while validating input schema'
' for job template "{job_template_id}" '
': {exc_obj.message}',
SEND_JOB_LOG_ERROR: 'Error while creating the job'
' log for job template '
'"{job_template_fqname}" '
'and execution id "{job_execution_id}"'
' : {exc_msg}',
SEND_JOB_EXC_UVE_ERROR: 'Error while sending the job'
' execution UVE for job '
'template "{job_template_fqname}"'
' and execution id '
'"{job_execution_id}" : {exc_msg}',
            PLAYBOOK_INPUT_PARSING_ERROR: 'Exiting due to playbook'
' input parsing error:'
' {exc_msg}',
PLAYBOOK_EXECUTE_ERROR: 'Exception in playbook process'
' for playbook "{playbook_uri}" '
'(exec_id: {execution_id}): {exc_msg} ',
CREATE_JOB_SUMMARY_ERROR: 'Error while generating the'
' job summary message'
' : {exc_msg}',
DEVICE_VENDOR_FAMILY_MISSING: 'device_vendor or '
'device_family not found'
' for "{device_id}"',
PRODUCT_NAME_MISSING: 'device_product name not found '
' for "{device_id}"',
READ_JOB_TEMPLATE_ERROR: 'Error while reading the '
'job template "{job_template_id}"'
' from database',
GET_PLAYBOOK_INFO_ERROR: 'Error while getting the playbook'
' information from the job'
' template "{job_template_id}"'
' : {exc_msg}',
PLAYBOOK_NOT_FOUND: 'Playbook "{playbook_uri}" '
'does not exist',
PLAYBOOK_INFO_DEVICE_MISMATCH: 'Playbook info not found'
' in the job template'
' for "{device_vendor}"'
' and "{device_family}"',
RUN_PLAYBOOK_PROCESS_ERROR: 'Exception in executing '
'the playbook '
'for "{playbook_uri}"'
' : {exc_msg}',
RUN_PLAYBOOK_ERROR: 'Error while executing the playbook'
' "{playbook_uri}" : {exc_msg}',
SEND_PROUTER_OBJECT_LOG_ERROR: 'Error while creating '
'prouter object log'
' for router '
'"{prouter_fqname}" '
'and execution id '
'"{job_execution_id}"'
' : {exc_msg}',
CLOSE_SANDESH_EXCEPTION: 'Error in confirming the'
' SANDESH message send operation.'
' The Job Logs might '
'not be complete.',
RUN_PLAYBOOK_PROCESS_TIMEOUT: 'Timeout while executing'
' the playbook '
'for "{playbook_uri}" : '
'{exc_msg}. Playbook'
' process is aborted.',
PLAYBOOK_EXECUTION_COMPLETE: 'Completed playbook execution'
' for job template '
'"{job_template_name}" with '
'execution'
' id "{job_execution_id}"',
START_JOB_MESSAGE: 'Starting execution for job '
'template "{job_template_name}"'
' and execution id "{job_execution_id}"',
VNC_INITIALIZATION_ERROR: 'Exiting due to vnc api '
'initialization error: {exc_msg}',
JOB_ERROR: 'Exiting job due to error: {exc_msg} ',
JOB_EXECUTION_COMPLETE: 'Job execution completed '
'successfully.',
START_EXE_PB_MSG: 'Starting to execute the '
'playbook "{playbook_name}"',
STOP_EXE_PB_MSG: 'Finished executing the '
'playbook "{playbook_name}"',
            JOB_EXC_REC_HDR: 'Job Exception received: ',
EXC_JOB_ERR_HDR: 'Error while executing job ',
EMPTY_DEVICE_LIST: 'Need to pass a valid device list '
}
}
@classmethod
def getMessage(cls, msg_id, locale='en', *args, **kwargs):
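        # Look up the message template for the requested locale and id, then
        # format it with the caller-supplied positional/keyword arguments.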
if locale not in MsgBundle._msgs:
return 'Failed to construct job message due to invalid '\
'locale: %s' % locale
if msg_id not in MsgBundle._msgs[locale]:
return 'Failed to construct job message due to invalid '\
'message id: %s' % msg_id
try:
return MsgBundle._msgs[locale][msg_id].format(*args, **kwargs)
except KeyError as ex:
return 'Failed to construct job message due to missing message '\
'arguments: %s' % ex.message
| apache-2.0 | -8,469,327,516,854,512,000 | 51.661111 | 78 | 0.450364 | false |
vitchyr/rlkit | rlkit/torch/sac/sac.py | 1 | 8191 | from collections import OrderedDict, namedtuple
from typing import Tuple
import numpy as np
import torch
import torch.optim as optim
from rlkit.core.loss import LossFunction, LossStatistics
from torch import nn as nn
import rlkit.torch.pytorch_util as ptu
from rlkit.core.eval_util import create_stats_ordered_dict
from rlkit.torch.torch_rl_algorithm import TorchTrainer
from rlkit.core.logging import add_prefix
import gtimer as gt
SACLosses = namedtuple(
'SACLosses',
'policy_loss qf1_loss qf2_loss alpha_loss',
)
class SACTrainer(TorchTrainer, LossFunction):
def __init__(
self,
env,
policy,
qf1,
qf2,
target_qf1,
target_qf2,
discount=0.99,
reward_scale=1.0,
policy_lr=1e-3,
qf_lr=1e-3,
optimizer_class=optim.Adam,
soft_target_tau=1e-2,
target_update_period=1,
plotter=None,
render_eval_paths=False,
use_automatic_entropy_tuning=True,
target_entropy=None,
):
super().__init__()
self.env = env
self.policy = policy
self.qf1 = qf1
self.qf2 = qf2
self.target_qf1 = target_qf1
self.target_qf2 = target_qf2
self.soft_target_tau = soft_target_tau
self.target_update_period = target_update_period
self.use_automatic_entropy_tuning = use_automatic_entropy_tuning
if self.use_automatic_entropy_tuning:
if target_entropy is None:
# Use heuristic value from SAC paper
self.target_entropy = -np.prod(
self.env.action_space.shape).item()
else:
self.target_entropy = target_entropy
self.log_alpha = ptu.zeros(1, requires_grad=True)
self.alpha_optimizer = optimizer_class(
[self.log_alpha],
lr=policy_lr,
)
self.plotter = plotter
self.render_eval_paths = render_eval_paths
self.qf_criterion = nn.MSELoss()
self.vf_criterion = nn.MSELoss()
self.policy_optimizer = optimizer_class(
self.policy.parameters(),
lr=policy_lr,
)
self.qf1_optimizer = optimizer_class(
self.qf1.parameters(),
lr=qf_lr,
)
self.qf2_optimizer = optimizer_class(
self.qf2.parameters(),
lr=qf_lr,
)
self.discount = discount
self.reward_scale = reward_scale
self._n_train_steps_total = 0
self._need_to_update_eval_statistics = True
self.eval_statistics = OrderedDict()
def train_from_torch(self, batch):
gt.blank_stamp()
losses, stats = self.compute_loss(
batch,
skip_statistics=not self._need_to_update_eval_statistics,
)
"""
Update networks
"""
if self.use_automatic_entropy_tuning:
self.alpha_optimizer.zero_grad()
losses.alpha_loss.backward()
self.alpha_optimizer.step()
self.policy_optimizer.zero_grad()
losses.policy_loss.backward()
self.policy_optimizer.step()
self.qf1_optimizer.zero_grad()
losses.qf1_loss.backward()
self.qf1_optimizer.step()
self.qf2_optimizer.zero_grad()
losses.qf2_loss.backward()
self.qf2_optimizer.step()
self._n_train_steps_total += 1
self.try_update_target_networks()
if self._need_to_update_eval_statistics:
self.eval_statistics = stats
# Compute statistics using only one batch per epoch
self._need_to_update_eval_statistics = False
gt.stamp('sac training', unique=False)
def try_update_target_networks(self):
if self._n_train_steps_total % self.target_update_period == 0:
self.update_target_networks()
def update_target_networks(self):
ptu.soft_update_from_to(
self.qf1, self.target_qf1, self.soft_target_tau
)
ptu.soft_update_from_to(
self.qf2, self.target_qf2, self.soft_target_tau
)
def compute_loss(
self,
batch,
skip_statistics=False,
) -> Tuple[SACLosses, LossStatistics]:
rewards = batch['rewards']
terminals = batch['terminals']
obs = batch['observations']
actions = batch['actions']
next_obs = batch['next_observations']
"""
Policy and Alpha Loss
"""
dist = self.policy(obs)
new_obs_actions, log_pi = dist.rsample_and_logprob()
log_pi = log_pi.unsqueeze(-1)
if self.use_automatic_entropy_tuning:
alpha_loss = -(self.log_alpha * (log_pi + self.target_entropy).detach()).mean()
alpha = self.log_alpha.exp()
else:
alpha_loss = 0
alpha = 1
q_new_actions = torch.min(
self.qf1(obs, new_obs_actions),
self.qf2(obs, new_obs_actions),
)
policy_loss = (alpha*log_pi - q_new_actions).mean()
"""
QF Loss
"""
q1_pred = self.qf1(obs, actions)
q2_pred = self.qf2(obs, actions)
next_dist = self.policy(next_obs)
new_next_actions, new_log_pi = next_dist.rsample_and_logprob()
new_log_pi = new_log_pi.unsqueeze(-1)
target_q_values = torch.min(
self.target_qf1(next_obs, new_next_actions),
self.target_qf2(next_obs, new_next_actions),
) - alpha * new_log_pi
q_target = self.reward_scale * rewards + (1. - terminals) * self.discount * target_q_values
qf1_loss = self.qf_criterion(q1_pred, q_target.detach())
qf2_loss = self.qf_criterion(q2_pred, q_target.detach())
"""
Save some statistics for eval
"""
eval_statistics = OrderedDict()
if not skip_statistics:
eval_statistics['QF1 Loss'] = np.mean(ptu.get_numpy(qf1_loss))
eval_statistics['QF2 Loss'] = np.mean(ptu.get_numpy(qf2_loss))
eval_statistics['Policy Loss'] = np.mean(ptu.get_numpy(
policy_loss
))
eval_statistics.update(create_stats_ordered_dict(
'Q1 Predictions',
ptu.get_numpy(q1_pred),
))
eval_statistics.update(create_stats_ordered_dict(
'Q2 Predictions',
ptu.get_numpy(q2_pred),
))
eval_statistics.update(create_stats_ordered_dict(
'Q Targets',
ptu.get_numpy(q_target),
))
eval_statistics.update(create_stats_ordered_dict(
'Log Pis',
ptu.get_numpy(log_pi),
))
policy_statistics = add_prefix(dist.get_diagnostics(), "policy/")
eval_statistics.update(policy_statistics)
if self.use_automatic_entropy_tuning:
eval_statistics['Alpha'] = alpha.item()
eval_statistics['Alpha Loss'] = alpha_loss.item()
loss = SACLosses(
policy_loss=policy_loss,
qf1_loss=qf1_loss,
qf2_loss=qf2_loss,
alpha_loss=alpha_loss,
)
return loss, eval_statistics
def get_diagnostics(self):
stats = super().get_diagnostics()
stats.update(self.eval_statistics)
return stats
def end_epoch(self, epoch):
self._need_to_update_eval_statistics = True
@property
def networks(self):
return [
self.policy,
self.qf1,
self.qf2,
self.target_qf1,
self.target_qf2,
]
@property
def optimizers(self):
return [
self.alpha_optimizer,
self.qf1_optimizer,
self.qf2_optimizer,
self.policy_optimizer,
]
def get_snapshot(self):
return dict(
policy=self.policy,
qf1=self.qf1,
qf2=self.qf2,
target_qf1=self.target_qf1,
target_qf2=self.target_qf2,
)
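# Illustrative wiring sketch (not part of the original file). ConcatMlp and
# TanhGaussianPolicy are assumed to come from rlkit's network/policy modules;
# any modules with compatible call signatures would do:
#
#   qf1 = ConcatMlp(input_size=obs_dim + action_dim, output_size=1, hidden_sizes=[256, 256])
#   qf2 = ConcatMlp(input_size=obs_dim + action_dim, output_size=1, hidden_sizes=[256, 256])
#   target_qf1, target_qf2 = copy.deepcopy(qf1), copy.deepcopy(qf2)
#   policy = TanhGaussianPolicy(obs_dim=obs_dim, action_dim=action_dim, hidden_sizes=[256, 256])
#   trainer = SACTrainer(env=env, policy=policy, qf1=qf1, qf2=qf2,
#                        target_qf1=target_qf1, target_qf2=target_qf2)
#   trainer.train_from_torch(batch)  # batch: dict of torch tensors (rewards, observations, ...)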
| mit | -7,586,187,944,391,447,000 | 30.026515 | 99 | 0.553534 | false |
pythondude325/flaskr | flaskr.py | 1 | 3871 | from postmarkup import render_bbcode
from sqlite3 import connect
from contextlib import closing
from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash
# config
DATABASE = 'flaskr.db'
DEBUG = True
SECRET_KEY = 'devkey'
USERNAME = 'admin'
PASSWORD = 'default'
# create app
app = Flask(__name__)
app.config.from_object(__name__)
# app.config.from_envvar('FLASKR_CONF')
# database functions
def connect_db():
return connect(app.config['DATABASE'])
def init_db():
with closing(connect_db()) as db:
with app.open_resource("schema.sql", mode="r") as f:
db.cursor().executescript(f.read())
db.commit()
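# Illustrative note (not part of the original file): the schema is created once
# from an interactive session rather than on import. This assumes a schema.sql
# file sits next to this module, as app.open_resource() above requires:
#
#   >>> from flaskr import init_db
#   >>> init_db()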
# request functions
@app.before_request
def before_request():
g.db = connect_db()
@app.teardown_request
def teardown_request(_):
db = getattr(g, 'db', None)
if db is not None:
db.close()
# routes
@app.route('/')
def show_entries():
cur = g.db.execute('select * from entries order by id desc')
tag = request.args.get('tag')
entries = [dict(id=row[0], title=row[1], text=render_bbcode(row[2]), tags=row[3].split(", "))
for row in cur.fetchall() if tag in row[3].split(", ") or tag is None]
return render_template('show_entries.html', entries=entries)
@app.route('/add', methods=['POST'])
def add_entry():
if not session.get("logged_in"):
abort(401)
g.db.execute('insert into entries (title, text, tags) values (?, ?, ?)',
[request.form['title'], request.form['text'], request.form['tags']])
g.db.commit()
flash('New entry was successfully posted.')
return redirect(url_for('show_entries'))
@app.route('/post/<num>')
def show_post(num):
posts = g.db.execute('select * from entries where id is {}'.format(num))
entry = [dict(id=row[0], title=row[1], text=render_bbcode(row[2]), tags=row[3].split(", ")) for row in posts]
return render_template('show_entries.html', entries=entry)
@app.route('/delete')
def delete_post():
num = request.args.get('id')
if not session.get('logged_in'):
abort(401)
g.db.execute('delete from entries where id is {}'.format(num))
g.db.commit()
flash("Post #{} deleted".format(num))
return redirect(url_for('show_entries'))
@app.route('/edit', methods=['GET', 'POST'])
def edit_post():
if not session.get('logged_in'):
abort(401)
if request.method == 'GET':
num = request.args.get('id')
else:
num = request.form['id']
posts = g.db.execute('select * from entries where id is {}'.format(num))
entry = [dict(id=str(row[0]), title=row[1], text=row[2], tags=row[3]) for row in posts][0]
if request.method == 'GET':
return render_template('edit.html', entry=entry)
elif request.method == 'POST':
g.db.execute('update entries set title="{title}", text="{text}", tags="{tags}" where id = {id}'.format(
**request.form))
g.db.commit()
flash("Post #{} updated".format(num))
return redirect(url_for('show_entries'))
else:
# this should never happen
abort(405)
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != app.config['USERNAME']:
error = 'Invalid Username'
elif request.form['password'] != app.config['PASSWORD']:
error = 'Invalid Password'
else:
session['logged_in'] = True
flash('You were logged in.')
return redirect(url_for('show_entries'))
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
session.pop('logged_in')
flash('You were logged out.')
return redirect(url_for('show_entries'))
# boiler plate
if __name__ == '__main__':
app.run(port=5000, host="0.0.0.0")
| mit | -5,481,024,899,938,952,000 | 29.007752 | 113 | 0.612245 | false |
medularis/py-star | py_star/astemu.py | 1 | 4712 | from __future__ import absolute_import, print_function, unicode_literals
from os import fork, kill, waitpid
from signal import SIGTERM
import socket
from time import sleep
from . import compat_six as six
class Event(dict):
""" Events are encoded as dicts with a header fieldname to
content-list map. Normally (for all typical asterisk events) the
content-list only has one element. For multiple elements
multiple lines with the same header (but different content) are
sent. This tests cases where asterisk events contain multiple
instances of the same header.
The key 'CONTENT' is special, it denotes text that is appended
to an event (e.g. for testing the output of the command action)
"""
sort_order = dict(
(x, n) for n, x in enumerate((
'Event',
'Response',
'Username',
'Privilege',
'Secret',
'Command',
'Channel',
'ChannelState',
'ChannelStateDesc',
'CallerIDNum',
'CallerIDName',
'AccountCode',
'Context',
'Exten',
'Reason',
'Uniqueid',
'ActionID',
'OldAccountCode',
'Cause',
'Cause-txt',
))
)
sort_order ['CONTENT'] = 100000
def sort(self, x):
return self.sort_order.get(x[0], 10000)
def as_string(self, id):
ret = []
if 'Response' in self:
self ['ActionID'] = [id]
for k,v in sorted(self.items(), key=self.sort):
if k == 'CONTENT':
ret.append(v)
else :
if isinstance(v, six.string_types):
ret.append (": ".join ((k, v)))
else:
for x in v:
ret.append (": ".join ((k, x)))
ret.append ('')
ret.append ('')
return '\r\n'.join (ret).encode('utf-8')
@property
def name(self):
return self.get('Event','')
@property
def headers(self):
return self
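# Illustrative example (not part of the original module). An Event is just a
# dict of header -> value-list, so a scripted reply can be built like this
# (the headers and values below are made up for the example):
#
#   ev = Event(Response=('Success',), Message=('Authentication accepted',))
#   ev.as_string(id='42')
#   # -> b'Response: Success\r\nActionID: 42\r\nMessage: Authentication accepted\r\n\r\n'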
class AsteriskEmu(object):
""" Emulator for asterisk management interface.
Used for unittests of :mod:`py_star.manager`.
Now factored into a standalone module for others to use in
unittests of programs that build on :mod:`py_star.manager`.
By default let the operating system decide the port number to
bind to, resulting port is stored in self.port.
"""
default_events = dict(
Login=(Event(Response=('Success', ),
Message=('Authentication accepted', )),),
Logoff=(Event(Response=('Goodbye', ),
Message=('Thanks for all the fish.', )),)
)
def __init__(self, chatscript, port = 0):
s = socket.socket (socket.AF_INET, socket.SOCK_STREAM)
s.bind(('localhost', port))
s.listen(1)
pid = fork()
if not pid:
# won't return
self.asterisk_emu(s, chatscript)
self.childpid = pid
host, self.port = s.getsockname()
s.close()
def asterisk_emu(self, sock, chatscript):
""" Emulate asterisk management interface on a socket.
Chatscript is a dict of command names to event list mapping.
The event list contains events to send when the given
command is recognized.
"""
while True:
conn, addr = sock.accept()
f = conn.makefile('rwb')
conn.close()
f.write('Asterisk Call Manager/1.1\r\n'.encode('utf-8'))
f.flush()
cmd = lastid = ''
try:
for l in f:
l = l.decode('utf-8')
if l.startswith ('ActionID:'):
lastid = l.split(':', 1)[1].strip()
elif l.startswith ('Action:'):
cmd = l.split(':', 1)[1].strip()
elif not l.strip():
for d in chatscript, self.default_events:
if cmd in d:
for event in d[cmd]:
f.write(event.as_string(id = lastid))
f.flush()
if cmd == 'Logoff':
f.close()
break
except:
pass
sleep(10000) # wait for being killed
def close(self):
if self.childpid:
kill(self.childpid, SIGTERM)
waitpid(self.childpid, 0)
self.childpid = None
| bsd-3-clause | 8,920,116,669,416,906,000 | 32.41844 | 73 | 0.493633 | false |
cherokee/pyscgi | CTK/ProgressBar.py | 1 | 1975 | # CTK: Cherokee Toolkit
#
# Authors:
# Alvaro Lopez Ortega <alvaro@alobbs.com>
#
# Copyright (C) 2010-2011 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import os
from Widget import Widget
from util import props_to_str
HEADERS = [
'<link type="text/css" href="/CTK/css/CTK.css" rel="stylesheet" />',
'<script type="text/javascript" src="/CTK/js/jquery-ui-1.7.2.custom.min.js"></script>'
]
HTML = """
<div id="%(id)s" %(props)s></div>
"""
PERCENT_INIT_JS = """
$('#%(id)s').progressbar({ value: %(value)s });
"""
class ProgressBar (Widget):
def __init__ (self, props={}):
Widget.__init__ (self)
self.id = "progressbar_%d" %(self.uniq_id)
self.value = props.pop ('value', 0)
self.props = props.copy()
if 'class' in props:
self.props['class'] += ' progressbar'
else:
self.props['class'] = 'progressbar'
def Render (self):
render = Widget.Render (self)
props = {'id': self.id,
'value': self.value,
'props': props_to_str (self.props)}
render.html += HTML %(props)
render.js += PERCENT_INIT_JS %(props)
render.headers += HEADERS
return render
def JS_to_set (self, value):
return "$('#%s').progressbar ('option', 'value', %s);" %(self.id, value)
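# Illustrative usage sketch (not part of the original module):
#
#   bar = ProgressBar({'value': 25})
#   render = bar.Render()          # render.html / render.js / render.headers feed the page
#   update_js = bar.JS_to_set(80)  # JS snippet to move the bar to 80% later on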
| bsd-3-clause | -5,721,624,957,532,368,000 | 28.924242 | 90 | 0.620759 | false |
jabaier/iic1103.20152.s5 | strings_listas_ej0.py | 1 | 1320 |
# define a function that, given a list of numbers,
# returns their sum
def sumalista(l):
    # computes l[0] + l[1] + l[2] + ... + l[??]
    # the length of the list l is obtained with len(l)
suma = 0
i = 0
while i < len(l):
suma = suma + l[i]
i = i + 1
return suma
def sumalista_cool(l):
suma = 0
for e in l:
suma = suma + e
return suma
def desafio_google(l):
i = 0
while i < len(l):
if sumalista(l[:i])==sumalista(l[i+1:]):
return i
i = i + 1
return -1
def mayusculas():
i = ord('A')
limite = ord('Z')
may=''
while i <= limite:
may = may + chr(i)
i = i + 1
return may
def minusculas():
return mayusculas().lower()
def encriptar_rot(mensaje, incremento):
M = mayusculas()
m = minusculas()
respuesta = ''
for c in mensaje:
indiceM = M.find(c)
indicem = m.find(c)
if indiceM > -1:
respuesta = respuesta + M[(indiceM+incremento)%26]
elif indicem > -1:
respuesta = respuesta + m[(indicem+incremento)%26]
else:
respuesta = respuesta + c
return respuesta
men = "Andate al cerro que mas te guste, querido"
enc = encriptar_rot(men,13)
desenc = encriptar_rot(enc,13)
print(enc)
print(desenc)
| unlicense | -7,947,223,740,876,585,000 | 19.292308 | 62 | 0.541319 | false |
jalavik/plotextractor | plotextractor/converter.py | 1 | 7853 | # -*- coding: utf-8 -*-
#
# This file is part of plotextractor.
# Copyright (C) 2010, 2011, 2015 CERN.
#
# plotextractor is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# plotextractor is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with plotextractor; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
import os
import tarfile
from invenio.utils.shell import run_shell_command, run_process_with_timeout, Timeout
from .output_utils import get_converted_image_name, \
write_message
def untar(original_tarball, sdir):
"""
Here we decide if our file is actually a tarball (sometimes the
'tarballs' gotten from arXiv aren't actually tarballs. If they
'contain' only the TeX file, then they are just that file.), then
we untar it if so and decide which of its constituents are the
TeX file and which are the images.
@param: tarball (string): the name of the tar file from arXiv
@param: dir (string): the directory where we would like it untarred to
@return: (image_list, tex_file) (([string, string, ...], string)):
list of images in the tarball and the name of the TeX file in the
tarball.
"""
if not tarfile.is_tarfile(original_tarball):
return ([], [], None)
tarball = tarfile.open(original_tarball)
tarball.extractall(sdir)
tex_output_contains = 'TeX'
tex_file_extension = 'tex'
image_output_contains = 'image'
eps_output_contains = '- type eps'
ps_output_contains = 'Postscript'
file_list = []
image_list = []
might_be_tex = []
for extracted_file in tarball.getnames():
if extracted_file == '':
break
if extracted_file.startswith('./'):
extracted_file = extracted_file[2:]
# ensure we are actually looking at the right file
extracted_file = os.path.join(sdir, extracted_file)
# Add to full list of extracted files
file_list.append(extracted_file)
dummy1, cmd_out, dummy2 = run_shell_command('file %s', (extracted_file,))
# is it TeX?
if cmd_out.find(tex_output_contains) > -1:
might_be_tex.append(extracted_file)
# is it an image?
elif cmd_out.lower().find(image_output_contains) > cmd_out.find(':') \
or \
cmd_out.lower().find(eps_output_contains) > cmd_out.find(':')\
or \
cmd_out.find(ps_output_contains) > cmd_out.find(':'):
# we have "image" in the output, and it is not in the filename
# i.e. filename.ext: blah blah image blah blah
image_list.append(extracted_file)
# if neither, maybe it is TeX or an image anyway, otherwise,
# we don't care
else:
if extracted_file.split('.')[-1].lower() == tex_file_extension:
# we might have tex source!
might_be_tex.append(extracted_file)
elif extracted_file.split('.')[-1] in ['eps', 'png', \
'ps', 'jpg', 'pdf']:
# we might have an image!
image_list.append(extracted_file)
if might_be_tex == []:
# well, that's tragic
# could not find TeX file in tar archive
return ([], [], [])
return (file_list, image_list, might_be_tex)
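# Illustrative usage sketch (not part of the original module); the paths below
# are made up for the example:
#
#   file_list, image_list, tex_candidates = untar('/tmp/arxiv_1234.tar', '/tmp/scratch')
#   if not tex_candidates:
#       pass  # no TeX source found, nothing to extract plots from
#
# A gzipped tarball should first be run through check_for_gzip() below.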
def check_for_gzip(tfile):
"""
Was that tarball also gzipped? Let's find out!
@param: file (string): the name of the object (so we can gunzip, if
that's necessary)
@output: a gunzipped file in the directory of choice, if that's necessary
@return new_file (string): The name of the file after gunzipping or the
original name of the file if that wasn't necessary
"""
gzip_contains = 'gzip compressed data'
dummy1, cmd_out, dummy2 = run_shell_command('file %s', (tfile,))
if cmd_out.find(gzip_contains) > -1:
# we have a gzip!
        # gzip refuses to operate on files that do not end in .gz,
        # so work on a temporary .tar.gz copy instead.
run_shell_command('cp %s %s', (tfile, tfile + '.tar.gz'))
new_dest = os.path.join(os.path.split(tfile)[0], 'tmp.tar')
run_shell_command('touch %s', (new_dest,))
dummy1, cmd_out, cmd_err = run_shell_command('gunzip -c %s',
(tfile + '.tar.gz',))
if cmd_err != '':
write_message('Error while gunzipping ' + tfile)
return tfile
tarfile = open(new_dest, 'w')
tarfile.write(cmd_out)
tarfile.close()
run_shell_command('rm %s', (tfile + '.tar.gz',))
return new_dest
return tfile
def convert_images(image_list):
"""
Here we figure out the types of the images that were extracted from
the tarball and determine how to convert them into PNG.
@param: image_list ([string, string, ...]): the list of image files
extracted from the tarball in step 1
@return: image_list ([str, str, ...]): The list of image files when all
have been converted to PNG format.
"""
png_output_contains = 'PNG image'
ret_list = []
for image_file in image_list:
if os.path.isdir(image_file):
continue
# FIXME: here and everywhere else in the plot extractor
# library the run shell command statements should be (1)
# called with timeout in order to prevent runaway imagemagick
# conversions; (2) the arguments should be passed properly so
# that they are escaped.
dummy1, cmd_out, dummy2 = run_shell_command('file %s', (image_file,))
if cmd_out.find(png_output_contains) > -1:
ret_list.append(image_file)
else:
# we're just going to assume that ImageMagick can convert all
# the image types that we may be faced with
# for sure it can do EPS->PNG and JPG->PNG and PS->PNG
# and PSTEX->PNG
converted_image_file = get_converted_image_name(image_file)
cmd_list = ['convert', image_file, converted_image_file]
try:
dummy1, cmd_out, cmd_err = run_process_with_timeout(cmd_list)
if cmd_err == '' or os.path.exists(converted_image_file):
ret_list.append(converted_image_file)
else:
write_message('convert failed on ' + image_file)
except Timeout:
write_message('convert timed out on ' + image_file)
return ret_list
def extract_text(tarball):
"""
We check to see if there's a file called tarball.pdf, and, if there is,
we run pdftotext on it. Simple as that.
@param: tarball (string): the raw name of the tarball
@return: None
"""
try:
os.stat(tarball + '.pdf')
cmd_list = ['pdftotext', tarball + '.pdf ', tarball + '.txt']
dummy1, dummy2, cmd_err = run_process_with_timeout(cmd_list)
if cmd_err != '':
return - 1
write_message('generated ' + tarball + '.txt from ' + tarball + '.pdf')
except:
write_message('no text from ' + tarball + '.pdf')
| gpl-2.0 | 7,425,665,071,529,268,000 | 36.754808 | 84 | 0.605246 | false |
jminuscula/dixit-online | server/src/dixit/api/game/views/player.py | 1 | 2333 |
from django.db import IntegrityError
from django.shortcuts import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
from rest_framework import generics, status
from dixit.game.models import Player
from dixit.api.game.serializers.player import PlayerSerializer, PlayerCreateSerializer
from dixit.api.game.views.mixins import GameObjectMixin
class PlayerList(GameObjectMixin, generics.ListCreateAPIView):
"""
Implements Player list actions
- GET list of players for a game
    - POST a new player to a game from a player name
"""
model = Player
serializer_class = PlayerSerializer
permission_classes = (IsAuthenticated, )
def get_queryset(self):
return Player.objects.filter(game=self.get_game())
def get_serializer_class(self):
if self.request.method == 'POST':
return PlayerCreateSerializer
return PlayerSerializer
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
try:
game = self.get_game()
player = game.add_player(request.user, request.data['name'])
except IntegrityError as exc:
if 'user_id' in str(exc):
return Response({"detail": 'You are already playing this game'},
status=status.HTTP_403_FORBIDDEN)
return Response({"detail": "Username already in use"}, status=status.HTTP_403_FORBIDDEN)
data = PlayerSerializer(player).data
return Response(data, status=status.HTTP_201_CREATED)
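# Illustrative request sketch (not part of the original module). The URL below
# is an assumption -- the actual pattern is defined in the project's urls.py:
#
#   POST /api/games/7/players/   {"name": "alice"}
#   -> 201 with the serialized player on success
#   -> 403 {"detail": "You are already playing this game"} on a repeat join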
class PlayerRetrieve(generics.RetrieveDestroyAPIView):
"""
Implements Player retrieve action
- GET player for game
"""
model = Player
serializer_class = PlayerSerializer
permission_classes = (IsAuthenticated, )
def get_object(self):
game_pk = self.kwargs['game_pk']
number = self.kwargs['player_number']
try:
return get_object_or_404(Player, game=game_pk, number=number)
except Player.DoesNotExist:
raise NotFound('player not found')
| mit | -6,770,100,734,929,883,000 | 32.811594 | 100 | 0.675954 | false |
kasra-hosseini/obspyDMT | obspyDMT/tests/test_input_handler.py | 1 | 6739 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------
# Filename: test_input_handler.py
# Purpose: testing input_handler
# Author: Kasra Hosseini
# Email: kasra.hosseinizad@earth.ox.ac.uk
# License: GPLv3
# -------------------------------------------------------------------
# -----------------------------------------------------------------------
# ----------------Import required Modules (Python and Obspy)-------------
# -----------------------------------------------------------------------
# Required Python and Obspy modules will be imported in this part.
import os
from obspy.core import UTCDateTime
from obspyDMT.utils.input_handler import command_parse, read_input_command
# ##################### test_command_parse ##################################
def test_command_parse():
(options, args, parser) = command_parse()
assert len(parser.option_groups) == 15
# ##################### test_read_input_command ###############################
def test_read_input_command():
(options, args, parser) = command_parse()
assert len(parser.option_groups[0].option_list) == 3
assert len(parser.option_groups[1].option_list) == 2
assert len(parser.option_groups[2].option_list) == 4
assert len(parser.option_groups[3].option_list) == 14
assert len(parser.option_groups[4].option_list) == 9
assert len(parser.option_groups[5].option_list) == 7
assert len(parser.option_groups[6].option_list) == 5
assert len(parser.option_groups[7].option_list) == 6
assert len(parser.option_groups[8].option_list) == 11
assert len(parser.option_groups[9].option_list) == 1
assert len(parser.option_groups[10].option_list) == 7
assert len(parser.option_groups[11].option_list) == 6
assert len(parser.option_groups[12].option_list) == 17
assert len(parser.option_groups[13].option_list) == 13
assert len(parser.option_groups[14].option_list) == 3
input_dics = read_input_command(parser)
return input_dics
# ##################### test_default_inputs ###############################
def test_default_inputs():
input_dics = test_read_input_command()
assert os.path.basename(input_dics['datapath']) == 'obspydmt-data'
assert input_dics['event_based'] is True
assert input_dics['data_source'] == ['IRIS']
assert input_dics['waveform'] is True
assert input_dics['response'] is True
assert input_dics['dir_select'] is False
assert input_dics['list_stas'] is False
assert input_dics['min_epi'] is False
assert input_dics['max_epi'] is False
assert input_dics['min_azi'] is False
assert input_dics['max_azi'] is False
assert input_dics['test'] is False
assert (UTCDateTime(input_dics['max_date']) -
UTCDateTime(input_dics['min_date']) > (60 * 60 * 24 * 365 * 45))
assert input_dics['preset'] == 0.0
assert input_dics['offset'] == 1800.0
assert input_dics['waveform_format'] is False
assert input_dics['resample_method'] == 'lanczos'
assert input_dics['sampling_rate'] is False
assert input_dics['net'] == '*'
assert input_dics['sta'] == '*'
assert input_dics['loc'] == '*'
assert input_dics['cha'] == '*'
assert input_dics['lat_cba'] is None
assert input_dics['lon_cba'] is None
assert input_dics['mr_cba'] is None
assert input_dics['Mr_cba'] is None
assert input_dics['mlat_rbb'] is None
assert input_dics['Mlat_rbb'] is None
assert input_dics['mlon_rbb'] is None
assert input_dics['Mlon_rbb'] is None
assert input_dics['req_np'] == 4
assert input_dics['process_np'] == 4
assert input_dics['username_fdsn'] is None
assert input_dics['password_fdsn'] is None
assert input_dics['username_arclink'] == 'test@obspy.org'
assert input_dics['password_arclink'] is ''
assert input_dics['host_arclink'] == 'webdc.eu'
assert input_dics['port_arclink'] == 18002
assert input_dics['event_catalog'] == 'LOCAL'
assert input_dics['min_depth'] == -10.0
assert input_dics['max_depth'] == +6000.0
assert input_dics['min_mag'] == 3.0
assert input_dics['max_mag'] == 10.
assert input_dics['mag_type'] is None
assert input_dics['evlatmin'] is None
assert input_dics['evlatmax'] is None
assert input_dics['evlonmin'] is None
assert input_dics['evlonmax'] is None
assert input_dics['evlat'] is None
assert input_dics['evlon'] is None
assert input_dics['evradmin'] is None
assert input_dics['evradmax'] is None
assert input_dics['interval'] == 3600*24
assert input_dics['pre_process'] == 'process_unit'
assert input_dics['select_data'] is False
assert input_dics['corr_unit'] == 'DIS'
assert input_dics['pre_filt'] == '(0.008, 0.012, 3.0, 4.0)'
assert input_dics['water_level'] == 600.0
assert input_dics['plot_dir_name'] == 'raw'
assert input_dics['plot_save'] is False
assert input_dics['plot_format'] is False
assert input_dics['show_no_plot'] is None
assert input_dics['plot_lon0'] == 180
assert input_dics['plot_style'] == 'simple'
assert input_dics['plotxml_date'] is False
assert input_dics['plotxml_start_stage'] == 1
assert input_dics['plotxml_end_stage'] == 100
assert input_dics['plotxml_min_freq'] == 0.01
assert input_dics['plotxml_percentage'] == 80
assert input_dics['plotxml_phase_threshold'] == 10.
assert input_dics['plotxml_output'] == 'VEL'
assert input_dics['email'] is False
assert input_dics['arc_avai_timeout'] == 40
assert input_dics['arc_wave_timeout'] == 2
# ##################### test_tour ###############################
def test_tour():
input_dics = test_read_input_command()
input_dics['datapath'] = './dmt_tour_dir'
input_dics['min_date'] = '2011-03-10'
input_dics['max_date'] = '2011-03-12'
input_dics['min_mag'] = '8.9'
input_dics['identity'] = 'TA.1*.*.BHZ'
input_dics['event_catalog'] = 'IRIS'
input_dics['req_parallel'] = True
input_dics['instrument_correction'] = True
input_dics['net'] = 'TA'
input_dics['sta'] = '1*'
input_dics['loc'] = '*'
input_dics['cha'] = 'BHZ'
from obspyDMT import obspyDMT
input_dics = obspyDMT.dmt_core(input_dics)
from glob import glob
assert len(glob('./dmt_tour_dir/*')) == 2
assert len(glob('./dmt_tour_dir/20110311_054623.a/processed/*')) == 13
assert len(glob('./dmt_tour_dir/20110311_054623.a/raw/*')) == 13
assert len(glob('./dmt_tour_dir/20110311_054623.a/resp/*')) == 13
assert len(glob('./dmt_tour_dir/20110311_054623.a/info/*')) >= 8
import shutil
shutil.rmtree('./dmt_tour_dir')
shutil.rmtree('./obspydmt-data')
| gpl-3.0 | -1,119,200,854,747,404,200 | 39.113095 | 79 | 0.599644 | false |
ixaxaar/sdcc | support/regression/compact-results.py | 1 | 1999 | import sys, re
import string
"""Simple script that scans all of the test suite results text fed in
through stdin and summarises the total number of failures, test
points, and test cases."""
# Read in everything
lines = sys.stdin.readlines()
# Init the running totals
failures = 0
cases = 0
tests = 0
bytes = 0
ticks = 0
invalid = 0
# hack for valdiag
name = ""
base = ""
for line in lines:
# --- Running: gen/ucz80/longor/longor
m = re.match(r'^--- Running: (.*)$', line)
if (m):
name = m.group(1)
# in case the test program crashes before the "--- Running" message
m = re.match(r'^[0-9]+ words read from (.*)\.ihx$',line)
if (m):
name = m.group(1)
base = name
m = re.match(r'([^/]*)/([^/]*)/([^/]*)/(.*)$', name)
    if m:
base = m.group(3)
# '--- Summary: f/t/c: ...', where f = # failures, t = # test points,
# c = # test cases.
if (re.search(r'^--- Summary:', line)):
(summary, data, rest) = re.split(r':', line)
(nfailures, ntests, ncases) = re.split(r'/', data)
failures = failures + string.atof(nfailures)
tests = tests + string.atof(ntests)
cases = cases + string.atof(ncases)
if (string.atof(nfailures)):
print "Failure: %s" % name
# '--- Simulator: b/t: ...', where b = # bytes, t = # ticks
if (re.search(r'^--- Simulator:', line)):
(simulator, data, rest) = re.split(r':', line)
(nbytes, nticks) = re.split(r'/', data)
bytes = bytes + string.atof(nbytes)
ticks = ticks + string.atof(nticks)
# Stop at 0x000228: (106) Invalid instruction 0x00fd
if (re.search(r'Invalid instruction', line) or re.search(r'unknown instruction', line)):
invalid += 1;
print "Invalid instruction: %s" % name
print "%-35.35s" % base,
if (invalid > 0):
print "%d invalid instructions," % invalid,
print "(f: %2.0f, t: %3.0f, c: %2.0f, b: %6.0f, t: %8.0f)" % (failures, tests, cases, bytes, ticks)
| gpl-2.0 | 9,146,120,504,725,573,000 | 29.287879 | 99 | 0.568284 | false |
mdworks2016/work_development | Python/20_Third_Certification/venv/lib/python3.7/site-packages/celery/backends/database/session.py | 1 | 1896 | # -*- coding: utf-8 -*-
"""SQLAlchemy session."""
from __future__ import absolute_import, unicode_literals
from kombu.utils.compat import register_after_fork
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import NullPool
ResultModelBase = declarative_base()
__all__ = ('SessionManager',)
def _after_fork_cleanup_session(session):
session._after_fork()
class SessionManager(object):
"""Manage SQLAlchemy sessions."""
def __init__(self):
self._engines = {}
self._sessions = {}
self.forked = False
self.prepared = False
if register_after_fork is not None:
register_after_fork(self, _after_fork_cleanup_session)
def _after_fork(self):
self.forked = True
def get_engine(self, dburi, **kwargs):
if self.forked:
try:
return self._engines[dburi]
except KeyError:
engine = self._engines[dburi] = create_engine(dburi, **kwargs)
return engine
else:
return create_engine(dburi, poolclass=NullPool)
def create_session(self, dburi, short_lived_sessions=False, **kwargs):
engine = self.get_engine(dburi, **kwargs)
if self.forked:
if short_lived_sessions or dburi not in self._sessions:
self._sessions[dburi] = sessionmaker(bind=engine)
return engine, self._sessions[dburi]
return engine, sessionmaker(bind=engine)
def prepare_models(self, engine):
if not self.prepared:
ResultModelBase.metadata.create_all(engine)
self.prepared = True
def session_factory(self, dburi, **kwargs):
engine, session = self.create_session(dburi, **kwargs)
self.prepare_models(engine)
return session()
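# Illustrative usage sketch (not part of the original module); the SQLite URI
# is only an example:
#
#   manager = SessionManager()
#   session = manager.session_factory('sqlite:///results.db')
#   ...  # query / insert through the SQLAlchemy session
#   session.close()
#
# Before a fork, get_engine() hands out throw-away NullPool engines; once a
# fork is detected it caches one engine (and sessionmaker) per dburi.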
| apache-2.0 | 277,522,112,381,838,560 | 30.6 | 78 | 0.634494 | false |
tensorflow/compression | tensorflow_compression/python/util/packed_tensors.py | 1 | 3070 | # Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Packed tensors in bit sequences."""
import tensorflow as tf
__all__ = [
"PackedTensors",
]
class PackedTensors:
"""Packed representation of compressed tensors.
This class can pack and unpack several tensor values into a single string. It
can also optionally store a model identifier.
The tensors currently must be rank 1 (vectors) and either have integer or
string type.
"""
def __init__(self, string=None):
self._example = tf.train.Example()
if string:
self.string = string
@property
def model(self):
"""A model identifier."""
buf = self._example.features.feature["MD"].bytes_list.value[0]
return buf.decode("ascii")
@model.setter
def model(self, value):
self._example.features.feature["MD"].bytes_list.value[:] = [
value.encode("ascii")]
@model.deleter
def model(self):
del self._example.features.feature["MD"]
@property
def string(self):
"""The string representation of this object."""
return self._example.SerializeToString()
@string.setter
def string(self, value):
self._example.ParseFromString(value)
def pack(self, tensors):
"""Packs `Tensor` values into this object."""
i = 1
for tensor in tensors:
feature = self._example.features.feature[chr(i)]
feature.Clear()
if tensor.shape.rank != 1:
raise RuntimeError(f"Unexpected tensor rank: {tensor.shape.rank}.")
if tensor.dtype.is_integer:
feature.int64_list.value[:] = tensor.numpy()
elif tensor.dtype == tf.string:
feature.bytes_list.value[:] = tensor.numpy()
else:
raise RuntimeError(f"Unexpected tensor dtype: '{tensor.dtype}'.")
i += 1
# Delete any remaining, previously set arrays.
while chr(i) in self._example.features.feature:
del self._example.features.feature[chr(i)]
i += 1
def unpack(self, dtypes):
"""Unpacks values from this object based on dtypes."""
tensors = []
for i, dtype in enumerate(dtypes):
dtype = tf.as_dtype(dtype)
feature = self._example.features.feature[chr(i + 1)]
if dtype.is_integer:
tensors.append(tf.constant(feature.int64_list.value, dtype=dtype))
elif dtype == tf.string:
tensors.append(tf.constant(feature.bytes_list.value, dtype=dtype))
else:
raise RuntimeError(f"Unexpected dtype: '{dtype}'.")
return tensors
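# Illustrative round-trip sketch (not part of the original module); the tensor
# values are made up for the example:
#
#   packed = PackedTensors()
#   packed.model = "my-model-v1"
#   packed.pack([tf.constant([3, 1, 4]), tf.constant([b"\x00\x01"])])
#   restored = PackedTensors(packed.string).unpack([tf.int32, tf.string])
#
# Only rank-1 integer or string tensors are supported, as enforced in pack().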
| apache-2.0 | -766,657,654,993,320,700 | 30.979167 | 80 | 0.657003 | false |
wzin/interactivespaces-python-api | tests/test_master.py | 1 | 13234 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from mock import MagicMock
import json
import urllib
import urllib2
import sys
import os
sys.path.append(os.getcwd())
import interactivespaces
TEST_ACTIVITY_DATA = {
"id":"53",
"bundleContentHash":"hjkl",
"identifyingName":"com.endpoint.lg.browser",
"lastUploadDate":1398288057444,
"description":"Browser Activity to present \"webui\" activties to the user",
"name":"Browser Activity",
"lastStartDate":1401901320867,
"metadata":{},
"version":"1.0.0.dev"
}
TEST_LIVEACTIVITY_DATA = {
"lastDeployDate":"Mon May 05 12:50:36 PDT 2014",
"outOfDate":False,
"id":"110",
"description":"",
"name":"Evdev Demuxer on 42-a",
"active": {
"numberLiveActivityGroupRunning":1,
"runtimeState":"ACTIVE",
"deployState":"UNKNOWN",
"lastStateUpdate":"Wed Jun 04 11:17:21 PDT 2014",
"runtimeStateDescription":"space.activity.state.active",
"directRunning":False,
"directActivated":False,
"numberLiveActivityGroupActivated":1,
"deployStateDescription":"space.activity.state.unknown",
"deployStateDetail":None,
"runtimeStateDetail":"<p>foo</p>"
},
"controller": {
"id":"2",
"name":"ISCtlDispAScreen00",
"uuid":"372f0f95-6b48-487a-a1ac-383ba580fc1c"
},
"uuid":"88816d20-22f6-4f78-95ba-7843696c6bc5",
"activity": {
"id":"61",
"bundleContentHash":"qwerty",
"identifyingName":"com.endpoint.lg.evdev.demuxer",
"lastUploadDate":1398288062862,
"description":"Separates and aggregates different types of input events.",
"name":"Event Device Demuxer",
"lastStartDate":1401905841864,
"metadata":{},
"version":"1.0.0.dev"
},
"metadata":{}
}
TEST_LIVEACTIVITYGROUP_DATA = {
"id":"301",
"description":"",
"name":"Google Earth",
"metadata":{}
}
TEST_SPACE_DATA = {
"id":"401",
"description":"",
"name":"LG Express",
"metadata":{}
}
TEST_CONTROLLER_DATA = {
"state":"RUNNING",
"hostId":"ctldispascreen00",
"mode":"ENABLED",
"id":"2",
"stateDescription":"space.controller.state.running",
"modeDescription":"space.controller.mode.enabled",
"description":"Controller for Screen 00 on Display Node A",
"lastStateUpdateDate":"Wed Jun 04 12:25:57 PDT 2014",
"name":"ISCtlDispAScreen00",
"dataBundleStateDescription":"space.controller.dataBundle.state.none",
"uuid":"372f0f95-6b48-487a-a1ac-383ba580fc1c",
"dataBundleState":"NO_REQUEST",
"lastDataBundleStateUpdateDate":None,
"metadata":{}
}
TEST_NAMEDSCRIPT_DATA = {
"id":"3",
"name":"foo",
"description":"bar"
}
TEST_POST = {"foo":"bar"}
TEST_QUERY = {"zot":"zing"}
TEST_SESSION = 'e2s1'
TEST_HOST = '1.2.3.4'
TEST_PORT = 12345
def test_get_collection(data, method_to_test, expected_type, path_name):
"""Helper for testing collection getters."""
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._api_get_json = MagicMock(return_value=[data])
result = method_to_test(master)
master._api_get_json.assert_called_once_with('{}/all'.format(path_name))
return result
class MasterTests(unittest.TestCase):
def test_constructor(self):
"""Test construction with valid arguments."""
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
self.assertEqual(master.host, TEST_HOST)
self.assertEqual(master.port, TEST_PORT)
def test_api_get_json(self):
"""Test a valid call to Master._api_get_json()."""
class MockResponse(object):
            def read(self):
                return '{"result":"success","data":{"foo":"bar"}}'
            def getcode(self):
                return 200
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._urlopen = MagicMock(return_value=MockResponse())
command = 'activity/all'
response = master._api_get_json(command)
master._urlopen.assert_called_once_with(
'http://{}:{}/{}.json'.format(TEST_HOST, TEST_PORT, command)
)
self.assertEqual('bar', response['foo'])
def test_api_get_html(self):
"""Test a valid call to Master._api_get_html()."""
class MockResponse(object):
            def read(self):
                return 'asdf'
            def getcode(self):
                return 200
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._urlopen = MagicMock(return_value=MockResponse())
command = 'activity/new'
response = master._api_get_html(command, {"foo":"bar"})
master._urlopen.assert_called_once_with(
'http://{}:{}/{}.html?{}'.format(
TEST_HOST,
TEST_PORT,
command,
urllib.urlencode(TEST_QUERY)
)
)
self.assertEqual('asdf', response.read())
self.assertEqual(200, response.getcode())
def test_api_post_json(self):
"""Test a valid call to Master._api_post_json()."""
class MockResponse(object):
            def read(self):
                return '{"result":"success"}'
            def getcode(self):
                return 200
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._urlopen = MagicMock(return_value=MockResponse())
command = 'liveactivity/42/configure'
master._api_post_json(command, TEST_QUERY, TEST_POST)
master._urlopen.assert_called_once_with(
'http://{}:{}/{}.json?{}'.format(
TEST_HOST,
TEST_PORT,
command,
urllib.urlencode(TEST_QUERY)
),
urllib.urlencode(TEST_POST)
)
def test_api_post_html(self):
"""Test a valid call to Master._api_post_html()."""
class MockResponse(object):
            def read(self):
                return 'asdf'
            def getcode(self):
                return 200
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._urlopen = MagicMock(return_value=MockResponse())
command = 'namescript/new'
master._api_post_html(command, TEST_QUERY, TEST_POST)
master._urlopen.assert_called_once_with(
'http://{}:{}/{}.html?{}'.format(
TEST_HOST,
TEST_PORT,
command,
urllib.urlencode(TEST_QUERY)
),
urllib.urlencode(TEST_POST)
)
def test_get_all_activities(self):
"""Test Master.get_activities() with no pattern."""
expected_type = interactivespaces.Activity
result = test_get_collection(
data=TEST_ACTIVITY_DATA,
method_to_test=interactivespaces.Master.get_activities,
expected_type=expected_type,
path_name='activity'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_live_activities(self):
"""Test Master.get_live_activities() with no pattern."""
expected_type = interactivespaces.LiveActivity
result = test_get_collection(
data=TEST_LIVEACTIVITY_DATA,
method_to_test=interactivespaces.Master.get_live_activities,
expected_type=expected_type,
path_name='liveactivity'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_live_activity_groups(self):
"""Test Master.get_live_activity_groups() with no pattern."""
expected_type = interactivespaces.LiveActivityGroup
        result = test_get_collection(
data=TEST_LIVEACTIVITYGROUP_DATA,
method_to_test=interactivespaces.Master.get_live_activity_groups,
expected_type=expected_type,
path_name='liveactivitygroup'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_spaces(self):
"""Test Master.get_spaces() with no pattern."""
expected_type = interactivespaces.Space
        result = test_get_collection(
data=TEST_SPACE_DATA,
method_to_test=interactivespaces.Master.get_spaces,
expected_type=expected_type,
path_name='space'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_controllers(self):
"""Test Master.get_controllers() with no pattern."""
expected_type = interactivespaces.Controller
        result = test_get_collection(
data=TEST_CONTROLLER_DATA,
method_to_test=interactivespaces.Master.get_controllers,
expected_type=expected_type,
            path_name='spacecontroller'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_named_scripts(self):
"""Test Master.get_named_scripts() with no pattern."""
expected_type = interactivespaces.NamedScript
        result = test_get_collection(
data=TEST_NAMEDSCRIPT_DATA,
method_to_test=interactivespaces.Master.get_named_scripts,
expected_type=expected_type,
path_name='namedscript'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_new_live_activity(self):
"""Test a valid call to Master.new_live_activity()."""
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
class MockFirstResponse():
            def getcode(self):
                return 200
            def geturl(self):
return 'http://{}:{}/liveactivity/new.html?execution={}'.format(
TEST_HOST,
TEST_PORT,
TEST_SESSION
)
class MockSecondResponse():
            def getcode(self):
return 200
master._api_get_html = MagicMock(return_value=MockFirstResponse())
master._api_post_html = MagicMock(return_value=MockSecondResponse())
class MockActivity():
            id = TEST_LIVEACTIVITY_DATA['activity']['id']
class MockController():
            id = TEST_LIVEACTIVITY_DATA['controller']['id']
test_live_activity = master.new_live_activity(
TEST_LIVEACTIVITY_DATA['name'],
TEST_LIVEACTIVITY_DATA['description'],
MockActivity(),
MockController()
)
master._api_get_html.assert_called_once_with(
'liveactivity/new',
{"mode": "embedded"}
)
master._api_post_html.assert_called_once_with(
'liveactivity/new',
{"execution": TEST_SESSION},
{
"liveActivity.name": TEST_LIVEACTIVITY_DATA['name'],
"liveActivity.description": TEST_LIVEACTIVITY_DATA['description'],
"activityId": TEST_LIVEACTIVITY_DATA['activity']['id'],
"controllerId": TEST_LIVEACTIVITY_DATA['controller']['id'],
"_eventId_save": "Save"
}
)
self.assertIsInstance(
test_live_activity,
interactivespaces.LiveActivity
)
def main():
unittest.main()
if __name__ == '__main__':
main()
| apache-2.0 | -5,224,145,098,743,237,000 | 36.174157 | 102 | 0.504458 | false |
yollamttam/WordPrediction | EntropyBenchmarkUnigram.py | 1 | 1302 | import nltk
import glob
import pickle
import numpy as np
from fann2 import libfann
### Unigram perplexity
# obvious
alpha = 0.5
nExamples = 0
fileToEntropy = {}
# load pickle
wordsInOrder = pickle.load( open( "wordsInOrder.p", "rb" ) )
wordProb = pickle.load( open( "wordProbability.p", "rb" ) )
# load neural network
ann = libfann.neural_net()
ann.create_from_file("NN.net")
nFeatures = np.shape(wordProb)[0]-1
files = glob.glob('reuters/training/*')
files = files[:100]
fileNum = 0
for filename in files:
entropy = 0
fileNum += 1
print "%d of %d" % (fileNum,len(files))
openfile = open(filename,'r')
readfile = openfile.read()
tokens = nltk.word_tokenize(readfile)
# loop through tokens
for token in tokens:
token = token.lower()
if (token in wordsInOrder):
tokenIndex = wordsInOrder[token]
else:
tokenIndex = nFeatures
logProb = np.min((50,-1*np.log(wordProb[tokenIndex])))
entropy += logProb
entropy /= len(tokens)
print entropy
fileToEntropy[filename] = entropy
openfile.close()
avgEntropy = 0
for value in fileToEntropy.itervalues():
avgEntropy += value
avgEntropy /= len(fileToEntropy)
print avgEntropy
pickle.dump(fileToEntropy,open("fileToEntropy.p", "wb" ))
| apache-2.0 | 2,327,401,023,340,730,000 | 21.448276 | 62 | 0.659754 | false |
luiscape/hdxscraper-violation-documentation-center-syria | app/__main__.py | 1 | 1039 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
dir = os.path.split(os.path.split(os.path.realpath(__file__))[0])[0]
dir = os.path.join(dir, 'scripts')
sys.path.append(dir)
from setup.load import LoadConfig
from utilities.prompt_format import item
from utilities.database import CleanTable, StoreRecords
from scraper.scrape import ExctractTotalPages, ScrapeEndpoint
__version__ = 'v.0.1.1'
def Main():
'''Program wrapper.'''
config = LoadConfig('dev.json')
print '%s Version: %s' % (item('prompt_bullet'), __version__)
for endpoint in config['endpoints']:
data = ScrapeEndpoint(endpoint, verbose=config['verbose'])
#
# Clean table and store new records.
#
CleanTable(endpoint['name'])
StoreRecords(data, endpoint['name'])
#
# Loading configuration and
# running program.
#
if __name__ == '__main__':
try:
Main()
print '%s VDC scraped successfully.' % item('prompt_success')
except Exception as e:
print '%s VDC scraper failed.' % item('prompt_error')
print e
| mit | 3,159,288,300,322,429,400 | 21.106383 | 68 | 0.668912 | false |
sfjuocekr/PokeIV | setup.py | 1 | 1764 | #!/usr/bin/env python
#-- Setup file for py2exe
from distutils.core import setup
import py2exe
import sys, os
import Cryptodome
import requests
#find POGOProtos
sys.path.append("pgoapi\protos")
mydata = list()
path = Cryptodome.__path__[0]
root_end = path.find('Cryptodome')
for folder,folder_name,files in os.walk(path):
for file in files:
if os.path.splitext(file)[1] == '.pyd':
mydata.append((folder[root_end:], [os.path.join(folder,file)]))
path = requests.__path__[0]
root_end = path.find('requests')
for folder,folder_name,files in os.walk(path):
for file in files:
if file == 'cacert.pem':
mydata.append((folder[root_end:], [os.path.join(folder,file)]))
path = os.path.join(os.path.dirname(os.path.realpath(__file__)),'pgoapi')
root_end = 'pgoapi'
for folder,folder_name,files in os.walk(path):
for file in files:
if os.path.splitext(file)[1] == '.json':
mydata.append((root_end, [os.path.join(folder,file)]))
mydata.extend(('families.tsv','evolves.tsv','german-names.tsv','config.json'))
setup(data_files=mydata,
windows = [{'script': "pokeIV.py"}],
zipfile = None,
options= {
"py2exe":{
"packages": ['s2sphere',
'six',
'gpsoauth',
'geopy',
'requests',
'Cryptodome',
'POGOProtos',
'POGOProtos.Networking.Requests',
'POGOProtos.Networking.Requests.Messages_pb2',
'POGOProtos.Networking.Responses_pb2']
,'bundle_files': 1
,'compressed': True
,'dll_excludes': [ 'crypt32.dll', 'mpr.dll']
}
})
| mit | 1,747,010,191,501,326,800 | 29.413793 | 78 | 0.560091 | false |
Matla/Python_Logging | logging/logging.py | 1 | 6683 | __author__ = "Mats Larsen"
__copyright__ = "Mats Larsen2014"
__credits__ = ["Morten Lind"]
__license__ = "GPL"
__maintainer__ = "Mats Larsen"
__email__ = "matsla@{ntnu.no}"
__status__ = "Development"
#--------------------------------------------------------------------
#File: logging.py
#Module Description
"""
This module logs data depending on the selected mode.
"""
#--------------------------------------------------------------------
#IMPORT
#--------------------------------------------------------------------
import traceback
import threading
import sys
import time
import numpy as np
from timermanager import TimerManager as TM
#--------------------------------------------------------------------
#CONSTANTS
#--------------------------------------------------------------------
LOG_LEVEL = 2 # Information level
ALWAYS_LOG_LEVEL = 2
FILE = 'logging'
#Modes for the logging class
modes = {'ft-sensor' : '_force_torque_logging_mode',
'Joint_Angles' : '_joint_angles',
}
#--------------------------------------------------------------------
#METHODS
#-------------------------------------------------------------------
def log(msg, log_level=LOG_LEVEL):
"""
    Print a message and track where the log call was invoked
    Input:
    -msg: message to be printed, ''
    -log_level: information level """
global LOG_LEVEL
if log_level <= LOG_LEVEL:
print(str(log_level) + ' : ' + FILE +'.py::' + traceback.extract_stack()[-2][2] + ' : ' + msg)
class Logging(threading.Thread):
""" This class create an instance to logging in a custom mode. """
class Error(Exception):
"""Exception class."""
def __init__(self, message):
self.message = message
Exception.__init__(self, self.message)
def __repr__(self):
return self.message
def __init__(self,name='logging_instance',
logging_mode=None,
file_names=[],
ref_class=None,
freq=None,
log_level=3):
# Assignment
self._name=name # name of the instance
self._current_logging_mode=logging_mode # which mode to log
self._ref=ref_class # the class to listen on
self._files = file_names # files name, to decide the name of the file
self._log_level=log_level # information level
#Threading
        threading.Thread.__init__(self) # initialize the thread
self.daemon = True
#Event
self._thread_alive = threading.Event() # status for the thread
self._thread_terminated = threading.Event() # status for the thread terminated
#Reset
self._thread_alive.clear()
self._thread_terminated.clear()
log('Logging instance ' + self._name + ' is initialized : ', self._log_level)
self.start() # start the thread
def get_name(self):
"""Returning the name of the instance."""
return self._name
name = property(get_name,'Name Property')
    def _force_torque_logging_mode(self):
        """This mode will log the data from the ft-sensor"""
info,force,torque = self._ref.get_data_ATI(sync=True,timeout=1,data_type=None) # wait for data
if info != None:
force = np.average(force,axis=0)
torque = np.average(torque,axis=0)
info = info[0]
c_time = time.time() - self._first_time # get time stamp
for i in range(0,3):
self._data_list[i].append(info[i])
self._data_list[3+i].append(force[i])
self._data_list[6+i].append(torque[i])
self._data_list[9].append(c_time)
    def _joint_angles(self):
        """This mode will log the joint angles."""
self._ref.robot_facade.wait_for_control()
time.sleep(1)
def stop_joint_angles_listner(self):
self._ref.robot_facade.unsubscribe(self.log_joint_angles_listner)
def log_joint_angles_listner(self, event_time):
self._data_list[0].append(event_time)
self._data_list[1].append(self._ref.robot_facade.act_joint_pos.tolist())
self._data_list[2].append(self._ref.robot_facade.cmd_joint_pos.tolist())
def run(self):
"""The thread is running in this loop."""
log('Logging Instance ' + self._name + ' is RUNNING', ALWAYS_LOG_LEVEL)
self._thread_alive.set() # set the thread to be alive
self._thread_terminated.clear() # clear the terminated event
self._data_list = [] # a list that contain all files
for i in self._files:
self._data_list.append([])
self._first_time = time.time()
if self._current_logging_mode == 'Joint_Angles':
self._ref.robot_facade.subscribe(self.log_joint_angles_listner)
while self._thread_alive.isSet() == True:
try:
method = getattr(self,modes[self._current_logging_mode])
except AttributeError:
                raise self.Error(self._current_logging_mode + ' not found !!!! : ' + '"{}"'.format(self._name))
else:
method() # call task from the queue
if self._current_logging_mode == 'Joint_Angles':
self._ref.robot_facade.unsubscribe(self.log_joint_angles_listner)
self._ref.robot_facade.wait_for_control()
self._file_list = [] # a list that contain all files
for i in self._files:
self._file_list.append(open(i+'.txt','w'))
for i in range(0,len(self._files)):
for j in self._data_list[i]:
self._file_list[i].write(str(j) + '\n')
for i in self._file_list:
i.close
self._thread_terminated.set()
    def stop(self):
        """Stop the thread, and also stop the receiver class and
        close the socket."""
log('Trying to stop LOGGING', self._log_level)
if self._thread_alive.isSet(): # if the thread is alive
self._thread_alive.clear() # set flag to false
else:
raise Exception('LOGGING: '
+ 'Is already stopped')
    def wait_startup(self,timeout=None):
        """Wait until this thread has started up, or return after
        the optional timeout expires.
        Inputs:
        timeout:float-> timeout given in secs."""
if self._thread_alive.wait(timeout):
return True
else:
return False
    def wait_terminated(self,timeout=None):
        """Wait until this thread has terminated, or return after
        the optional timeout expires.
        Inputs:
        timeout:float-> timeout given in secs."""
if self._thread_terminated.wait(timeout):
return True
else:
return False
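# Illustrative usage sketch (not part of the original module). The ref_class
# object is an assumption: it must provide get_data_ATI() for the 'ft-sensor'
# mode, and the ten file names match the ten columns that mode records:
#
#   logger = Logging(name='ft_log',
#                    logging_mode='ft-sensor',
#                    file_names=['ix', 'iy', 'iz', 'fx', 'fy', 'fz',
#                                'tx', 'ty', 'tz', 'time'],
#                    ref_class=sensor,
#                    log_level=2)
#   logger.wait_startup(timeout=5)
#   ...            # record for a while
#   logger.stop()
#   logger.wait_terminated(timeout=5)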
| gpl-3.0 | -5,752,381,826,589,844,000 | 38.081871 | 102 | 0.540326 | false |
emedvedev/st2 | st2actions/st2actions/runners/windows_command_runner.py | 1 | 4081 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from eventlet.green import subprocess
from st2common import log as logging
from st2common.util.green.shell import run_command
from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED, LIVEACTION_STATUS_FAILED
from st2common.constants.runners import WINDOWS_RUNNER_DEFAULT_ACTION_TIMEOUT
from st2actions.runners.windows_runner import BaseWindowsRunner
LOG = logging.getLogger(__name__)
# constants to lookup in runner_parameters
RUNNER_HOST = 'host'
RUNNER_USERNAME = 'username'
RUNNER_PASSWORD = 'password'
RUNNER_COMMAND = 'cmd'
RUNNER_TIMEOUT = 'timeout'
def get_runner():
return WindowsCommandRunner(str(uuid.uuid4()))
class WindowsCommandRunner(BaseWindowsRunner):
"""
Runner which executes commands on a remote Windows machine.
"""
def __init__(self, runner_id, timeout=WINDOWS_RUNNER_DEFAULT_ACTION_TIMEOUT):
"""
:param timeout: Action execution timeout in seconds.
:type timeout: ``int``
"""
super(WindowsCommandRunner, self).__init__(runner_id=runner_id)
self._timeout = timeout
def pre_run(self):
super(WindowsCommandRunner, self).pre_run()
# TODO :This is awful, but the way "runner_parameters" and other variables get
# assigned on the runner instance is even worse. Those arguments should
# be passed to the constructor.
self._host = self.runner_parameters.get(RUNNER_HOST, None)
self._username = self.runner_parameters.get(RUNNER_USERNAME, None)
self._password = self.runner_parameters.get(RUNNER_PASSWORD, None)
self._command = self.runner_parameters.get(RUNNER_COMMAND, None)
self._timeout = self.runner_parameters.get(RUNNER_TIMEOUT, self._timeout)
def run(self, action_parameters):
# Make sure the dependencies are available
self._verify_winexe_exists()
args = self._get_winexe_command_args(host=self._host, username=self._username,
password=self._password,
command=self._command)
# Note: We don't send anything over stdin, we just create an unused pipe
# to avoid some obscure failures
exit_code, stdout, stderr, timed_out = run_command(cmd=args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False,
timeout=self._timeout)
if timed_out:
error = 'Action failed to complete in %s seconds' % (self._timeout)
else:
error = None
if exit_code != 0:
error = self._parse_winexe_error(stdout=stdout, stderr=stderr)
result = stdout
output = {
'stdout': stdout,
'stderr': stderr,
'exit_code': exit_code,
'result': result
}
if error:
output['error'] = error
status = LIVEACTION_STATUS_SUCCEEDED if exit_code == 0 else LIVEACTION_STATUS_FAILED
return (status, output, None)
| apache-2.0 | -9,076,009,354,234,646,000 | 38.621359 | 92 | 0.627542 | false |
psathyrella/partis | bin/add-seqs-to-outputs.py | 1 | 5004 | #!/usr/bin/env python
import csv
import os
import sys
csv.field_size_limit(sys.maxsize) # make sure we can write very large csv fields
import argparse
import operator
import colored_traceback.always
import collections
# if you move this script, you'll need to change this method of getting the imports
partis_dir = os.path.dirname(os.path.realpath(__file__)).replace('/bin', '')
sys.path.insert(1, partis_dir + '/python')
import utils
import glutils
from clusterpath import ClusterPath
dstr = """
Add seqs from the fasta file --new-seq-file to an annotation from --partis-output-file.
Looks for a cluster in the best partition that has sequences in common with the fasta file (and crashes if there's more than one such cluster).
Writes a single modified annotation to --outfile.
"""
parser = argparse.ArgumentParser(description=dstr,
formatter_class=argparse.ArgumentDefaultsHelpFormatter) # why tf isn't this printing the defaults?
parser.add_argument('--new-seq-file', required=True, help='fasta input file with seqs to be added to annotations + partitions in partis output yaml')
parser.add_argument('--partis-output-file', required=True, help='partis output file to which to add the seqs from --new-seq-file')
parser.add_argument('--partition-index', type=int, help='index of partition from which to take the clusters/annotations (if not set, uses the best partition)')
parser.add_argument('--glfo-dir', default=partis_dir + '/data/germlines/human', help='germline info directory. Only used if --partis-output-file is an old-style .csv, and this default dir may work if your output file doesn\'t have novel inferred genes. Otherwise, is the germline info dir from the partis inferred parameter directory corresponding to your output file --partis-output-file.')
parser.add_argument('--locus', default='igh')
parser.add_argument('--outfile', required=True, help='output partis yaml file')
parser.add_argument('--debug', action='store_true')
parser.add_argument('--n-test-subset-seqs', type=int, help='take only the first N seqs from both the fasta file and the annotation in the partis output file (e.g. for testing when the family is huge)')
args = parser.parse_args()
new_seqfos = utils.read_fastx(args.new_seq_file, sanitize_seqs=True)
print ' read %d seqs from %s' % (len(new_seqfos), args.new_seq_file)
glfo = None
if utils.getsuffix(args.partis_output_file) == '.csv':
print ' reading deprecated csv format, so need to read germline info from somewhere else, using --glfo-dir %s, hopefully it works' % args.glfo_dir
glfo = glutils.read_glfo(args.glfo_dir, locus=args.locus)
glfo, annotation_list, cpath = utils.read_output(args.partis_output_file, glfo=glfo, locus=args.locus)
if args.partition_index is not None:
print ' using non-best partition index %d (best is %d)' % (args.partition_index, cpath.i_best)
partition = cpath.partitions[cpath.i_best if args.partition_index is None else args.partition_index]
print ' read partition with %d clusters from %s' % (len(partition), args.partis_output_file)
new_uids = set(sfo['name'] for sfo in new_seqfos)
clusters_with_overlap = []
for cluster in partition:
overlap_uids = set(cluster) & new_uids
if len(overlap_uids) > 0:
clusters_with_overlap.append((cluster, overlap_uids))
if len(clusters_with_overlap) == 0:
raise Exception('no clusters in partition have any overlap with sequences from fasta file')
elif len(clusters_with_overlap) > 1:
# raise Exception('too many clusters %d in the partition overlaps with sequences from the fasta file' % len(clusters_with_overlap))
clusters_with_overlap = sorted(clusters_with_overlap, key=lambda p: len(p[1]), reverse=True)
ostrs = ['%d %d'%(len(c), len(o)) for c, o in clusters_with_overlap]
print ' %s more than one cluster overlaps with sequences from fasta file, just taking first one (size overlap): %s, %s' % (utils.color('yellow', 'warning'), utils.color('red', ostrs[0]), ', '.join(ostrs[1:]))
old_cluster = clusters_with_overlap[0][0]
print ' adding %d fasta sequences to cluster of size %d (%d fasta sequences were already in cluster)' % (len(new_uids - set(old_cluster)), len(old_cluster), len(new_uids & set(old_cluster)))
sfos_to_add = [sfo for sfo in new_seqfos if sfo['name'] not in old_cluster]
annotation_dict = utils.get_annotation_dict(annotation_list)
annotation = annotation_dict[':'.join(old_cluster)]
if args.n_test_subset_seqs is not None:
print ' taking only first %d seqs from fasta and annotation' % args.n_test_subset_seqs
utils.restrict_to_iseqs(annotation, list(range(args.n_test_subset_seqs)), glfo)
sfos_to_add = sfos_to_add[:args.n_test_subset_seqs]
utils.add_seqs_to_line(annotation, sfos_to_add, glfo, debug=args.debug)
output_headers = list(set(annotation_list[0].keys()) | set(utils.annotation_headers)) # try to pick up any extra headers that were written to the file
utils.write_annotations(args.outfile, glfo, [annotation], output_headers)
| gpl-3.0 | 3,960,263,518,355,998,000 | 63.153846 | 391 | 0.733613 | false |
adityahase/frappe | frappe/core/doctype/navbar_settings/navbar_settings.py | 1 | 1160 | # -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class NavbarSettings(Document):
def validate(self):
self.validate_standard_navbar_items()
def validate_standard_navbar_items(self):
doc_before_save = self.get_doc_before_save()
before_save_items = [item for item in \
doc_before_save.help_dropdown + doc_before_save.settings_dropdown if item.is_standard]
after_save_items = [item for item in \
self.help_dropdown + self.settings_dropdown if item.is_standard]
if not frappe.flags.in_patch and (len(before_save_items) > len(after_save_items)):
frappe.throw(_("Please hide the standard navbar items instead of deleting them"))
@frappe.whitelist()
def get_app_logo():
app_logo = frappe.db.get_single_value('Navbar Settings', 'app_logo')
if not app_logo:
app_logo = frappe.get_hooks('app_logo_url')[-1]
return app_logo
def get_navbar_settings():
navbar_settings = frappe.get_single('Navbar Settings')
return navbar_settings
| mit | 5,117,916,987,784,409,000 | 28 | 89 | 0.735345 | false |
Julian/home-assistant | homeassistant/components/notify/pushbullet.py | 1 | 4409 | """
PushBullet platform for notify component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.pushbullet/
"""
import logging
from homeassistant.components.notify import (
ATTR_TARGET, ATTR_TITLE, BaseNotificationService)
from homeassistant.const import CONF_API_KEY
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument
def get_service(hass, config):
"""Get the PushBullet notification service."""
from pushbullet import PushBullet
from pushbullet import InvalidKeyError
if CONF_API_KEY not in config:
_LOGGER.error("Unable to find config key '%s'", CONF_API_KEY)
return None
try:
pushbullet = PushBullet(config[CONF_API_KEY])
except InvalidKeyError:
_LOGGER.error(
"Wrong API key supplied. "
"Get it at https://www.pushbullet.com/account")
return None
return PushBulletNotificationService(pushbullet)
# pylint: disable=too-few-public-methods
class PushBulletNotificationService(BaseNotificationService):
"""Implement the notification service for Pushbullet."""
def __init__(self, pb):
"""Initialize the service."""
self.pushbullet = pb
self.pbtargets = {}
self.refresh()
def refresh(self):
"""Refresh devices, contacts, etc.
pbtargets stores all targets available from this pushbullet instance
into a dict. These are PB objects!. It sacrifices a bit of memory
for faster processing at send_message.
As of sept 2015, contacts were replaced by chats. This is not
implemented in the module yet.
"""
self.pushbullet.refresh()
self.pbtargets = {
'device': {
tgt.nickname.lower(): tgt for tgt in self.pushbullet.devices},
'channel': {
tgt.channel_tag.lower(): tgt for
tgt in self.pushbullet.channels},
}
def send_message(self, message=None, **kwargs):
"""Send a message to a specified target.
If no target specified, a 'normal' push will be sent to all devices
linked to the PB account.
Email is special, these are assumed to always exist. We use a special
call which doesn't require a push object.
"""
targets = kwargs.get(ATTR_TARGET)
title = kwargs.get(ATTR_TITLE)
refreshed = False
if not targets:
            # Backward compatibility, notify all devices in own account
self.pushbullet.push_note(title, message)
_LOGGER.info('Sent notification to self')
return
# Make list if not so
if not isinstance(targets, list):
targets = [targets]
# Main loop, Process all targets specified
for target in targets:
try:
ttype, tname = target.split('/', 1)
except ValueError:
_LOGGER.error('Invalid target syntax: %s', target)
continue
# Target is email, send directly, don't use a target object
# This also seems works to send to all devices in own account
if ttype == 'email':
self.pushbullet.push_note(title, message, email=tname)
_LOGGER.info('Sent notification to email %s', tname)
continue
# Refresh if name not found. While awaiting periodic refresh
# solution in component, poor mans refresh ;)
if ttype not in self.pbtargets:
_LOGGER.error('Invalid target syntax: %s', target)
continue
tname = tname.lower()
if tname not in self.pbtargets[ttype] and not refreshed:
self.refresh()
refreshed = True
# Attempt push_note on a dict value. Keys are types & target
# name. Dict pbtargets has all *actual* targets.
try:
self.pbtargets[ttype][tname].push_note(title, message)
_LOGGER.info('Sent notification to %s/%s', ttype, tname)
except KeyError:
_LOGGER.error('No such target: %s/%s', ttype, tname)
continue
except self.pushbullet.errors.PushError:
_LOGGER.error('Notify failed to: %s/%s', ttype, tname)
continue
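
# Illustrative sketch (not part of the original file): targets follow the
# 'type/name' syntax parsed above; the device, channel and email names here
# are hypothetical.
#
#   service = PushBulletNotificationService(pushbullet)
#   service.send_message('hello',
#                        title='greeting',
#                        target=['device/my-phone',
#                                'channel/my-channel',
#                                'email/someone@example.com'])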
| mit | -4,096,327,190,134,021,000 | 33.992063 | 78 | 0.604445 | false |
mdworks2016/work_development | Python/20_Third_Certification/venv/lib/python3.7/site-packages/django/contrib/gis/db/models/functions.py | 1 | 16962 | from decimal import Decimal
from django.contrib.gis.db.models.fields import BaseSpatialField, GeometryField
from django.contrib.gis.db.models.sql import AreaField, DistanceField
from django.contrib.gis.geos import GEOSGeometry
from django.core.exceptions import FieldError
from django.db.models import (
BooleanField, FloatField, IntegerField, TextField, Transform,
)
from django.db.models.expressions import Func, Value
from django.db.models.functions import Cast
from django.db.utils import NotSupportedError
from django.utils.functional import cached_property
NUMERIC_TYPES = (int, float, Decimal)
class GeoFuncMixin:
function = None
geom_param_pos = (0,)
def __init__(self, *expressions, **extra):
super().__init__(*expressions, **extra)
# Ensure that value expressions are geometric.
for pos in self.geom_param_pos:
expr = self.source_expressions[pos]
if not isinstance(expr, Value):
continue
try:
output_field = expr.output_field
except FieldError:
output_field = None
geom = expr.value
if not isinstance(geom, GEOSGeometry) or output_field and not isinstance(output_field, GeometryField):
raise TypeError("%s function requires a geometric argument in position %d." % (self.name, pos + 1))
if not geom.srid and not output_field:
raise ValueError("SRID is required for all geometries.")
if not output_field:
self.source_expressions[pos] = Value(geom, output_field=GeometryField(srid=geom.srid))
@property
def name(self):
return self.__class__.__name__
@cached_property
def geo_field(self):
return self.source_expressions[self.geom_param_pos[0]].field
def as_sql(self, compiler, connection, function=None, **extra_context):
if self.function is None and function is None:
function = connection.ops.spatial_function_name(self.name)
return super().as_sql(compiler, connection, function=function, **extra_context)
def resolve_expression(self, *args, **kwargs):
res = super().resolve_expression(*args, **kwargs)
# Ensure that expressions are geometric.
source_fields = res.get_source_fields()
for pos in self.geom_param_pos:
field = source_fields[pos]
if not isinstance(field, GeometryField):
raise TypeError(
"%s function requires a GeometryField in position %s, got %s." % (
self.name, pos + 1, type(field).__name__,
)
)
base_srid = res.geo_field.srid
for pos in self.geom_param_pos[1:]:
expr = res.source_expressions[pos]
expr_srid = expr.output_field.srid
if expr_srid != base_srid:
# Automatic SRID conversion so objects are comparable.
res.source_expressions[pos] = Transform(expr, base_srid).resolve_expression(*args, **kwargs)
return res
def _handle_param(self, value, param_name='', check_types=None):
if not hasattr(value, 'resolve_expression'):
if check_types and not isinstance(value, check_types):
raise TypeError(
"The %s parameter has the wrong type: should be %s." % (
param_name, check_types)
)
return value
class GeoFunc(GeoFuncMixin, Func):
pass
class GeomOutputGeoFunc(GeoFunc):
@cached_property
def output_field(self):
return GeometryField(srid=self.geo_field.srid)
class SQLiteDecimalToFloatMixin:
"""
By default, Decimal values are converted to str by the SQLite backend, which
is not acceptable by the GIS functions expecting numeric values.
"""
def as_sqlite(self, compiler, connection, **extra_context):
for expr in self.get_source_expressions():
if hasattr(expr, 'value') and isinstance(expr.value, Decimal):
expr.value = float(expr.value)
return super().as_sql(compiler, connection, **extra_context)
class OracleToleranceMixin:
tolerance = 0.05
def as_oracle(self, compiler, connection, **extra_context):
tolerance = Value(self._handle_param(
self.extra.get('tolerance', self.tolerance),
'tolerance',
NUMERIC_TYPES,
))
clone = self.copy()
clone.set_source_expressions([*self.get_source_expressions(), tolerance])
return clone.as_sql(compiler, connection, **extra_context)
class Area(OracleToleranceMixin, GeoFunc):
arity = 1
@cached_property
def output_field(self):
return AreaField(self.geo_field)
def as_sql(self, compiler, connection, **extra_context):
if not connection.features.supports_area_geodetic and self.geo_field.geodetic(connection):
raise NotSupportedError('Area on geodetic coordinate systems not supported.')
return super().as_sql(compiler, connection, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
extra_context['template'] = '%(function)s(%(expressions)s, %(spheroid)d)'
extra_context['spheroid'] = True
return self.as_sql(compiler, connection, **extra_context)
class Azimuth(GeoFunc):
output_field = FloatField()
arity = 2
geom_param_pos = (0, 1)
class AsGeoJSON(GeoFunc):
output_field = TextField()
def __init__(self, expression, bbox=False, crs=False, precision=8, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
options = 0
if crs and bbox:
options = 3
elif bbox:
options = 1
elif crs:
options = 2
if options:
expressions.append(options)
super().__init__(*expressions, **extra)
class AsGML(GeoFunc):
geom_param_pos = (1,)
output_field = TextField()
def __init__(self, expression, version=2, precision=8, **extra):
expressions = [version, expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
super().__init__(*expressions, **extra)
def as_oracle(self, compiler, connection, **extra_context):
source_expressions = self.get_source_expressions()
version = source_expressions[0]
clone = self.copy()
clone.set_source_expressions([source_expressions[1]])
extra_context['function'] = 'SDO_UTIL.TO_GML311GEOMETRY' if version.value == 3 else 'SDO_UTIL.TO_GMLGEOMETRY'
return super(AsGML, clone).as_sql(compiler, connection, **extra_context)
class AsKML(AsGML):
def as_sqlite(self, compiler, connection, **extra_context):
# No version parameter
clone = self.copy()
clone.set_source_expressions(self.get_source_expressions()[1:])
return clone.as_sql(compiler, connection, **extra_context)
class AsSVG(GeoFunc):
output_field = TextField()
def __init__(self, expression, relative=False, precision=8, **extra):
relative = relative if hasattr(relative, 'resolve_expression') else int(relative)
expressions = [
expression,
relative,
self._handle_param(precision, 'precision', int),
]
super().__init__(*expressions, **extra)
class BoundingCircle(OracleToleranceMixin, GeoFunc):
def __init__(self, expression, num_seg=48, **extra):
super().__init__(expression, num_seg, **extra)
def as_oracle(self, compiler, connection, **extra_context):
clone = self.copy()
clone.set_source_expressions([self.get_source_expressions()[0]])
return super(BoundingCircle, clone).as_oracle(compiler, connection, **extra_context)
class Centroid(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 1
class Difference(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
class DistanceResultMixin:
@cached_property
def output_field(self):
return DistanceField(self.geo_field)
def source_is_geography(self):
return self.geo_field.geography and self.geo_field.srid == 4326
class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
geom_param_pos = (0, 1)
spheroid = None
def __init__(self, expr1, expr2, spheroid=None, **extra):
expressions = [expr1, expr2]
if spheroid is not None:
self.spheroid = self._handle_param(spheroid, 'spheroid', bool)
super().__init__(*expressions, **extra)
def as_postgresql(self, compiler, connection, **extra_context):
clone = self.copy()
function = None
expr2 = clone.source_expressions[1]
geography = self.source_is_geography()
if expr2.output_field.geography != geography:
if isinstance(expr2, Value):
expr2.output_field.geography = geography
else:
clone.source_expressions[1] = Cast(
expr2,
GeometryField(srid=expr2.output_field.srid, geography=geography),
)
if not geography and self.geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need special distance functions
if self.spheroid:
# DistanceSpheroid is more accurate and resource intensive than DistanceSphere
function = connection.ops.spatial_function_name('DistanceSpheroid')
# Replace boolean param by the real spheroid of the base field
clone.source_expressions.append(Value(self.geo_field.spheroid(connection)))
else:
function = connection.ops.spatial_function_name('DistanceSphere')
return super(Distance, clone).as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
# SpatiaLite returns NULL instead of zero on geodetic coordinates
extra_context['template'] = 'COALESCE(%(function)s(%(expressions)s, %(spheroid)s), 0)'
extra_context['spheroid'] = int(bool(self.spheroid))
return super().as_sql(compiler, connection, **extra_context)
class Envelope(GeomOutputGeoFunc):
arity = 1
class ForcePolygonCW(GeomOutputGeoFunc):
arity = 1
class GeoHash(GeoFunc):
output_field = TextField()
def __init__(self, expression, precision=None, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
super().__init__(*expressions, **extra)
def as_mysql(self, compiler, connection, **extra_context):
clone = self.copy()
# If no precision is provided, set it to the maximum.
if len(clone.source_expressions) < 2:
clone.source_expressions.append(Value(100))
return clone.as_sql(compiler, connection, **extra_context)
class GeometryDistance(GeoFunc):
output_field = FloatField()
arity = 2
function = ''
arg_joiner = ' <-> '
geom_param_pos = (0, 1)
class Intersection(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
@BaseSpatialField.register_lookup
class IsValid(OracleToleranceMixin, GeoFuncMixin, Transform):
lookup_name = 'isvalid'
output_field = BooleanField()
def as_oracle(self, compiler, connection, **extra_context):
sql, params = super().as_oracle(compiler, connection, **extra_context)
return "CASE %s WHEN 'TRUE' THEN 1 ELSE 0 END" % sql, params
class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
def __init__(self, expr1, spheroid=True, **extra):
self.spheroid = spheroid
super().__init__(expr1, **extra)
def as_sql(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection) and not connection.features.supports_length_geodetic:
raise NotSupportedError("This backend doesn't support Length on geodetic fields")
return super().as_sql(compiler, connection, **extra_context)
def as_postgresql(self, compiler, connection, **extra_context):
clone = self.copy()
function = None
if self.source_is_geography():
clone.source_expressions.append(Value(self.spheroid))
elif self.geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need length_spheroid
function = connection.ops.spatial_function_name('LengthSpheroid')
clone.source_expressions.append(Value(self.geo_field.spheroid(connection)))
else:
dim = min(f.dim for f in self.get_source_fields() if f)
if dim > 2:
function = connection.ops.length3d
return super(Length, clone).as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
function = None
if self.geo_field.geodetic(connection):
function = 'GeodesicLength' if self.spheroid else 'GreatCircleLength'
return super().as_sql(compiler, connection, function=function, **extra_context)
class LineLocatePoint(GeoFunc):
output_field = FloatField()
arity = 2
geom_param_pos = (0, 1)
class MakeValid(GeoFunc):
pass
class MemSize(GeoFunc):
output_field = IntegerField()
arity = 1
class NumGeometries(GeoFunc):
output_field = IntegerField()
arity = 1
class NumPoints(GeoFunc):
output_field = IntegerField()
arity = 1
class Perimeter(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
arity = 1
def as_postgresql(self, compiler, connection, **extra_context):
function = None
if self.geo_field.geodetic(connection) and not self.source_is_geography():
raise NotSupportedError("ST_Perimeter cannot use a non-projected non-geography field.")
dim = min(f.dim for f in self.get_source_fields())
if dim > 2:
function = connection.ops.perimeter3d
return super().as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
raise NotSupportedError("Perimeter cannot use a non-projected field.")
return super().as_sql(compiler, connection, **extra_context)
class PointOnSurface(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 1
class Reverse(GeoFunc):
arity = 1
class Scale(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc):
def __init__(self, expression, x, y, z=0.0, **extra):
expressions = [
expression,
self._handle_param(x, 'x', NUMERIC_TYPES),
self._handle_param(y, 'y', NUMERIC_TYPES),
]
if z != 0.0:
expressions.append(self._handle_param(z, 'z', NUMERIC_TYPES))
super().__init__(*expressions, **extra)
class SnapToGrid(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc):
def __init__(self, expression, *args, **extra):
nargs = len(args)
expressions = [expression]
if nargs in (1, 2):
expressions.extend(
[self._handle_param(arg, '', NUMERIC_TYPES) for arg in args]
)
elif nargs == 4:
# Reverse origin and size param ordering
expressions += [
*(self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[2:]),
*(self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[0:2]),
]
else:
raise ValueError('Must provide 1, 2, or 4 arguments to `SnapToGrid`.')
super().__init__(*expressions, **extra)
class SymDifference(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
class Transform(GeomOutputGeoFunc):
def __init__(self, expression, srid, **extra):
expressions = [
expression,
self._handle_param(srid, 'srid', int),
]
if 'output_field' not in extra:
extra['output_field'] = GeometryField(srid=srid)
super().__init__(*expressions, **extra)
class Translate(Scale):
def as_sqlite(self, compiler, connection, **extra_context):
clone = self.copy()
if len(self.source_expressions) < 4:
# Always provide the z parameter for ST_Translate
clone.source_expressions.append(Value(0))
return super(Translate, clone).as_sqlite(compiler, connection, **extra_context)
class Union(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
| apache-2.0 | 7,931,150,428,917,694,000 | 35.24359 | 117 | 0.635538 | false |
himaaaatti/qtile | libqtile/widget/backlight.py | 1 | 3044 | # Copyright (c) 2012 Tim Neumann
# Copyright (c) 2012, 2014 Tycho Andersen
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2014 Sean Vig
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
from . import base
from libqtile.log_utils import logger
BACKLIGHT_DIR = '/sys/class/backlight'
FORMAT = '{percent: 2.0%}'
class Backlight(base.InLoopPollText):
"""
A simple widget to show the current brightness of a monitor.
"""
filenames = {}
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
('backlight_name', 'acpi_video0', 'ACPI name of a backlight device'),
(
'brightness_file',
'brightness',
'Name of file with the '
'current brightness in /sys/class/backlight/backlight_name'
),
(
'max_brightness_file',
'max_brightness',
'Name of file with the '
'maximum brightness in /sys/class/backlight/backlight_name'
),
('update_interval', .2, 'The delay in seconds between updates'),
]
def __init__(self, **config):
base.InLoopPollText.__init__(self, **config)
self.add_defaults(Backlight.defaults)
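
    # Illustrative sketch (assumption, not from the original file): typical use
    # in a qtile bar, overriding the default 'acpi_video0' device name with a
    # hypothetical 'intel_backlight' device.
    #
    #   widget.Backlight(backlight_name='intel_backlight', update_interval=0.2)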
def _load_file(self, name):
try:
path = os.path.join(BACKLIGHT_DIR, self.backlight_name, name)
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return False
except Exception:
logger.exception("Failed to get %s" % name)
def _get_info(self):
try:
info = {
'brightness': float(self._load_file(self.brightness_file)),
'max': float(self._load_file(self.max_brightness_file)),
}
except TypeError:
return False
return info
def poll(self):
info = self._get_info()
if info is False:
return 'Error'
percent = info['brightness'] / info['max']
return FORMAT.format(percent=percent)
| mit | 7,803,237,369,298,870,000 | 32.822222 | 79 | 0.638962 | false |
raphaelrubino/nid | nn/mono/run.py | 1 | 1143 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import numpy as np
np.random.seed( 1337 )
import data_utils
from nid import Neural_information_density
import sys
if __name__ == '__main__':
if len( sys.argv ) != 9:
print( "\nUsage: ", sys.argv[ 0 ], "<context> <target> <vocabulary> <embedding size> <dropout> <batch size> <epochs> <output model>\n" )
exit()
context, target, vocab, embedding, dropout, batch, epoch, out_model = sys.argv[ 1: ]
embedding = np.int( embedding )
dropout = np.float( dropout )
batch = np.int( batch )
epoch = np.int( epoch )
print( "Loading vocabulary" )
vocab, max_features = data_utils.load_vocab( vocab )
print( "Loading contexts" )
context = data_utils.load_context( context )
print( "Loading targets" )
target = data_utils.load_target( target ) #, max_features )
max_length = context.shape[ 1 ]
validation_size = 0.25
print( "Data loaded" )
nid = Neural_information_density( context, target, max_features, max_length, batch, validation_size )
print( "Data prepared" )
print( "Training" )
nid.train( embedding, dropout, epoch, out_model )
| mit | 8,807,348,404,547,177,000 | 26.878049 | 138 | 0.677165 | false |
komuW/sewer | sewer/catalog.py | 1 | 2472 | import codecs, importlib, json, os
from typing import Dict, List, Sequence
from .auth import ProviderBase
class ProviderDescriptor:
def __init__(
self,
*,
name: str,
desc: str,
chals: Sequence[str],
args: Sequence[Dict[str, str]],
deps: Sequence[str],
path: str = None,
cls: str = None,
features: Sequence[str] = None,
memo: str = None,
) -> None:
"initialize a driver descriptor from one item in the catalog"
self.name = name
self.desc = desc
self.chals = chals
self.args = args
self.deps = deps
self.path = path
self.cls = cls
self.features = [] if features is None else features
self.memo = memo
def __str__(self) -> str:
return "Descriptor %s" % self.name
def get_provider(self) -> ProviderBase:
"return the class that implements this driver"
module_name = self.path if self.path else ("sewer.providers." + self.name)
module = importlib.import_module(module_name)
return getattr(module, self.cls if self.cls else "Provider")
class ProviderCatalog:
def __init__(self, filepath: str = "") -> None:
"intialize a catalog from either the default catalog.json or one named by filepath"
if not filepath:
here = os.path.abspath(os.path.dirname(__file__))
filepath = os.path.join(here, "catalog.json")
with codecs.open(filepath, "r", encoding="utf8") as f:
raw_catalog = json.load(f)
items = {} # type: Dict[str, ProviderDescriptor]
for item in raw_catalog:
k = item["name"]
if k in items:
print("WARNING: duplicate name %s skipped in catalog %s" % (k, filepath))
else:
items[k] = ProviderDescriptor(**item)
self.items = items
def get_item_list(self) -> List[ProviderDescriptor]:
"return the list of items in the catalog, sorted by name"
res = [i for i in self.items.values()]
res.sort(key=lambda i: i.name)
return res
def get_descriptor(self, name: str) -> ProviderDescriptor:
"return the ProviderDescriptor that matches name"
return self.items[name]
def get_provider(self, name: str) -> ProviderBase:
"return the class that implements the named driver"
return self.get_descriptor(name).get_provider()
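
# Illustrative sketch (assumption): 'example_dns' is a hypothetical driver name;
# real names come from the bundled catalog.json.
#
#   catalog = ProviderCatalog()
#   descriptor = catalog.get_descriptor('example_dns')
#   provider_cls = catalog.get_provider('example_dns')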
| mit | -5,405,137,072,739,433,000 | 30.692308 | 91 | 0.59021 | false |
wienerschnitzel/schnitzelserver | schnitzelserver/session/session.py | 1 | 4940 | import logging
import enum
from sqlalchemy import or_
from sqlalchemy import inspect
from schnitzelserver.moduleindex import ModuleIndex
from schnitzelserver.session.grant_access import SessionGrant
from schnitzelserver.pool import ModelPool, ViewPool
logger = logging.getLogger(__name__)
class AccessTypes(enum.Enum):
read = 'read'
write = 'write'
create = 'create'
delete = 'delete'
class Session():
"""
A Schnitzel Session
"""
def __init__(self, sql_session, user, model_pool: ModelPool, view_pool: ViewPool, module_index: ModuleIndex):
if user is None:
raise ValueError()
self._module_index = module_index or ModuleIndex()
self._model_pool = model_pool
self._user = user
self._sql_session = sql_session
self._granted_access = {}
self._view_pool = view_pool
view_pool = property(lambda self: self._view_pool)
module_index = property(lambda self: self._module_index)
model_pool = property(lambda self: self._model_pool)
user = property(lambda self: self._user)
sql_session = property(lambda self: self._sql_session)
def grant(self, model, field=None, access_type='read'):
return SessionGrant(self, model, field, access_type)
def model_access(self, model_name, field_name=None, model_id=None,
access_type=AccessTypes.read):
"""
Check user rights on a given model
:param model_name:
:param field_name:
:param model_id:
        :return: True if the requested access_type ('read', 'write',
            'create' or 'delete') is granted for the user, False otherwise
"""
field_match = lambda field: field == field_name
id_match = lambda _id: _id == model_id
# Query local
grants = self._granted_access.get(model_name, set())
for _id, _field, _type in grants:
if id_match(_id) and field_match(_field) and _type == access_type.value:
return True
# Query database
if self.user.groups:
Access = self.model_pool['schnitzel_model_access']
rules = self._sql_session.query(Access).filter(
Access.model_name == model_name,
Access.group_id.in_([g.id for g in self.user.groups]),
or_(Access.model_id == model_id, Access.model_id.is_(None)),
or_(Access.field_name == field_name, Access.field_name.is_(None)),
Access.access_type == access_type.value
).all()
if rules:
# cache
# for rule in rules:
# grants.add(
# (rule.model_id, rule.field_name, rule.access_type))
# self._granted_access[model_name] = grants
return True
else:
return False
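
    # Illustrative sketch (assumption): 'some_model' and the ids below are
    # hypothetical; the call returns True only if a grant or group rule matches.
    #
    #   if session.model_access('some_model', field_name='name',
    #                           model_id=1, access_type=AccessTypes.write):
    #       ...  # allowed to write the field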
def get_model_instance(self, model_name, model_id):
if not self.model_access(model_name, model_id=model_id):
print([s.id for s in self._sql_session.query(self.model_pool[model_name]).all()])
raise PermissionError("User {} does not have permission to read {} ({})".format(
self.user.username, model_name, model_id
))
instance = self._sql_session.query(self.model_pool[model_name]).get(model_id)
if instance is None:
raise ValueError("There does not exist an instance of {} with id {}".format(model_name, model_id))
return instance
def get_model_instance_as_dict(self, model_name, model_id):
model = self.model_pool[model_name]
instance = self.get_model_instance(model_name, model_id)
return {
name: getattr(instance, name) for name in inspect(model).columns.keys()
}
def create(self, model_name: str, dct: dict):
if not self.model_access(model_name, access_type=AccessTypes.create):
raise PermissionError('No creation rights on {}'.format(model_name))
new_model = self._model_pool[model_name](**dct)
self._sql_session.add(new_model)
def update(self, model_name: str, model_id: int, dct: dict):
missing_field_permissions = [field for field in dct if not self.model_access(model_name, field,
model_id, AccessTypes.write)]
if missing_field_permissions:
raise PermissionError('No write-rights on {} ({}) for fields: {}'.format(
model_name, model_id, missing_field_permissions
))
model = self._model_pool[model_name] # TODO: what about sql sessions and stuff?
model.update().where(model.id == model_id).values(**dct)
def add_module_entries(self, module_name):
db_entries = self.module_index[module_name].entries
for name, entries in db_entries.items():
for entry in entries:
self.create(name, entry) | lgpl-3.0 | 7,753,447,680,131,350,000 | 38.846774 | 114 | 0.591498 | false |
cprakashagr/PythonClass | src/maths/Haversine.py | 1 | 1450 | from math import radians, cos, sin, asin, sqrt
import time
current_milli_time = lambda: int(round(time.time() * 1000))
def haversine(point1, point2, miles = False):
AVG_EARTH_RADIUS = 6371
lat1, lng1 = point1
lat2, lng2 = point2
# convert all latitudes/longitudes from decimal degrees to radians
lat1, lng1, lat2, lng2 = map(radians, (lat1, lng1, lat2, lng2))
# calculate haversine
lat = lat2 - lat1
lng = lng2 - lng1
d = sin(lat * 0.5) ** 2 + cos(lat1) * cos(lat2) * sin(lng * 0.5) ** 2
h = 2 * AVG_EARTH_RADIUS * asin(sqrt(d))
if miles:
return h * 0.621371 # in miles
else:
return h
pass
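
# Illustrative sketch (not in the original script): the original Lyon/Paris
# coordinates (commented out in main() below) are roughly 392 km apart, so one
# would expect approximately:
#
#   haversine((45.7597, 4.8422), (48.8567, 2.3508))        # ~392.2 (km)
#   haversine((45.7597, 4.8422), (48.8567, 2.3508), True)  # ~243.7 (miles)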
def main():
# lyon = (45.7597, 4.8422)
# paris = (48.8567, 2.3508)
lyon = (12.9210784, 77.6936946) # Saroj
paris = (12.9132164, 77.6234387) # Snapwiz
totalDelay = 0
start = current_milli_time()
for i in range(0,300000,1):
# print i
start = current_milli_time()
dist = haversine(lyon, paris)
end = current_milli_time()
        delay = end - start  # elapsed time for this call; was start-end, which is never positive
if delay > 0:
totalDelay += delay
end = current_milli_time()
print end
print start
print "That's All. Total Delay: " + str(end-start)
# time.sleep(5)
# start = time.time()
# print (haversine(lyon, paris, miles=True))
# end = time.time()
# print "%.20f" % start-end
pass
if __name__ == '__main__':
main()
| mit | -8,202,962,493,890,972,000 | 22.015873 | 73 | 0.570345 | false |
dedupeio/dedupe-examples | pgsql_big_dedupe_example/pgsql_big_dedupe_example_init_db.py | 1 | 10090 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is a setup script for pgsql_big_dedupe_example. It downloads a zip file of
Illinois campaign contributions and loads them into a PostgreSQL database
named 'contributions'.
__Note:__ You will need to run this script first before executing
pgsql_big_dedupe_example.py (see also
[mysql_example.py](http://datamade.github.com/dedupe-examples/docs/mysql_example.html)
for the MySQL variant).
Tables created:
* raw_table - raw import of entire CSV file
* donors - all distinct donors based on name and address
* recipients - all distinct campaign contribution recipients
* contributions - contribution amounts tied to donor and recipients tables
"""
import csv
import os
import zipfile
import dj_database_url
import psycopg2
import psycopg2.extras
import unidecode
import requests
_file = 'Illinois-campaign-contributions'
contributions_zip_file = _file + '.txt.zip'
contributions_txt_file = _file + '.txt'
contributions_csv_file = _file + '.csv'
if not os.path.exists(contributions_zip_file):
print('downloading', contributions_zip_file, '(~60mb) ...')
u = requests.get(
'https://s3.amazonaws.com/dedupe-data/Illinois-campaign-contributions.txt.zip')
localFile = open(contributions_zip_file, 'wb')
localFile.write(u.content)
localFile.close()
if not os.path.exists(contributions_txt_file):
zip_file = zipfile.ZipFile(contributions_zip_file, 'r')
print('extracting %s' % contributions_zip_file)
zip_file_contents = zip_file.namelist()
for f in zip_file_contents:
if ('.txt' in f):
zip_file.extract(f)
zip_file.close()
# Create a cleaned up CSV version of file with consistent row lengths.
# Postgres COPY doesn't handle "ragged" files very well
if not os.path.exists(contributions_csv_file):
print('converting tab-delimited raw file to csv...')
with open(contributions_txt_file, 'rU') as txt_file, \
open(contributions_csv_file, 'w') as csv_file:
csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
for line in txt_file:
if not all(ord(c) < 128 for c in line):
line = unidecode.unidecode(line)
row = line.rstrip('\t\r\n').split('\t')
if len(row) != 29:
print('skipping bad row (length %s, expected 29):' % len(row))
print(row)
continue
csv_writer.writerow(row)
db_conf = dj_database_url.config()
if not db_conf:
raise Exception(
'set DATABASE_URL environment variable with your connection, e.g. '
'export DATABASE_URL=postgres://user:password@host/mydatabase'
)
conn = psycopg2.connect(database=db_conf['NAME'],
user=db_conf['USER'],
password=db_conf['PASSWORD'],
host=db_conf['HOST'],
port=db_conf['PORT'])
c = conn.cursor()
print('importing raw data from csv...')
c.execute("DROP TABLE IF EXISTS raw_table")
c.execute("DROP TABLE IF EXISTS donors")
c.execute("DROP TABLE IF EXISTS recipients")
c.execute("DROP TABLE IF EXISTS contributions")
c.execute("DROP TABLE IF EXISTS processed_donors")
c.execute("CREATE TABLE raw_table "
"(reciept_id INT, last_name VARCHAR(70), first_name VARCHAR(35), "
" address_1 VARCHAR(35), address_2 VARCHAR(36), city VARCHAR(20), "
" state VARCHAR(15), zip VARCHAR(11), report_type VARCHAR(24), "
" date_recieved VARCHAR(10), loan_amount VARCHAR(12), "
" amount VARCHAR(23), receipt_type VARCHAR(23), "
" employer VARCHAR(70), occupation VARCHAR(40), "
" vendor_last_name VARCHAR(70), vendor_first_name VARCHAR(20), "
" vendor_address_1 VARCHAR(35), vendor_address_2 VARCHAR(31), "
" vendor_city VARCHAR(20), vendor_state VARCHAR(10), "
" vendor_zip VARCHAR(10), description VARCHAR(90), "
" election_type VARCHAR(10), election_year VARCHAR(10), "
" report_period_begin VARCHAR(10), report_period_end VARCHAR(33), "
" committee_name VARCHAR(70), committee_id VARCHAR(37))")
conn.commit()
with open(contributions_csv_file, 'rU') as csv_file:
c.copy_expert("COPY raw_table "
"(reciept_id, last_name, first_name, "
" address_1, address_2, city, state, "
" zip, report_type, date_recieved, "
" loan_amount, amount, receipt_type, "
" employer, occupation, vendor_last_name, "
" vendor_first_name, vendor_address_1, "
" vendor_address_2, vendor_city, vendor_state, "
" vendor_zip, description, election_type, "
" election_year, "
" report_period_begin, report_period_end, "
" committee_name, committee_id) "
"FROM STDIN CSV HEADER", csv_file)
conn.commit()
print('creating donors table...')
c.execute("CREATE TABLE donors "
"(donor_id SERIAL PRIMARY KEY, "
" last_name VARCHAR(70), first_name VARCHAR(35), "
" address_1 VARCHAR(35), address_2 VARCHAR(36), "
" city VARCHAR(20), state VARCHAR(15), "
" zip VARCHAR(11), employer VARCHAR(70), "
" occupation VARCHAR(40))")
c.execute("INSERT INTO donors "
"(first_name, last_name, address_1, "
" address_2, city, state, zip, employer, occupation) "
"SELECT DISTINCT "
"LOWER(TRIM(first_name)), LOWER(TRIM(last_name)), "
"LOWER(TRIM(address_1)), LOWER(TRIM(address_2)), "
"LOWER(TRIM(city)), LOWER(TRIM(state)), LOWER(TRIM(zip)), "
"LOWER(TRIM(employer)), LOWER(TRIM(occupation)) "
"FROM raw_table")
conn.commit()
print('creating indexes on donors table...')
c.execute("CREATE INDEX donors_donor_info ON donors "
"(last_name, first_name, address_1, address_2, city, "
" state, zip)")
conn.commit()
print('creating recipients table...')
c.execute("CREATE TABLE recipients "
"(recipient_id SERIAL PRIMARY KEY, name VARCHAR(70))")
c.execute("INSERT INTO recipients "
"SELECT DISTINCT CAST(committee_id AS INTEGER), "
"committee_name FROM raw_table")
conn.commit()
print('creating contributions table...')
c.execute("CREATE TABLE contributions "
"(contribution_id INT, donor_id INT, recipient_id INT, "
" report_type VARCHAR(24), date_recieved DATE, "
" loan_amount VARCHAR(12), amount VARCHAR(23), "
" receipt_type VARCHAR(23), "
" vendor_last_name VARCHAR(70), "
" vendor_first_name VARCHAR(20), "
" vendor_address_1 VARCHAR(35), vendor_address_2 VARCHAR(31), "
" vendor_city VARCHAR(20), vendor_state VARCHAR(10), "
" vendor_zip VARCHAR(10), description VARCHAR(90), "
" election_type VARCHAR(10), election_year VARCHAR(10), "
" report_period_begin DATE, report_period_end DATE)")
c.execute("INSERT INTO contributions "
"SELECT reciept_id, donors.donor_id, CAST(committee_id AS INTEGER), "
" report_type, TO_DATE(TRIM(date_recieved), 'MM/DD/YYYY'), "
" loan_amount, amount, "
" receipt_type, vendor_last_name , "
" vendor_first_name, vendor_address_1,"
" vendor_address_2, "
" vendor_city, vendor_state, vendor_zip,"
" description, "
" election_type, election_year, "
" TO_DATE(TRIM(report_period_begin), 'MM/DD/YYYY'), "
" TO_DATE(TRIM(report_period_end), 'MM/DD/YYYY') "
"FROM raw_table JOIN donors ON "
"donors.first_name = LOWER(TRIM(raw_table.first_name)) AND "
"donors.last_name = LOWER(TRIM(raw_table.last_name)) AND "
"donors.address_1 = LOWER(TRIM(raw_table.address_1)) AND "
"donors.address_2 = LOWER(TRIM(raw_table.address_2)) AND "
"donors.city = LOWER(TRIM(raw_table.city)) AND "
"donors.state = LOWER(TRIM(raw_table.state)) AND "
"donors.employer = LOWER(TRIM(raw_table.employer)) AND "
"donors.occupation = LOWER(TRIM(raw_table.occupation)) AND "
"donors.zip = LOWER(TRIM(raw_table.zip))")
conn.commit()
print('creating indexes on contributions...')
c.execute("ALTER TABLE contributions ADD PRIMARY KEY(contribution_id)")
c.execute("CREATE INDEX donor_idx ON contributions (donor_id)")
c.execute("CREATE INDEX recipient_idx ON contributions (recipient_id)")
conn.commit()
print('nullifying empty strings in donors...')
c.execute(
"UPDATE donors "
"SET "
"first_name = CASE first_name WHEN '' THEN NULL ELSE first_name END, "
"last_name = CASE last_name WHEN '' THEN NULL ELSE last_name END, "
"address_1 = CASE address_1 WHEN '' THEN NULL ELSE address_1 END, "
"address_2 = CASE address_2 WHEN '' THEN NULL ELSE address_2 END, "
"city = CASE city WHEN '' THEN NULL ELSE city END, "
"state = CASE state WHEN '' THEN NULL ELSE state END, "
"employer = CASE employer WHEN '' THEN NULL ELSE employer END, "
"occupation = CASE occupation WHEN '' THEN NULL ELSE occupation END, "
"zip = CASE zip WHEN '' THEN NULL ELSE zip END"
)
conn.commit()
print('creating processed_donors...')
c.execute("CREATE TABLE processed_donors AS "
"(SELECT donor_id, "
" LOWER(city) AS city, "
" CASE WHEN (first_name IS NULL AND last_name IS NULL) "
" THEN NULL "
" ELSE LOWER(CONCAT_WS(' ', first_name, last_name)) "
" END AS name, "
" LOWER(zip) AS zip, "
" LOWER(state) AS state, "
" CASE WHEN (address_1 IS NULL AND address_2 IS NULL) "
" THEN NULL "
" ELSE LOWER(CONCAT_WS(' ', address_1, address_2)) "
" END AS address, "
" LOWER(occupation) AS occupation, "
" LOWER(employer) AS employer, "
" CAST((first_name IS NULL) AS INTEGER) AS person "
" FROM donors)")
c.execute("CREATE INDEX processed_donor_idx ON processed_donors (donor_id)")
conn.commit()
c.close()
conn.close()
print('done')
| mit | -596,923,599,316,805,100 | 39.522088 | 87 | 0.615461 | false |
yunli2004/OpenClos | jnpr/openclos/util.py | 2 | 8115 | '''
Created on Aug 21, 2014
@author: moloyc
'''
import re
import os
import yaml
import platform
import datetime
import shutil
from netaddr import IPNetwork
import netifaces
from propLoader import propertyFileLocation
TWO_STAGE_CONFIGURATOR_DEFAULT_ATTEMPT=5
TWO_STAGE_CONFIGURATOR_DEFAULT_INTERVAL=30 # in seconds
TWO_STAGE_CONFIGURATOR_DEFAULT_VCP_LLDP_DELAY=40 # in seconds
def loadClosDefinition(closDefination = os.path.join(propertyFileLocation, 'closTemplate.yaml')):
'''
Loads clos definition from yaml file
'''
try:
stream = open(closDefination, 'r')
yamlStream = yaml.load(stream)
return yamlStream
except (OSError, IOError) as e:
print "File error:", e
except (yaml.scanner.ScannerError) as e:
print "YAML error:", e
        stream.close()
finally:
pass
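
# Illustrative sketch (assumption): called without arguments the function reads
# closTemplate.yaml from the configured property file location; the explicit
# path below is hypothetical.
#
#   closDefinition = loadClosDefinition()
#   # closDefinition = loadClosDefinition('/path/to/myClos.yaml')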
def isPlatformUbuntu():
#return 'ubuntu' in platform.platform().lower()
result = os.popen("grep -i ubuntu /etc/*-release").read()
return result is not None and len(result) > 0
def isPlatformCentos():
#return 'centos' in platform.platform().lower()
result = os.popen("grep -i centos /etc/*-release").read()
return result is not None and len(result) > 0
def isPlatformWindows():
return 'windows' in platform.platform().lower()
def backupDatabase(conf):
if conf is not None and 'dbUrl' in conf:
match = re.match(r"sqlite:\/\/\/(.*)", conf['dbUrl'])
if match is not None:
dbFileName = match.group(1)
if dbFileName != '':
timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
backupDbFileName = dbFileName + '.' + timestamp
shutil.copyfile(dbFileName, backupDbFileName)
def getMgmtIps(prefix, startingIP, mask, count):
'''
returns list of management IP for given number of devices
Keyword arguments:
prefix -- ip prefix, example 1.2.3.4/24
count -- number of devices
'''
mgmtIps = []
cidr = None
if startingIP is not None and mask is not None:
cidr = startingIP + '/' + str(mask)
else:
cidr = prefix
if cidr is not None:
ipNetwork = IPNetwork(cidr)
ipNetworkList = list(ipNetwork)
start = ipNetworkList.index(ipNetwork.ip)
end = start + count
ipList = ipNetworkList[start:end]
for ip in ipList:
mgmtIps.append(str(ip) + '/' + str(ipNetwork.prefixlen))
return mgmtIps
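
# Illustrative sketch (assumption, addresses are examples): asking for three
# management IPs from a /24 prefix starting at .1 would yield
#
#   getMgmtIps('192.168.0.1/24', None, None, 3)
#   # ['192.168.0.1/24', '192.168.0.2/24', '192.168.0.3/24']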
def getMgmtIpsForLeaf():
return []
def isZtpStaged(conf):
if conf is not None and conf.get('deploymentMode') is not None:
return conf['deploymentMode'].get('ztpStaged', False)
return False
def getZtpStagedInterval(conf):
if isZtpStaged(conf) == True:
return conf['deploymentMode'].get('ztpStagedInterval', TWO_STAGE_CONFIGURATOR_DEFAULT_INTERVAL)
else:
return None
def getZtpStagedAttempt(conf):
if isZtpStaged(conf) == True:
return conf['deploymentMode'].get('ztpStagedAttempt', TWO_STAGE_CONFIGURATOR_DEFAULT_ATTEMPT)
else:
return None
def getTwoStageConfigurationCallback(conf):
if isZtpStaged(conf) == True:
return conf.get('twoStageConfigurationCallback')
else:
return None
def getVcpLldpDelay(conf):
if isZtpStaged(conf) == True:
return conf['deploymentMode'].get('ztpVcpLldpDelay', TWO_STAGE_CONFIGURATOR_DEFAULT_VCP_LLDP_DELAY)
else:
return None
def enumerateRoutableIpv4Addresses():
addrs = []
intfs = netifaces.interfaces()
for intf in intfs:
if intf != 'lo':
addrDict = netifaces.ifaddresses(intf)
ipv4AddrInfoList = addrDict.get(netifaces.AF_INET)
if ipv4AddrInfoList is not None:
for ipv4AddrInfo in ipv4AddrInfoList:
addrs.append(ipv4AddrInfo['addr'])
return addrs
def getImageNameForDevice(pod, device):
if device.role == 'spine':
return pod.spineJunosImage
elif device.role == 'leaf':
for leafSetting in pod.leafSettings:
if leafSetting.deviceFamily == device.family:
return leafSetting.junosImage
return None
otherPortRegx = re.compile(r"[0-9A-Za-z]+\.?(\d{0,2})")
def interfaceNameToUniqueSequenceNumber(interfaceName):
'''
:param str: name, examples:
IFD: et-0/0/1, et-0/0/0, et-0/0/101, lo0, irb, vme
IFL: et-0/0/1.0, et-0/0/0.0, et-0/0/0.99, lo0.0
IFD with fake name: uplink-0, uplink-1
IFL with fake name: uplink-0.0, uplink-1.0, uplink-1.99
'''
if interfaceName is None or interfaceName == '':
return None
sequenceNum = _matchFpcPicPort(interfaceName)
if sequenceNum != None:
return sequenceNum
sequenceNum = _matchFakeName(interfaceName)
if sequenceNum != None:
return sequenceNum
match = otherPortRegx.match(interfaceName)
if match is not None:
return int(interfaceName.encode('hex'), 16)
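
# Illustrative examples (values follow from the weighting in _matchFpcPicPort
# and _matchFakeName below; the interface names are hypothetical):
#
#   interfaceNameToUniqueSequenceNumber('et-0/0/1')   # 100001
#   interfaceNameToUniqueSequenceNumber('xe-0/0/2')   # 200002
#   interfaceNameToUniqueSequenceNumber('uplink-3')   # 90000003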
fpcPicPortRegx = re.compile(r"([a-z]+)-(\d)\/(\d)\/(\d{1,3})\.?(\d{0,2})")
def _matchFpcPicPort(interfaceName):
match = fpcPicPortRegx.match(interfaceName)
if match is not None:
speed = match.group(1)
fpc = match.group(2)
pic = match.group(3)
port = match.group(4)
unit = match.group(5)
if not unit:
unit = 0
if 'et' in speed:
speedInt = 1
elif 'xe' in speed:
speedInt = 2
elif 'ge' in speed:
speedInt = 3
else:
speedInt = 4
sequenceNum = 100000 * speedInt + 10000 * int(fpc) + 1000 * int(pic) + int(port)
if unit != 0:
sequenceNum = 100 * sequenceNum + int(unit)
return sequenceNum
fakeNameRegxList = [(re.compile(r"uplink-(\d{1,3})\.?(\d{0,2})"), 90000000, 91000000),
(re.compile(r"access-(\d{1,3})\.?(\d{0,2})"), 92000000, 93000000)
]
def _matchFakeName(interfaceName):
for fakeNameRegx, intfStart, subIntfStart in fakeNameRegxList:
match = fakeNameRegx.match(interfaceName)
if match is not None:
port = match.group(1)
unit = match.group(2)
if not unit:
unit = 0
sequenceNum = intfStart + int(port)
if unit != 0:
sequenceNum = subIntfStart + 100 * int(port) + int(unit)
return sequenceNum
def getPortNumberFromName(interfaceName):
match = fpcPicPortRegx.match(interfaceName)
if match is not None:
return match.group(4)
def replaceFpcNumberOfInterfaces(interfaceNames, newFpc):
fixedInterfaceNames = []
for interfaceName in interfaceNames:
match = fpcRegx.match(interfaceName)
if match is not None:
fixedInterfaceNames.append(match.group(1) + '-' + newFpc + '/' + match.group(3))
return fixedInterfaceNames
fpcRegx = re.compile(r"([a-z]+)-(\d)\/(.*)")
def replaceFpcNumberOfInterface(interfaceName, newFpc):
match = fpcRegx.match(interfaceName)
if match is not None:
return match.group(1) + '-' + newFpc + '/' + match.group(3)
def getOutFolderPath(conf, ipFabric):
if 'outputDir' in conf:
outputDir = os.path.join(conf['outputDir'], ipFabric.id+'-'+ipFabric.name)
else:
outputDir = os.path.join('out', ipFabric.id+'-'+ipFabric.name)
return outputDir
def createOutFolder(conf, ipFabric):
path = getOutFolderPath(conf, ipFabric)
if not os.path.exists(path):
os.makedirs(path)
return path
def deleteOutFolder(conf, ipFabric):
path = getOutFolderPath(conf, ipFabric)
shutil.rmtree(path, ignore_errors=True)
def stripNetmaskFromIpString(ipString):
pos = ipString.find('/')
if pos != -1:
return ipString[:pos]
else:
return ipString
def stripPlusSignFromIpString(ipString):
pos = ipString.find('+')
if pos != -1:
return ipString[:pos]
else:
return ipString
| apache-2.0 | 3,603,975,910,876,883,000 | 29.855513 | 107 | 0.616266 | false |
xncbf/authome | log/views.py | 1 | 3426 | from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.urls import reverse
from django.db import connection
from django.shortcuts import render, HttpResponse
from django.utils import timezone
from django.views.generic.list import View
from dev.models import MacroLog, UserPage
from utils.services import dictfetchall
class Log(LoginRequiredMixin, View):
template_name = "log/log.html"
login_url = '/accounts/login/'
def get(self, request, *args, **kwargs):
context = {}
qs = MacroLog.objects.filter(macro__user=request.user).order_by('macro', 'user', '-created').distinct('macro',
'user')
unsorted_results = qs.all()
context['macroLog'] = sorted(unsorted_results, key=lambda t: t.created, reverse=True)
context['userPage'] = UserPage.objects.filter(macro__user=request.user).distinct('user')
return render(self.request, self.template_name, context)
def post(self, request, *args, **kwargs):
with connection.cursor() as cursor:
if request.is_ajax():
ddl_user = ','.join(request.POST.get('ddlUser').split(','))
if ddl_user:
where_str = 'AND ML.user_id IN ({0})'.format(ddl_user)
else:
where_str = ''
cursor.execute("""SELECT
ML.macro_id,
ML.created,
ML.ip,
M.title,
U.email
FROM main_macrolog ML
LEFT JOIN main_macro M ON M.id = ML.macro_id
LEFT JOIN auth_user U ON U.id = ML.user_id
WHERE M.user_id = '{0}' {1}
ORDER BY ML.created DESC
LIMIT 20""".format(request.user.pk, where_str))
obj = dictfetchall(cursor)
result = self.set_html(obj)
return HttpResponse(result)
def set_html(self, obj, html=''):
for e in obj:
user = User.objects.get(email=e.get('email'))
local_time = timezone.localtime(e.get('created'))
if user.socialaccount_set.all():
profile_url = user.socialaccount_set.all()[0].get_avatar_url()
else:
profile_url = static('images/Jigglypuff.png')
html += """<li class="collection-item user-list">
<a href="{0}">
<div>{1}</div>
<div class="chip">
<img src="{2}">{3}
</div>
<span class="secondary-content">{4}<br>{5}</span>
</a>
</li>""".format(reverse('user_manage', kwargs={'macro_id': e.get('macro_id')}),
                               e.get('title') or 'Untitled',
profile_url,
e.get('email'),
e.get('ip'),
local_time.strftime('%y-%m-%d %H:%M'))
if len(obj) == 0:
html = '<li class="collection-item user-list">사용 흔적이 없어요!</li>'
return html
| mit | 8,670,323,591,795,827,000 | 43.763158 | 118 | 0.496179 | false |
LabD/wagtail-personalisation | tests/factories/rule.py | 1 | 1032 | from __future__ import absolute_import, unicode_literals
import datetime
import factory
from wagtail_personalisation import rules
class DayRuleFactory(factory.DjangoModelFactory):
class Meta:
model = rules.DayRule
class DeviceRuleFactory(factory.DjangoModelFactory):
class Meta:
model = rules.DeviceRule
class QueryRuleFactory(factory.DjangoModelFactory):
class Meta:
model = rules.QueryRule
class ReferralRuleFactory(factory.DjangoModelFactory):
regex_string = "test.test"
class Meta:
model = rules.ReferralRule
class TimeRuleFactory(factory.DjangoModelFactory):
start_time = datetime.time(8, 0, 0)
end_time = datetime.time(23, 0, 0)
class Meta:
model = rules.TimeRule
class VisitCountRuleFactory(factory.DjangoModelFactory):
operator = "more_than"
count = 0
class Meta:
model = rules.VisitCountRule
class OriginCountryRuleFactory(factory.DjangoModelFactory):
class Meta:
model = rules.OriginCountryRule
| mit | 6,287,791,869,828,322,000 | 18.471698 | 59 | 0.719961 | false |
jfrfonseca/IndexadorDidaticoPython | fileIO.py | 1 | 11925 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
José F. R. Fonseca
See Attached License file
Controls the access to the disk. Defines the class DIskAccessControl,
an object to control the disk files. Multithread-writes the files
'''
import ast
import os
import time
from linecache import getline
from threading import Thread
'''
ONLY WRITES TO THE FILE WHEN THE CACHE OF LINES TO WRITE EXCEEDS
THIS MANY BYTES, or if it is the last batch of files to be written.
'''
FILEACCESS_THRASHOLD = 1024*1024/32
'''
CLASSES
'''
class DiskAccessControl(): # @IgnorePep8
'''
Control the access to the disk, being the middleware to read and write the
index files
'''
def __init__(self, invertedIndexFileName, fileLen=None,
onMemInd=None, nameDict=None, mods=None):
'''
Instantiates the class as the only reference to the index files
:param invertedIndexFileName: string, name of the index file
:param fileLen: int, original number of lines of the index file, when
known
:param onMemInd: dictionary, on-memory index that translates terms
into file positions
:param nameDict: dictionary, on-memory index that translates the name
of each file indexed into a hash
:param mods: dictionary, side-loaded modifications to be put into the
index manually.
'''
# loads the name of the index file
self.GENERATED_INVERTED_INDEX = invertedIndexFileName
# if there is a parameter fileLen, uses it. if not, counts the number
# of lines in the indexFile
if fileLen is None:
# print "GETTING THE FILE # OF LINES!"
lineNum = 0
# reads every line, and counts the number of lines in the index file @IgnorePep8
with open(self.GENERATED_INVERTED_INDEX, "r") as indFile:
for lineNum, dummy in enumerate(indFile):
pass
self.fileLength = lineNum + 1
else:
self.fileLength = fileLen
# if there is a parameter onMemInd, uses it. if not, loads it from the
# memory dump file metaindex
if onMemInd is None:
print "FILLING MEMORY INDEX WITH LAST SESSION'S!"
# OnMemoryIndex: dictionary that maps WORD to HashWord
# Loads the metaindex file into main memory, into onMemoryIndex attribute @IgnorePep8
with open("metaIndex-"+self.GENERATED_INVERTED_INDEX, "r") as metaindex: # @IgnorePep8
data = metaindex.read()
self.onMemoryIndex = ast.literal_eval(data)
else:
self.onMemoryIndex = onMemInd
# if there is a parameter namesDict, uses it. if not, loads it from the
# memory dump file namesDict, mapping a file name to its hash
if nameDict is None:
print "FILLING NAMES DICTIONARY WITH LAST SESSION'S!"
# Loads the namesDict file into main memory, into namesDict attribute @IgnorePep8
with open("namesDict-"+self.GENERATED_INVERTED_INDEX, "r") as namesDict: # @IgnorePep8
data = namesDict.read()
self.namesDictionary = ast.literal_eval(data)
else:
self.namesDictionary = nameDict
# if there is a parameter mods, uses it. if not, creates a new empty
# python dictionary to retain on-memory changes to the index
if mods is None:
self.modifications = {}
else:
self.modifications = mods
'''
METHODS ###############################################
'''
def getIndexLine(self, word):
'''
GETS a line of the index file, containing the inverted list of the word
provided. If inexistent, returns an empty list
:return a list containing the index data of the word requested.
It may be: the inverted list on the index, the modifications done
to such list in memory, or an empty list for a new term to be indexed
:param word: string to retrieve the index data of it, a term
'''
# if the word is on the onMemoryIndex, and thereby on the file index,
if word in self.onMemoryIndex.keys():
# retrieves the hash of the word in wrd
wrd = self.onMemoryIndex[word]
# if that word has already been modified, its modifications will be
# on main memory, and do not need to be retrieved from the disk.
if wrd not in self.modifications.keys():
try:
# retrieves a list of the data in the line of the index
# file on disk that contains the inverted index of the
# word, given its hash. The value of wrd must be
# summed with 1 because there is no line 0 on files
return ast.literal_eval(getline(self.GENERATED_INVERTED_INDEX, int(wrd)+1).split(":::")[1]) # @IgnorePep8
# prints-out eventual exceptions, as the hash searched in
# the index file, and the data recovered from it, as a string
# separated by "(|||)" rather than spaces
except:
print wrd, "(|||)", getline(self.GENERATED_INVERTED_INDEX, int(wrd)+1) # @IgnorePep8
else:
# returns the modifications to the index line, already on memory @IgnorePep8
return self.modifications[wrd]
# if the word searched is not in the index,
else:
# opens the index file, generates a new hash for the word to be
# indexed, and writes an empty list to the index file at the
            # word's future position. Returns an empty list
with open(self.GENERATED_INVERTED_INDEX, "a") as indFile:
self.onMemoryIndex[word] = str(len(self.onMemoryIndex.keys())) # @IgnorePep8
indFile.write(self.onMemoryIndex[word]+":::"+"[]"+"\n")
self.fileLength += 1
return []
def pushIntoIndexFile(self, fileIndexedName, word, wordIndexTouple):
'''
        Pushes the freshly produced inverted list of a term into the index
:param fileIndexedName: string, name of the file just indexed
:param word: string, term to be pushed into the index
        :param wordIndexTouple: tuple, containing the number of elements
        in the positions list, and an (integer) positions list of occurrences of
the term in the file indexed
'''
# gets the line of the index for the term pushed
indexLine = self.getIndexLine(word)
# if the file pushed has already been indexed before, recovers its
# hash name
if fileIndexedName in self.namesDictionary.keys():
hashName = self.namesDictionary[fileIndexedName]
# if not, creates a new hash for the file name, as a number
else:
self.namesDictionary[fileIndexedName] = hashName = str(len(self.namesDictionary.keys())) # @IgnorePep8
try:
# includes the index of the new file pushed into the respective
# line in the on memory inverted list of the term, avoiding
# repetitions. Includes the name of the file, the number of
            # occurrences and the positions at which the indexed term occurs.
indexLine.append((hashName, wordIndexTouple[0], (list(set(wordIndexTouple[1]))), )) # @IgnorePep8
# includes the freshly produced new index for the term in the
            # on-memory modifications to be written to disk
self.modifications[self.onMemoryIndex[word]] = indexLine
        # reports an eventual IndexError. bureaucracy
except IndexError:
print "Got an IndexError!"+str((word, self.onMemoryIndex[word], indexLine, )) # @IgnorePep8
def merge(self, outerModifications):
'''
        Pushes provided modifications (made by another thread, for example)
into this instance's modifications list
:param outerModifications: dictionary, mapping terms to inverted lists,
are modifications to the index file imported from another instance
'''
# for each key of the outer modifications dictionary,
for outKey in outerModifications.keys():
if outKey in self.modifications.keys():
# if the key is on the current modifications list, joins the
# contents of both lists, and sorts by the hash of the terms
self.modifications[outKey].extend(outerModifications[outKey])
self.modifications[outKey] = sorted(self.modifications[outKey],
key=lambda mod: int(mod[0])) # @IgnorePep8
# if the outer key is not on the current modifications list,
# adds to it
else:
self.modifications[outKey] = outerModifications[outKey]
def dumpMetafiles(self):
'''
Dumps the on-memory metafiles, the dictionaries mapping terms to file
positions (hashes) and file names to hashes, to disk files.
'''
with open("metaIndex-"+self.GENERATED_INVERTED_INDEX, "w") as metaindex: # @IgnorePep8
metaindex.write(str(self.onMemoryIndex))
with open("namesDict-"+self.GENERATED_INVERTED_INDEX, "w") as namesDict: # @IgnorePep8
namesDict.write(str(self.namesDictionary))
def dumpMemory(self):
'''
Dumps the metafiles and writes the modifications to the index. It is,
        by far, the most time-costly operation in the entire program, which was
        to be expected, since it involves heavy file writing and reading.
'''
# Creates a new thread to write the metafiles concurrently
metafileWriter = Thread(target=self.dumpMetafiles)
metafileWriter.start()
# string writting buffer, to be written on the file
printString = ""
# for each modification on memory, got in order, writes on the string
# buffer, and when it gets full, writes to a temporary disk file the
# results of merging the modification on each line of the index,
# and the unmodified lines, ordered by the hashes of the terms
modKeys = sorted([k for k in self.modifications.keys()])
with open(self.GENERATED_INVERTED_INDEX, "r") as oldIndexFile: # @IgnorePep8
with open("TEMP_"+self.GENERATED_INVERTED_INDEX, "w+") as newIndexFile: # @IgnorePep8
for line in oldIndexFile:
# reads the data in the old index file line
lineNum = line.split(":::")[0]
# if the modifications line is to be written in the string
# writing buffer, because the read line was modified
if lineNum in modKeys: # @IgnorePep8
printString += lineNum+":::"+str(self.modifications[lineNum])+"\n" # @IgnorePep8
else:
# if the original index line is to be written on the
# file writing buffer, saves it
printString += line
# if the buffer is full to the threshold, writes it to
# the disk file
if len(printString) >= FILEACCESS_THRASHOLD:
newIndexFile.write(printString)
printString = ""
# renames the old inverted Index to become a backup
os.rename(self.GENERATED_INVERTED_INDEX, "Backup_"+str(time.time())+"_"+self.GENERATED_INVERTED_INDEX) # @IgnorePep8
# rename the new to replace the old one
os.rename("TEMP_"+self.GENERATED_INVERTED_INDEX, self.GENERATED_INVERTED_INDEX) # @IgnorePep8
# assures that the metafile writer thread is done writing
metafileWriter.join()
| gpl-2.0 | -3,419,447,673,354,494,500 | 49.312236 | 126 | 0.617243 | false |
uberVU/elasticboard | data_processor/api.py | 1 | 7727 | from functools import partial, wraps
import datetime
import queries
from utils import crossdomain
from flask import Flask, jsonify, request
from werkzeug.contrib.cache import MemcachedCache
cache = MemcachedCache(['127.0.0.1:11211'])
CACHE_TIMEOUT = 5 * 60
app = Flask(__name__)
# app.debug = True
CHART_INTERVALS = 6
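# e.g. index_name('uberVU', 'elasticboard') -> 'uberVU&elasticboard': the
# per-repository index name handed to the queries module by every
# /<owner>/<repo>/... endpoint below.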
def index_name(user, repo):
return '&'.join((user, repo))
@app.errorhandler(500)
def internal_error(error):
return "Not found or bad request", 400
# http://flask.pocoo.org/docs/patterns/viewdecorators/#caching-decorator
def cached(timeout=CACHE_TIMEOUT, key='view/%s'):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
cache_key = key % request.full_path # using full path for get params
rv = cache.get(cache_key)
if rv is not None:
return rv
rv = f(*args, **kwargs)
cache.set(cache_key, rv, timeout=timeout)
return rv
return decorated_function
return decorator
# api endpoints that call the queries
@app.route('/<owner>/<repo>/most_active_people')
@crossdomain(origin='*')
@cached()
def most_active_people(owner, repo):
index = index_name(owner, repo)
data = queries.most_active_people(index)
return jsonify(data=data)
@app.route('/<owner>/<repo>/total_events')
@crossdomain(origin='*')
@cached()
def total_events(owner, repo):
index = index_name(owner, repo)
mode = request.args.get('mode', 'weekly')
if mode == 'weekly':
data = queries.past_n_weeks(index, queries.total_events, CHART_INTERVALS)
elif mode == 'monthly':
data = queries.past_n_months(index, queries.total_events, CHART_INTERVALS)
else:
data = 'Mode not supported. Use ?mode=weekly or monthly'
return jsonify(data=data)
@app.route('/<owner>/<repo>/most_active_issues')
@crossdomain(origin='*')
@cached()
def most_active_issues(owner, repo):
index = index_name(owner, repo)
data = queries.most_active_issues(index)
return jsonify(data=data)
@app.route('/<owner>/<repo>/untouched_issues')
@crossdomain(origin='*')
@cached()
def untouched_issues(owner, repo):
index = index_name(owner, repo)
label = request.args.get('label', None)
data = queries.untouched_issues(index, label)
return jsonify(data=data)
@app.route('/<owner>/<repo>/recent_events')
@crossdomain(origin='*')
@cached()
def recent_events(owner, repo):
index = index_name(owner, repo)
count = int(request.args.get('count', 200))
starting_from = int(request.args.get('starting_from', 0))
data = queries.recent_events(index, count, starting_from)
return jsonify(data=data)
@app.route('/available_repos')
@crossdomain(origin='*')
def available_repos():
data = sorted(queries.available_repos())
return jsonify(data=data)
@app.route('/<owner>/<repo>/issues_activity')
@crossdomain(origin='*')
@cached()
def issues_activity(owner, repo):
index = index_name(owner, repo)
mode = request.args.get('mode', 'weekly')
if mode == 'weekly':
opened = queries.past_n_weeks(index, partial(queries.issue_events_count, action='opened'), CHART_INTERVALS)
closed = queries.past_n_weeks(index, partial(queries.issue_events_count, action='closed'), CHART_INTERVALS)
data = {'opened': opened, 'closed': closed}
elif mode == 'monthly':
opened = queries.past_n_months(index, partial(queries.issue_events_count, action='opened'), CHART_INTERVALS)
closed = queries.past_n_months(index, partial(queries.issue_events_count, action='closed'), CHART_INTERVALS)
data = {'opened': opened, 'closed': closed}
else:
data = 'Mode not supported. Use ?mode=weekly or monthly'
return jsonify(data=data)
@app.route('/<owner>/<repo>/issues_count')
@crossdomain(origin='*')
@cached()
def issues_count(owner, repo):
index = index_name(owner, repo)
open = queries.issues_count(index, 'open')
closed = queries.issues_count(index, 'closed')
data = {'open': open, 'closed': closed}
return jsonify(data=data)
@app.route('/<owner>/<repo>/pulls_count')
@crossdomain(origin='*')
@cached()
def pulls_count(owner, repo):
index = index_name(owner, repo)
count = queries.pulls_count(index)
data = {'open': count}
return jsonify(data=data)
@app.route('/<owner>/<repo>/inactive_issues')
@crossdomain(origin='*')
@cached()
def inactive_issues(owner, repo):
index = index_name(owner, repo)
label = request.args.get('label', None)
data = queries.inactive_issues(index, label)
return jsonify(data=data)
@app.route('/<owner>/<repo>/avg_issue_time')
@crossdomain(origin='*')
@cached()
def avg_issue_time(owner, repo):
index = index_name(owner, repo)
mode = request.args.get('mode', 'weekly')
if mode == 'weekly':
times = queries.past_n_weeks(index, queries.avg_issue_time, CHART_INTERVALS)
elif mode == 'monthly':
times = queries.past_n_months(index, queries.avg_issue_time, CHART_INTERVALS)
else:
times = 'Mode not supported. Use ?mode=weekly or monthly'
return jsonify(data=times)
@app.route('/<owner>/<repo>/issues_involvement')
@crossdomain(origin='*')
@cached()
def issues_involvement(owner, repo):
index = index_name(owner, repo)
now = datetime.datetime.now()
month_start = now - datetime.timedelta(days=now.day)
data = queries.issues_involvement(index, start=month_start, end=now)
return jsonify(data=data)
@app.route('/<owner>/<repo>/milestones')
@crossdomain(origin='*')
@cached()
def milestones(owner, repo):
index = index_name(owner, repo)
milestones = queries.milestones(index)
return jsonify(data=milestones)
@app.route('/<owner>/<repo>/unassigned_issues')
@crossdomain(origin='*')
@cached()
def unassigned_issues(owner, repo):
index = index_name(owner, repo)
label = request.args.get('label', None)
issues = queries.unassigned_issues(index, label)
return jsonify(data=issues)
@app.route('/<owner>/<repo>/labels')
@crossdomain(origin='*')
@cached()
def labels(owner, repo):
index = index_name(owner, repo)
labels = queries.labels(index)
return jsonify(data=labels)
@app.route('/<owner>/<repo>/outstanding_pull_requests')
@crossdomain(origin='*')
@cached()
def outstanding_pull_requests(owner, repo):
index = index_name(owner, repo)
prs = queries.outstanding_pull_requests(index, limit=20)
return jsonify(data=prs)
@app.route('/<owner>/<repo>/popularity_evolution')
@crossdomain(origin='*')
@cached()
def popularity_evolution(owner, repo):
index = index_name(owner, repo)
mode = request.args.get('mode', 'weekly')
if mode == 'weekly':
data = queries.past_n_weeks(index, queries.popularity_events, CHART_INTERVALS)
elif mode == 'monthly':
data = queries.past_n_months(index, queries.popularity_events, CHART_INTERVALS)
else:
data = 'Mode not supported. Use ?mode=weekly or monthly'
return jsonify(data=data)
@app.route('/<owner>/<repo>/collaborators')
@crossdomain(origin='*')
@cached()
def collaborators(owner, repo):
index = index_name(owner, repo)
data = queries.collaborators(index)
return jsonify(data=data)
@app.route('/<owner>/<repo>/pull_requests')
@crossdomain(origin='*')
@cached()
def pull_requests(owner, repo):
index = index_name(owner, repo)
data = queries.pull_requests(index)
return jsonify(data=data)
@app.route('/<owner>/<repo>/issue_distribution')
@crossdomain(origin='*')
@cached()
def issue_distribution(owner, repo):
index = index_name(owner, repo)
data = queries.issue_distribution(index)
return jsonify(data=data)
if __name__ == '__main__':
app.run(host='0.0.0.0', threaded=True)
| mit | -6,479,771,487,385,263,000 | 31.330544 | 116 | 0.671541 | false |
elhe/bread_diary_web | application/views.py | 1 | 1189 | from application import application, utils
from application.urls import DIARY_ADD_URL, DIARY_ALL_URL
from dateutil.parser import parser
from flask import render_template, request, url_for
from werkzeug.utils import redirect
__author__ = 'elhe'
@application.route('/', methods=['GET', ])
def index():
response = utils.send_http_request('get', DIARY_ALL_URL)
data = response.json()
for entry in data['entries']:
date_time = parser().parse(entry['date_time'])
entry['date_time'] = date_time.strftime('%d.%m %H:%M')
return render_template('index.html', entries=data['entries'])
@application.route('/add_food_entry', methods=['POST', 'GET'])
def add_food_entry():
if request.method == 'POST':
data = dict(name=request.form.get('food'),
weight=int(request.form.get('weight')),
date_time=request.form.get('date_time'),
bread_unit=int(request.form.get('bread_unit')),)
response = utils.send_json_request('post', DIARY_ADD_URL, data)
if response.status_code != 200:
return render_template('index.html', message='FAILS')
return redirect(url_for('index'))
| gpl-2.0 | 5,933,237,861,314,215,000 | 35.030303 | 71 | 0.637511 | false |
jaseg/python-mpv | setup.py | 1 | 1155 | #!/usr/bin/env python3
from setuptools import setup
setup(
name = 'python-mpv',
version = '0.5.1',
py_modules = ['mpv'],
description = 'A python interface to the mpv media player',
url = 'https://github.com/jaseg/python-mpv',
author = 'jaseg',
author_email = 'github@jaseg.net',
license = 'AGPLv3+',
extras_require = {
'screenshot_raw': ['Pillow']
},
tests_require = ['xvfbwrapper'],
test_suite = 'tests',
keywords = ['mpv', 'library', 'video', 'audio', 'player', 'display',
'multimedia'],
python_requires='>=3.5',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: C',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Sound/Audio :: Players',
'Topic :: Multimedia :: Video :: Display']
)
| agpl-3.0 | 5,502,061,537,492,389,000 | 34 | 93 | 0.575758 | false |
cstrong/ProjectCost | app/__init__.py | 1 | 1110 | #########################################
#
# Project Cost project
#
# pyapp/__init__.py
#
#########################################
from flask import Flask
from flask.ext.bootstrap import Bootstrap
# Monkey patch to make flask-triangle work, since it is not yet compatible with python3
import builtins
builtins.unicode = str
from flask.ext.triangle import Triangle
from flask.ext.restful import Api
from config import config
bootstrap = Bootstrap()
triangle = Triangle()
def create_app(config_name):
app = Flask(__name__)
app.debug = True
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
triangle.init_app(app)
api = Api(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .resources import resources as resources_blueprint
app.register_blueprint(resources_blueprint)
# register resources
from .resources import projects
api.add_resource(projects.Project, '/projects/<project_id>')
api.add_resource(projects.Projects, '/projects')
return app
| mit | 2,145,066,538,203,652,400 | 23.130435 | 87 | 0.672973 | false |
mahabuber/erpnext | erpnext/accounts/doctype/sales_invoice/sales_invoice.py | 2 | 26538 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import frappe.defaults
from frappe.utils import cint, flt
from frappe import _, msgprint, throw
from erpnext.accounts.party import get_party_account, get_due_date
from erpnext.controllers.stock_controller import update_gl_entries_after
from frappe.model.mapper import get_mapped_doc
from erpnext.controllers.selling_controller import SellingController
from erpnext.accounts.utils import get_account_currency
from erpnext.stock.doctype.delivery_note.delivery_note import update_billed_amount_based_on_so
form_grid_templates = {
"items": "templates/form_grid/item_grid.html"
}
class SalesInvoice(SellingController):
def __init__(self, arg1, arg2=None):
super(SalesInvoice, self).__init__(arg1, arg2)
self.status_updater = [{
'source_dt': 'Sales Invoice Item',
'target_field': 'billed_amt',
'target_ref_field': 'amount',
'target_dt': 'Sales Order Item',
'join_field': 'so_detail',
'target_parent_dt': 'Sales Order',
'target_parent_field': 'per_billed',
'source_field': 'amount',
'join_field': 'so_detail',
'percent_join_field': 'sales_order',
'status_field': 'billing_status',
'keyword': 'Billed',
'overflow_type': 'billing'
}]
def set_indicator(self):
"""Set indicator for portal"""
if self.outstanding_amount > 0:
self.indicator_color = "orange"
self.indicator_title = _("Unpaid")
else:
self.indicator_color = "green"
self.indicator_title = _("Paid")
def validate(self):
super(SalesInvoice, self).validate()
self.validate_posting_time()
self.so_dn_required()
self.validate_proj_cust()
self.validate_with_previous_doc()
self.validate_uom_is_integer("stock_uom", "qty")
self.check_stop_or_close_sales_order("sales_order")
self.validate_debit_to_acc()
self.validate_fixed_asset_account()
self.clear_unallocated_advances("Sales Invoice Advance", "advances")
self.validate_advance_jv("Sales Order")
self.add_remarks()
self.validate_write_off_account()
if cint(self.is_pos):
self.validate_pos()
if cint(self.update_stock):
self.validate_dropship_item()
self.validate_item_code()
self.validate_warehouse()
self.update_current_stock()
self.validate_delivery_note()
if not self.is_opening:
self.is_opening = 'No'
self.set_against_income_account()
self.validate_c_form()
self.validate_time_logs_are_submitted()
self.validate_multiple_billing("Delivery Note", "dn_detail", "amount", "items")
self.update_packing_list()
def on_submit(self):
super(SalesInvoice, self).on_submit()
if cint(self.update_stock) == 1:
self.update_stock_ledger()
else:
# Check for Approving Authority
if not self.recurring_id:
frappe.get_doc('Authorization Control').validate_approving_authority(self.doctype,
self.company, self.base_grand_total, self)
self.check_prev_docstatus()
if self.is_return:
# NOTE status updating bypassed for is_return
self.status_updater = []
self.update_status_updater_args()
self.update_prevdoc_status()
self.update_billing_status_in_dn()
# this sequence because outstanding may get -ve
self.make_gl_entries()
if not self.is_return:
self.update_billing_status_for_zero_amount_refdoc("Sales Order")
self.check_credit_limit()
if not cint(self.is_pos) == 1 and not self.is_return:
self.update_against_document_in_jv()
self.update_time_log_batch(self.name)
def before_cancel(self):
self.update_time_log_batch(None)
def on_cancel(self):
if cint(self.update_stock) == 1:
self.update_stock_ledger()
self.check_stop_or_close_sales_order("sales_order")
from erpnext.accounts.utils import remove_against_link_from_jv
remove_against_link_from_jv(self.doctype, self.name)
if self.is_return:
# NOTE status updating bypassed for is_return
self.status_updater = []
self.update_status_updater_args()
self.update_prevdoc_status()
self.update_billing_status_in_dn()
if not self.is_return:
self.update_billing_status_for_zero_amount_refdoc("Sales Order")
self.validate_c_form_on_cancel()
self.make_gl_entries_on_cancel()
def update_status_updater_args(self):
if cint(self.update_stock):
self.status_updater.extend([{
'source_dt':'Sales Invoice Item',
'target_dt':'Sales Order Item',
'target_parent_dt':'Sales Order',
'target_parent_field':'per_delivered',
'target_field':'delivered_qty',
'target_ref_field':'qty',
'source_field':'qty',
'join_field':'so_detail',
'percent_join_field':'sales_order',
'status_field':'delivery_status',
'keyword':'Delivered',
'second_source_dt': 'Delivery Note Item',
'second_source_field': 'qty',
'second_join_field': 'so_detail',
'overflow_type': 'delivery',
'extra_cond': """ and exists(select name from `tabSales Invoice`
where name=`tabSales Invoice Item`.parent and update_stock = 1)"""
},
{
'source_dt': 'Sales Invoice Item',
'target_dt': 'Sales Order Item',
'join_field': 'so_detail',
'target_field': 'returned_qty',
'target_parent_dt': 'Sales Order',
# 'target_parent_field': 'per_delivered',
# 'target_ref_field': 'qty',
'source_field': '-1 * qty',
# 'percent_join_field': 'sales_order',
# 'overflow_type': 'delivery',
'extra_cond': """ and exists (select name from `tabSales Invoice` where name=`tabSales Invoice Item`.parent and update_stock=1 and is_return=1)"""
}
])
def check_credit_limit(self):
from erpnext.selling.doctype.customer.customer import check_credit_limit
validate_against_credit_limit = False
for d in self.get("items"):
if not (d.sales_order or d.delivery_note):
validate_against_credit_limit = True
break
if validate_against_credit_limit:
check_credit_limit(self.customer, self.company)
def set_missing_values(self, for_validate=False):
pos = self.set_pos_fields(for_validate)
if not self.debit_to:
self.debit_to = get_party_account("Customer", self.customer, self.company)
if not self.due_date and self.customer:
self.due_date = get_due_date(self.posting_date, "Customer", self.customer, self.company)
super(SalesInvoice, self).set_missing_values(for_validate)
if pos:
return {"print_format": pos.get("print_format") }
def update_time_log_batch(self, sales_invoice):
for d in self.get("items"):
if d.time_log_batch:
tlb = frappe.get_doc("Time Log Batch", d.time_log_batch)
tlb.sales_invoice = sales_invoice
tlb.flags.ignore_validate_update_after_submit = True
tlb.save()
def validate_time_logs_are_submitted(self):
for d in self.get("items"):
if d.time_log_batch:
docstatus = frappe.db.get_value("Time Log Batch", d.time_log_batch, "docstatus")
if docstatus!=1:
frappe.throw(_("Time Log Batch {0} must be 'Submitted'").format(d.time_log_batch))
def set_pos_fields(self, for_validate=False):
"""Set retail related fields from POS Profiles"""
if cint(self.is_pos) != 1:
return
from erpnext.stock.get_item_details import get_pos_profile_item_details, get_pos_profile
pos = get_pos_profile(self.company)
if pos:
if not for_validate and not self.customer:
self.customer = pos.customer
self.mode_of_payment = pos.mode_of_payment
# self.set_customer_defaults()
for fieldname in ('territory', 'naming_series', 'currency', 'taxes_and_charges', 'letter_head', 'tc_name',
'selling_price_list', 'company', 'select_print_heading', 'cash_bank_account',
'write_off_account', 'write_off_cost_center'):
if (not for_validate) or (for_validate and not self.get(fieldname)):
self.set(fieldname, pos.get(fieldname))
if not for_validate:
self.update_stock = cint(pos.get("update_stock"))
# set pos values in items
for item in self.get("items"):
if item.get('item_code'):
for fname, val in get_pos_profile_item_details(pos,
frappe._dict(item.as_dict()), pos).items():
if (not for_validate) or (for_validate and not item.get(fname)):
item.set(fname, val)
# fetch terms
if self.tc_name and not self.terms:
self.terms = frappe.db.get_value("Terms and Conditions", self.tc_name, "terms")
# fetch charges
if self.taxes_and_charges and not len(self.get("taxes")):
self.set_taxes()
return pos
def get_advances(self):
if not self.is_return:
super(SalesInvoice, self).get_advances(self.debit_to, "Customer", self.customer,
"Sales Invoice Advance", "advances", "credit_in_account_currency", "sales_order")
def get_company_abbr(self):
return frappe.db.sql("select abbr from tabCompany where name=%s", self.company)[0][0]
def update_against_document_in_jv(self):
"""
Links invoice and advance voucher:
1. cancel advance voucher
2. split into multiple rows if partially adjusted, assign against voucher
3. submit advance voucher
"""
lst = []
for d in self.get('advances'):
if flt(d.allocated_amount) > 0:
args = {
'voucher_no' : d.journal_entry,
'voucher_detail_no' : d.jv_detail_no,
'against_voucher_type' : 'Sales Invoice',
'against_voucher' : self.name,
'account' : self.debit_to,
'party_type': 'Customer',
'party': self.customer,
'is_advance' : 'Yes',
'dr_or_cr' : 'credit_in_account_currency',
'unadjusted_amt' : flt(d.advance_amount),
'allocated_amt' : flt(d.allocated_amount)
}
lst.append(args)
if lst:
from erpnext.accounts.utils import reconcile_against_document
reconcile_against_document(lst)
def validate_debit_to_acc(self):
account = frappe.db.get_value("Account", self.debit_to,
["account_type", "report_type", "account_currency"], as_dict=True)
if not account:
frappe.throw(_("Debit To is required"))
if account.report_type != "Balance Sheet":
frappe.throw(_("Debit To account must be a Balance Sheet account"))
if self.customer and account.account_type != "Receivable":
frappe.throw(_("Debit To account must be a Receivable account"))
self.party_account_currency = account.account_currency
def validate_fixed_asset_account(self):
"""Validate Fixed Asset and whether Income Account Entered Exists"""
for d in self.get('items'):
is_asset_item = frappe.db.get_value("Item", d.item_code, "is_asset_item")
account_type = frappe.db.get_value("Account", d.income_account, "account_type")
if is_asset_item == 1 and account_type != 'Fixed Asset':
msgprint(_("Account {0} must be of type 'Fixed Asset' as Item {1} is an Asset Item").format(d.income_account, d.item_code), raise_exception=True)
def validate_with_previous_doc(self):
super(SalesInvoice, self).validate_with_previous_doc({
"Sales Order": {
"ref_dn_field": "sales_order",
"compare_fields": [["customer", "="], ["company", "="], ["project_name", "="],
["currency", "="]],
},
"Delivery Note": {
"ref_dn_field": "delivery_note",
"compare_fields": [["customer", "="], ["company", "="], ["project_name", "="],
["currency", "="]],
},
})
if cint(frappe.db.get_single_value('Selling Settings', 'maintain_same_sales_rate')) and not self.is_return:
self.validate_rate_with_reference_doc([
["Sales Order", "sales_order", "so_detail"],
["Delivery Note", "delivery_note", "dn_detail"]
])
def set_against_income_account(self):
"""Set against account for debit to account"""
against_acc = []
for d in self.get('items'):
if d.income_account not in against_acc:
against_acc.append(d.income_account)
self.against_income_account = ','.join(against_acc)
def add_remarks(self):
if not self.remarks: self.remarks = 'No Remarks'
def so_dn_required(self):
"""check in manage account if sales order / delivery note required or not."""
dic = {'Sales Order':'so_required','Delivery Note':'dn_required'}
for i in dic:
if frappe.db.get_value('Selling Settings', None, dic[i]) == 'Yes':
for d in self.get('items'):
if frappe.db.get_value('Item', d.item_code, 'is_stock_item') == 1 \
and not d.get(i.lower().replace(' ','_')):
msgprint(_("{0} is mandatory for Item {1}").format(i,d.item_code), raise_exception=1)
def validate_proj_cust(self):
"""check for does customer belong to same project as entered.."""
if self.project_name and self.customer:
res = frappe.db.sql("""select name from `tabProject`
where name = %s and (customer = %s or customer is null or customer = '')""",
(self.project_name, self.customer))
if not res:
throw(_("Customer {0} does not belong to project {1}").format(self.customer,self.project_name))
def validate_pos(self):
if not self.cash_bank_account and flt(self.paid_amount):
frappe.throw(_("Cash or Bank Account is mandatory for making payment entry"))
if flt(self.paid_amount) + flt(self.write_off_amount) \
- flt(self.base_grand_total) > 1/(10**(self.precision("base_grand_total") + 1)):
frappe.throw(_("""Paid amount + Write Off Amount can not be greater than Grand Total"""))
def validate_item_code(self):
for d in self.get('items'):
if not d.item_code:
msgprint(_("Item Code required at Row No {0}").format(d.idx), raise_exception=True)
def validate_warehouse(self):
super(SalesInvoice, self).validate_warehouse()
for d in self.get('items'):
if not d.warehouse:
frappe.throw(_("Warehouse required at Row No {0}").format(d.idx))
def validate_delivery_note(self):
for d in self.get("items"):
if d.delivery_note:
msgprint(_("Stock cannot be updated against Delivery Note {0}").format(d.delivery_note), raise_exception=1)
def validate_write_off_account(self):
if flt(self.write_off_amount) and not self.write_off_account:
msgprint(_("Please enter Write Off Account"), raise_exception=1)
def validate_c_form(self):
""" Blank C-form no if C-form applicable marked as 'No'"""
if self.amended_from and self.c_form_applicable == 'No' and self.c_form_no:
frappe.db.sql("""delete from `tabC-Form Invoice Detail` where invoice_no = %s
and parent = %s""", (self.amended_from, self.c_form_no))
frappe.db.set(self, 'c_form_no', '')
def validate_c_form_on_cancel(self):
""" Display message if C-Form no exists on cancellation of Sales Invoice"""
if self.c_form_applicable == 'Yes' and self.c_form_no:
msgprint(_("Please remove this Invoice {0} from C-Form {1}")
.format(self.name, self.c_form_no), raise_exception = 1)
def validate_dropship_item(self):
for item in self.items:
if item.sales_order:
if frappe.db.get_value("Sales Order Item", item.so_detail, "delivered_by_supplier"):
frappe.throw(_("Could not update stock, invoice contains drop shipping item."))
def update_current_stock(self):
for d in self.get('items'):
if d.item_code and d.warehouse:
bin = frappe.db.sql("select actual_qty from `tabBin` where item_code = %s and warehouse = %s", (d.item_code, d.warehouse), as_dict = 1)
d.actual_qty = bin and flt(bin[0]['actual_qty']) or 0
for d in self.get('packed_items'):
bin = frappe.db.sql("select actual_qty, projected_qty from `tabBin` where item_code = %s and warehouse = %s", (d.item_code, d.warehouse), as_dict = 1)
d.actual_qty = bin and flt(bin[0]['actual_qty']) or 0
d.projected_qty = bin and flt(bin[0]['projected_qty']) or 0
def update_packing_list(self):
if cint(self.update_stock) == 1:
from erpnext.stock.doctype.packed_item.packed_item import make_packing_list
make_packing_list(self)
else:
self.set('packed_items', [])
def get_warehouse(self):
user_pos_profile = frappe.db.sql("""select name, warehouse from `tabPOS Profile`
where ifnull(user,'') = %s and company = %s""", (frappe.session['user'], self.company))
warehouse = user_pos_profile[0][1] if user_pos_profile else None
if not warehouse:
global_pos_profile = frappe.db.sql("""select name, warehouse from `tabPOS Profile`
where (user is null or user = '') and company = %s""", self.company)
if global_pos_profile:
warehouse = global_pos_profile[0][1]
elif not user_pos_profile:
msgprint(_("POS Profile required to make POS Entry"), raise_exception=True)
return warehouse
def on_update(self):
if cint(self.is_pos) == 1:
if flt(self.paid_amount) == 0:
if self.cash_bank_account:
frappe.db.set(self, 'paid_amount',
flt(flt(self.grand_total) - flt(self.write_off_amount), self.precision("paid_amount")))
else:
# show message that the amount is not paid
frappe.db.set(self,'paid_amount',0)
frappe.msgprint(_("Note: Payment Entry will not be created since 'Cash or Bank Account' was not specified"))
else:
frappe.db.set(self,'paid_amount',0)
frappe.db.set(self, 'base_paid_amount',
flt(self.paid_amount*self.conversion_rate, self.precision("base_paid_amount")))
def check_prev_docstatus(self):
for d in self.get('items'):
if d.sales_order and frappe.db.get_value("Sales Order", d.sales_order, "docstatus") != 1:
frappe.throw(_("Sales Order {0} is not submitted").format(d.sales_order))
if d.delivery_note and frappe.db.get_value("Delivery Note", d.delivery_note, "docstatus") != 1:
throw(_("Delivery Note {0} is not submitted").format(d.delivery_note))
def make_gl_entries(self, repost_future_gle=True):
gl_entries = self.get_gl_entries()
if gl_entries:
from erpnext.accounts.general_ledger import make_gl_entries
# if POS and amount is written off, updating outstanding amt after posting all gl entries
update_outstanding = "No" if (cint(self.is_pos) or self.write_off_account) else "Yes"
make_gl_entries(gl_entries, cancel=(self.docstatus == 2),
update_outstanding=update_outstanding, merge_entries=False)
if update_outstanding == "No":
from erpnext.accounts.doctype.gl_entry.gl_entry import update_outstanding_amt
update_outstanding_amt(self.debit_to, "Customer", self.customer,
self.doctype, self.return_against if cint(self.is_return) else self.name)
if repost_future_gle and cint(self.update_stock) \
and cint(frappe.defaults.get_global_default("auto_accounting_for_stock")):
items, warehouses = self.get_items_and_warehouses()
update_gl_entries_after(self.posting_date, self.posting_time, warehouses, items)
elif self.docstatus == 2 and cint(self.update_stock) \
and cint(frappe.defaults.get_global_default("auto_accounting_for_stock")):
from erpnext.accounts.general_ledger import delete_gl_entries
delete_gl_entries(voucher_type=self.doctype, voucher_no=self.name)
def get_gl_entries(self, warehouse_account=None):
from erpnext.accounts.general_ledger import merge_similar_entries
gl_entries = []
self.make_customer_gl_entry(gl_entries)
self.make_tax_gl_entries(gl_entries)
self.make_item_gl_entries(gl_entries)
# merge gl entries before adding pos entries
gl_entries = merge_similar_entries(gl_entries)
self.make_pos_gl_entries(gl_entries)
self.make_write_off_gl_entry(gl_entries)
return gl_entries
def make_customer_gl_entry(self, gl_entries):
if self.grand_total:
			# Did not use base_grand_total, in order to book the rounding loss gle
grand_total_in_company_currency = flt(self.grand_total * self.conversion_rate,
self.precision("grand_total"))
gl_entries.append(
self.get_gl_dict({
"account": self.debit_to,
"party_type": "Customer",
"party": self.customer,
"against": self.against_income_account,
"debit": grand_total_in_company_currency,
"debit_in_account_currency": grand_total_in_company_currency \
if self.party_account_currency==self.company_currency else self.grand_total,
"against_voucher": self.return_against if cint(self.is_return) else self.name,
"against_voucher_type": self.doctype
}, self.party_account_currency)
)
def make_tax_gl_entries(self, gl_entries):
for tax in self.get("taxes"):
if flt(tax.base_tax_amount_after_discount_amount):
account_currency = get_account_currency(tax.account_head)
gl_entries.append(
self.get_gl_dict({
"account": tax.account_head,
"against": self.customer,
"credit": flt(tax.base_tax_amount_after_discount_amount),
"credit_in_account_currency": flt(tax.base_tax_amount_after_discount_amount) \
if account_currency==self.company_currency else flt(tax.tax_amount_after_discount_amount),
"cost_center": tax.cost_center
}, account_currency)
)
def make_item_gl_entries(self, gl_entries):
# income account gl entries
for item in self.get("items"):
if flt(item.base_net_amount):
account_currency = get_account_currency(item.income_account)
gl_entries.append(
self.get_gl_dict({
"account": item.income_account,
"against": self.customer,
"credit": item.base_net_amount,
"credit_in_account_currency": item.base_net_amount \
if account_currency==self.company_currency else item.net_amount,
"cost_center": item.cost_center
}, account_currency)
)
# expense account gl entries
if cint(frappe.defaults.get_global_default("auto_accounting_for_stock")) \
and cint(self.update_stock):
gl_entries += super(SalesInvoice, self).get_gl_entries()
def make_pos_gl_entries(self, gl_entries):
if cint(self.is_pos) and self.cash_bank_account and self.paid_amount:
bank_account_currency = get_account_currency(self.cash_bank_account)
# POS, make payment entries
gl_entries.append(
self.get_gl_dict({
"account": self.debit_to,
"party_type": "Customer",
"party": self.customer,
"against": self.cash_bank_account,
"credit": self.base_paid_amount,
"credit_in_account_currency": self.base_paid_amount \
if self.party_account_currency==self.company_currency else self.paid_amount,
"against_voucher": self.return_against if cint(self.is_return) else self.name,
"against_voucher_type": self.doctype,
}, self.party_account_currency)
)
gl_entries.append(
self.get_gl_dict({
"account": self.cash_bank_account,
"against": self.customer,
"debit": self.base_paid_amount,
"debit_in_account_currency": self.base_paid_amount \
if bank_account_currency==self.company_currency else self.paid_amount
}, bank_account_currency)
)
def make_write_off_gl_entry(self, gl_entries):
		# write off entries, applicable only if pos
if self.write_off_account and self.write_off_amount:
write_off_account_currency = get_account_currency(self.write_off_account)
gl_entries.append(
self.get_gl_dict({
"account": self.debit_to,
"party_type": "Customer",
"party": self.customer,
"against": self.write_off_account,
"credit": self.base_write_off_amount,
"credit_in_account_currency": self.base_write_off_amount \
if self.party_account_currency==self.company_currency else self.write_off_amount,
"against_voucher": self.return_against if cint(self.is_return) else self.name,
"against_voucher_type": self.doctype
}, self.party_account_currency)
)
gl_entries.append(
self.get_gl_dict({
"account": self.write_off_account,
"against": self.customer,
"debit": self.base_write_off_amount,
"debit_in_account_currency": self.base_write_off_amount \
if write_off_account_currency==self.company_currency else self.write_off_amount,
"cost_center": self.write_off_cost_center
}, write_off_account_currency)
)
def update_billing_status_in_dn(self, update_modified=True):
updated_delivery_notes = []
for d in self.get("items"):
if d.dn_detail:
billed_amt = frappe.db.sql("""select sum(amount) from `tabSales Invoice Item`
where dn_detail=%s and docstatus=1""", d.dn_detail)
billed_amt = billed_amt and billed_amt[0][0] or 0
frappe.db.set_value("Delivery Note Item", d.dn_detail, "billed_amt", billed_amt, update_modified=update_modified)
updated_delivery_notes.append(d.delivery_note)
elif d.so_detail:
updated_delivery_notes += update_billed_amount_based_on_so(d.so_detail, update_modified)
for dn in set(updated_delivery_notes):
frappe.get_doc("Delivery Note", dn).update_billing_percentage(update_modified=update_modified)
def on_recurring(self, reference_doc):
for fieldname in ("c_form_applicable", "c_form_no", "write_off_amount"):
self.set(fieldname, reference_doc.get(fieldname))
self.due_date = None
def get_list_context(context=None):
from erpnext.controllers.website_list_for_contact import get_list_context
list_context = get_list_context(context)
list_context["title"] = _("My Invoices")
return list_context
@frappe.whitelist()
def get_bank_cash_account(mode_of_payment, company):
account = frappe.db.get_value("Mode of Payment Account",
{"parent": mode_of_payment, "company": company}, "default_account")
if not account:
frappe.msgprint(_("Please set default Cash or Bank account in Mode of Payment {0}").format(mode_of_payment))
return {
"account": account
}
@frappe.whitelist()
def make_delivery_note(source_name, target_doc=None):
def set_missing_values(source, target):
target.ignore_pricing_rule = 1
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
def update_item(source_doc, target_doc, source_parent):
target_doc.base_amount = (flt(source_doc.qty) - flt(source_doc.delivered_qty)) * \
flt(source_doc.base_rate)
target_doc.amount = (flt(source_doc.qty) - flt(source_doc.delivered_qty)) * \
flt(source_doc.rate)
target_doc.qty = flt(source_doc.qty) - flt(source_doc.delivered_qty)
doclist = get_mapped_doc("Sales Invoice", source_name, {
"Sales Invoice": {
"doctype": "Delivery Note",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Invoice Item": {
"doctype": "Delivery Note Item",
"field_map": {
"name": "si_detail",
"parent": "against_sales_invoice",
"serial_no": "serial_no",
"sales_order": "against_sales_order",
"so_detail": "so_detail"
},
"postprocess": update_item,
"condition": lambda doc: doc.delivered_by_supplier!=1
},
"Sales Taxes and Charges": {
"doctype": "Sales Taxes and Charges",
"add_if_empty": True
},
"Sales Team": {
"doctype": "Sales Team",
"field_map": {
"incentives": "incentives"
},
"add_if_empty": True
}
}, target_doc, set_missing_values)
return doclist
@frappe.whitelist()
def make_sales_return(source_name, target_doc=None):
from erpnext.controllers.sales_and_purchase_return import make_return_doc
return make_return_doc("Sales Invoice", source_name, target_doc)
| agpl-3.0 | 6,422,347,278,987,420,000 | 35.204638 | 153 | 0.685771 | false |
ahwillia/PyNeuron-Toolbox | PyNeuronToolbox/neuromorpho.py | 1 | 5790 | """
Scraper for querying NeuroMorpho.Org from Python.
For more on NeuroMorpho.Org, see:
Ascoli GA, Donohue DE, Halavi M. (2007) NeuroMorpho.Org: a central
resource for neuronal morphologies.J Neurosci., 27(35):9247-51
Run this file as a stand-alone script for a demo. The demo queries NeuroMorpho.Org
in general, and provides extra information about the cell mb100318-a which is
associated with the publication:
Bagnall, M. W., Hull, C., Bushong, E. A., Ellisman, M. H., & Scanziani, M. (2011).
Multiple clusters of release sites formed by individual thalamic afferents onto
cortical interneurons ensure reliable transmission. Neuron, 71(1), 180-194.
As a web-scraper, this module may break if the website structure changes, but it
was known to work as of April 2, 2016.
To get a list of all cell types, species, or regions, call cell_types(), species(),
or regions(), respectively.
Given a type from one of these lists, get the matching cell names via cell_names.
e.g. cell_names('blowfly')
To get the metadata for a given cell name, use metadata.
e.g. metadata('mb100318-a')
To get the morphology for a given cell name, use morphology.
e.g. morphology('mb100318-a')
An optional format keyword argument allows selecting between the original and the
standardized versions.
"""
import urllib2
import re
import json
import base64
_cache = {}
def _read_neuromorpho_table(bywhat):
"""Helper function, reads data from NeuroMorpho.Org, stores in cache."""
html = urllib2.urlopen('http://neuromorpho.org/by%s.jsp' % bywhat).read()
result = [m.strip() for m in re.findall("maketable\('(.*?)'\)", html)]
_cache[bywhat] = set(result)
return result
def cell_types():
"""Return a list of all cell types."""
return _read_neuromorpho_table('cell')
def species():
"""Return a list of all species."""
return _read_neuromorpho_table('species')
def regions():
"""Return a list of all the brain regions."""
return _read_neuromorpho_table('region')
def cell_names(category):
"""Return a list of all the names of cells of a given cell type, species, or region.
Examples:
cell_names('Aspiny')
cell_names('blowfly')
cell_names('amygdala')
"""
# figure out if category is a cell type, species, or region
# check the cached sets first
for bywhat, items in _cache.iteritems():
if category in items:
return _get_data_for_by(bywhat, category)
# no luck: try all three options
for bywhat in ['cell', 'region', 'species']:
result = _get_data_for_by(bywhat, category)
if result:
return result
return []
def _get_data_for_by(bywhat, category):
"""Helper function for cell_names."""
query_code = bywhat if bywhat != 'cell' else 'class'
html = urllib2.urlopen('http://neuromorpho.org/getdataforby%s.jsp?%s=%s' % (bywhat, query_code, category.replace(' ', '%20'))).read()
return [m for m in re.findall("neuron_name=(.*?)'", html)]
def metadata(neuron_name):
"""Return a dict of the metadata for the specified neuron.
Example:
metadata('mb100318-a')
"""
html = urllib2.urlopen('http://neuromorpho.org/neuron_info.jsp?neuron_name=%s' % neuron_name).read()
# remove non-breaking spaces
html = html.replace(' ', ' ')
# remove units
html = html.replace('μm<sup>2</sup>', ' ')
html = html.replace('μm', ' ')
html = html.replace('°', ' ')
html = html.replace('<b>x</b>', ' ')
html = html.replace('<sup>3</sup>', '')
html2 = html.replace('\n', '')
keys = [i[1][:-3].strip() for i in re.findall('<td align="right" width="50%"(.*?)>(.*?)</td>', html2)]
values = [i[1].strip() for i in re.findall('<td align="left"(.*?)>(.*?)</td>', html2)[2:]]
return dict(zip(keys, values))
def morphology(neuron_name, format='swc'):
"""Return the morphology associated with a given name.
Format options:
swc -- always "stanadardized" file format (default)
original -- original
Example:
morphology('mb100318-a', format='swc')
morphology('mb100318-a', format='original')
"""
url_paths_from_format = {'swc': 'CNG%20Version', 'original': 'Source-Version'}
assert(format in url_paths_from_format)
# locate the path to the downloads
html = urllib2.urlopen('http://neuromorpho.org/neuron_info.jsp?neuron_name=%s' % neuron_name).read()
if format == 'swc':
url = re.findall("<a href=dableFiles/(.*?)>Morphology File \(Standardized", html)[0]
else:
url = re.findall("<a href=dableFiles/(.*?)>Morphology File \(Original", html)[0]
return urllib2.urlopen('http://NeuroMorpho.org/dableFiles/%s' % url).read()
def download(neuron_name, filename=None):
format = 'swc'
    # append the default extension when the supplied filename has none
    if filename is not None and len(filename.split('.')) == 1:
        filename = base64.urlsafe_b64encode(filename+'.'+format)
if filename is None:
filename = base64.urlsafe_b64encode(neuron_name+'.'+format)
with open(filename, 'w') as f:
f.write(morphology(neuron_name, format=format))
if __name__ == '__main__':
print 'Demo of reading data from NeuroMorpho.Org'
print
for string, fn in zip(['cell types', 'brain regions', 'species'], [cell_types, regions, species]):
print 'All %s:' % string
print ', '.join(fn())
print
for category in ['amygdala', 'blowfly', 'Aspiny']:
print 'All %s:' % category
print ', '.join(cell_names(category))
print
print 'Metadata for mb100318-a:'
print json.dumps(metadata('mb100318-a'), indent=4)
print
print 'Morphology (standardized) for mb100318-a (first 10 lines):'
print '\n'.join(morphology('mb100318-a', format='standardized').split('\n')[:10]) | mit | -963,203,165,447,386,800 | 35.19375 | 137 | 0.647323 | false |
cloudify-cosmo/cloudify-cli | cloudify_cli/tests/commands/test_deployments.py | 1 | 48285 | ########
# Copyright (c) 2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from __future__ import unicode_literals
import json
import inspect
import datetime
import warnings
from uuid import UUID
from mock import patch, MagicMock, PropertyMock, Mock
from cloudify_rest_client import (
deployments,
executions,
blueprints,
deployment_updates,
execution_schedules
)
from cloudify.exceptions import NonRecoverableError
from cloudify_rest_client.exceptions import (
CloudifyClientError,
UnknownDeploymentInputError,
MissingRequiredDeploymentInputError
)
from cloudify_rest_client.deployment_modifications import (
DeploymentModification
)
from cloudify_rest_client.responses import ListResponse, Metadata
from cloudify_cli.constants import DEFAULT_TENANT_NAME
from cloudify_cli.exceptions import CloudifyCliError, CloudifyValidationError
from ... import exceptions
from .mocks import MockListResponse
from .test_base import CliCommandTest
from .constants import (BLUEPRINTS_DIR,
SAMPLE_BLUEPRINT_PATH,
SAMPLE_ARCHIVE_PATH,
SAMPLE_INPUTS_PATH)
class DeploymentUpdatesTest(CliCommandTest):
def _mock_wait_for_executions(self, value):
patcher = patch(
'cloudify_cli.execution_events_fetcher.wait_for_execution',
MagicMock(return_value=PropertyMock(error=value))
)
self.addCleanup(patcher.stop)
patcher.start()
def _mock_wait_for_blueprint_upload(self, value):
patcher = patch(
'cloudify_cli.utils.wait_for_blueprint_upload',
MagicMock(return_value=PropertyMock(error=value))
)
self.addCleanup(patcher.stop)
patcher.start()
def setUp(self):
super(DeploymentUpdatesTest, self).setUp()
self.client.license.check = Mock()
self.use_manager()
self.client.deployment_updates.update = MagicMock()
self.client.blueprints.upload = MagicMock()
self.client.executions = MagicMock()
self.client.deployment_updates.update_with_existing_blueprint = \
MagicMock()
self._mock_wait_for_executions(False)
self._mock_wait_for_blueprint_upload(False)
patcher = patch('cloudify_cli.inputs.inputs_to_dict', MagicMock())
self.addCleanup(patcher.stop)
patcher.start()
def test_deployment_update_get(self):
old_value = 'old value 1'
new_value = 'new value 1'
steps = [{'entity_id': 'step1'}, {'entity_id': 'step2'}]
self.client.deployment_updates.get = Mock(return_value={
'id': 'update-id-1',
'old_inputs': {'inp1': old_value},
'new_inputs': {'inp1': new_value},
'steps': steps,
'recursive_dependencies': {}
})
outcome = self.invoke('deployments get-update update-id-1')
self.assertIn(old_value, outcome.output)
self.assertIn(new_value, outcome.output)
for d in steps:
for k, v in d.items():
self.assertIn(str(k), outcome.output)
self.assertIn(str(v), outcome.output)
def test_deployment_update_preview(self):
old_value = 'old value 1'
new_value = 'new value 1'
steps = [
{'entity_id': 'nodes:step1', 'action': 'add'},
{'entity_id': 'nodes:step2', 'action': 'remove'},
]
self.client.deployment_updates.update_with_existing_blueprint = Mock(
return_value={
'id': 'update-id-1',
'old_inputs': {'inp1': old_value},
'new_inputs': {'inp1': new_value},
'steps': steps,
'recursive_dependencies': {'deployment': 'dependent_dep'}
})
outcome = self.invoke(
'deployments update dep-1 -b b2 --preview --json')
output = json.loads(outcome.output)
self.assertEqual(output['installed_nodes'], ['step1'])
self.assertEqual(output['uninstalled_nodes'], ['step2'])
self.assertEqual(output['recursive_dependencies'],
{'deployment': 'dependent_dep'})
# find out if the preview=True argument has been set. It might have
# been passed positionally or by name into the rest-client method,
# so let's use inspect to find out which argument value was actually
# the preview arg
calls = self.client.deployment_updates\
.update_with_existing_blueprint.mock_calls
self.assertEqual(len(calls), 1)
_, args, kwargs = calls[0]
call_args = inspect.getcallargs(
deployment_updates.DeploymentUpdatesClient(None)
.update_with_existing_blueprint,
*args, **kwargs)
self.assertTrue(call_args['preview'])
def test_deployment_update_update_plugins_is_false(self):
update_client_mock = Mock()
self.client.deployment_updates.update_with_existing_blueprint = \
update_client_mock
self.invoke('deployments update dep-1 -b b2 --dont-update-plugins')
calls = self.client.deployment_updates\
.update_with_existing_blueprint.mock_calls
self.assertEqual(len(calls), 1)
_, args, kwargs = calls[0]
call_args = inspect.getcallargs(
deployment_updates.DeploymentUpdatesClient(None)
.update_with_existing_blueprint,
*args, **kwargs)
self.assertIn('update_plugins', call_args)
self.assertFalse(call_args['update_plugins'])
def test_deployment_update_update_plugins_is_true(self):
update_client_mock = Mock()
self.client.deployment_updates.update_with_existing_blueprint = \
update_client_mock
self.invoke('deployments update dep-1 -b b2')
calls = self.client.deployment_updates\
.update_with_existing_blueprint.mock_calls
self.assertEqual(len(calls), 1)
_, args, kwargs = calls[0]
call_args = inspect.getcallargs(
deployment_updates.DeploymentUpdatesClient(None)
.update_with_existing_blueprint,
*args, **kwargs)
self.assertIn('update_plugins', call_args)
self.assertTrue(call_args['update_plugins'])
def test_deployment_update_get_json(self):
old_value = 'old value 1'
new_value = 'new value 1'
steps = [{'entity_id': 'step1'}, {'entity_id': 'step2'}]
self.client.deployment_updates.get = Mock(return_value={
'id': 'update-id-1',
'old_inputs': {'inp1': old_value},
'new_inputs': {'inp1': new_value},
'steps': steps
})
outcome = self.invoke('deployments get-update update-id-1 --json')
parsed = json.loads(outcome.output)
self.assertEqual(parsed['old_inputs'], {'inp1': old_value})
self.assertEqual(parsed['new_inputs'], {'inp1': new_value})
def test_deployment_update_successful(self):
outcome = self.invoke(
'cfy deployments update -p {0} '
'my_deployment'.format(SAMPLE_BLUEPRINT_PATH))
self.assertIn('Updating deployment my_deployment', outcome.logs)
self.assertIn('Finished executing workflow', outcome.logs)
self.assertIn(
'Successfully updated deployment my_deployment', outcome.logs)
def test_deployment_update_failure(self):
self._mock_wait_for_executions(True)
outcome = self.invoke(
'cfy deployments update -p {0} my_deployment'
.format(SAMPLE_BLUEPRINT_PATH),
err_str_segment='',
exception=exceptions.SuppressedCloudifyCliError)
logs = outcome.logs.split('\n')
self.assertIn('Updating deployment my_deployment', logs[-3])
self.assertIn('Execution of workflow', logs[-2])
self.assertIn('failed', logs[-2])
self.assertIn(
'Failed updating deployment my_deployment', logs[-1])
def test_deployment_update_json_parameter(self):
with warnings.catch_warnings(record=True) as warns:
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --json-output'
.format(SAMPLE_BLUEPRINT_PATH))
# catch_warnings sometimes gets the same thing more than once,
            # depending on how the tests are run. I don't know why.
self.assertTrue(warns)
self.assertIn('use the global', str(warns[0]))
def test_deployment_update_include_logs_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --include-logs'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_skip_install_flag(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --skip-install'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_skip_uninstall_flag(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --skip-uninstall'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_force_flag(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment --force'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_override_workflow_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} my_deployment -w override-wf'
.format(SAMPLE_BLUEPRINT_PATH))
def test_deployment_update_archive_location_parameter(self):
self.invoke(
'cfy deployments update -p {0} my_deployment'
.format(SAMPLE_ARCHIVE_PATH))
def test_dep_update_archive_loc_and_bp_path_parameters_exclusion(self):
self.invoke(
'cfy deployments update -p '
'{0} -n {1}/helloworld/'
'blueprint2.yaml my_deployment'
.format(SAMPLE_BLUEPRINT_PATH, BLUEPRINTS_DIR),
err_str_segment='param should be passed only when updating'
' from an archive'
)
def test_deployment_update_blueprint_filename_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} -n blueprint.yaml my_deployment'
.format(SAMPLE_ARCHIVE_PATH))
def test_deployment_update_inputs_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} -i {1} my_deployment'
.format(SAMPLE_ARCHIVE_PATH, SAMPLE_INPUTS_PATH))
def test_deployment_update_multiple_inputs_parameter(self):
self.invoke(
'cfy deployments update -p '
'{0} -i {1} -i {1} my_deployment'
.format(SAMPLE_ARCHIVE_PATH, SAMPLE_INPUTS_PATH))
def test_deployment_update_no_deployment_id_parameter(self):
outcome = self.invoke(
'cfy deployments update -p '
'{0}'.format(SAMPLE_ARCHIVE_PATH),
err_str_segment='2', # Exit code
exception=SystemExit)
self.assertIn('missing argument', outcome.output.lower())
self.assertIn('DEPLOYMENT_ID', outcome.output)
def test_deployment_update_no_bp_path_nor_archive_loc_parameters(self):
self.invoke(
'cfy deployments update my_deployment',
err_str_segment='Must supply either a blueprint '
'(by id of an existing blueprint, or a path to a '
'new blueprint), or new inputs',
exception=CloudifyCliError)
def test_deployment_update_inputs_correct(self):
self.invoke(
'cfy deployments update -p '
'{0} -i {1} my_deployment --auto-correct-types'
.format(SAMPLE_ARCHIVE_PATH, SAMPLE_INPUTS_PATH))
class DeploymentsTest(CliCommandTest):
def setUp(self):
super(DeploymentsTest, self).setUp()
self.use_manager()
def test_deployment_create(self):
deployment = deployments.Deployment({
'deployment_id': 'deployment_id'
})
self.client.deployments.create = MagicMock(return_value=deployment)
self.invoke(
'cfy deployments create deployment -b a-blueprint-id')
def test_deployment_create_with_skip_plugins_validation_flag(self):
deployment = deployments.Deployment({
'deployment_id': 'deployment_id'
})
self.client.deployments.create = MagicMock(return_value=deployment)
self.invoke(
'cfy deployments create deployment -b a --skip-plugins-validation')
call_args = list(self.client.deployments.create.call_args)
self.assertIn('skip_plugins_validation', call_args[1])
self.assertEqual(call_args[1]['skip_plugins_validation'], True)
def test_deployment_create_without_skip_plugins_validation_flag(self):
deployment = deployments.Deployment({
'deployment_id': 'deployment_id'
})
self.client.deployments.create = MagicMock(return_value=deployment)
self.invoke(
'cfy deployments create deployment -b aa')
call_args = list(self.client.deployments.create.call_args)
self.assertIn('skip_plugins_validation', call_args[1])
self.assertEqual(call_args[1]['skip_plugins_validation'], False)
def test_deployment_create_with_site_name(self):
deployment = deployments.Deployment({'deployment_id': 'deployment_id'})
self.client.deployments.create = MagicMock(return_value=deployment)
self.invoke('cfy deployments create deployment -b a --site-name site')
call_args = list(self.client.deployments.create.call_args)
self.assertEqual(call_args[1]['site_name'], 'site')
def test_deployment_create_invalid_site_name(self):
error_msg = 'The `site_name` argument contains illegal characters'
self.invoke('cfy deployments create deployment -b a --site-name :site',
err_str_segment=error_msg,
exception=CloudifyValidationError)
def test_deployment_create_without_site_name(self):
deployment = deployments.Deployment({'deployment_id': 'deployment_id'})
self.client.deployments.create = MagicMock(return_value=deployment)
self.invoke('cfy deployments create deployment -b a')
call_args = list(self.client.deployments.create.call_args)
self.assertIsNone(call_args[1]['site_name'])
def test_deployments_delete(self):
self.client.deployments.delete = MagicMock()
self.client.executions.list = MagicMock(
side_effect=CloudifyClientError(
'`Deployment` with ID `my-dep` was not found')
)
self.invoke('cfy deployments delete my-dep')
def test_deployments_execute(self):
execute_response = executions.Execution({'status': 'started'})
get_execution_response = executions.Execution({
'status': 'terminated',
'workflow_id': 'mock_wf',
'deployment_id': 'deployment-id',
'blueprint_id': 'blueprint-id',
'error': '',
'id': 'id',
'created_at': datetime.datetime.now(),
'parameters': {}
})
success_event = {
'event_type': 'workflow_succeeded',
'type': 'foo',
'timestamp': '12345678',
'message': 'workflow execution succeeded',
'error_causes': '<error_causes>',
'deployment_id': 'deployment-id',
'execution_id': '<execution_id>',
'source_id': None,
'target_id': None,
'node_name': '<node_name>',
'operation': '<operation>',
'workflow_id': '<workflow_id>',
'node_instance_id': '<node_instance_id>',
}
get_events_response = MockListResponse([success_event], 1)
self.client.executions.start = MagicMock(
return_value=execute_response)
self.client.executions.get = MagicMock(
return_value=get_execution_response)
self.client.events.list = MagicMock(return_value=get_events_response)
self.invoke('cfy executions start install -d a-deployment-id')
def test_deployments_list_all(self):
self.client.deployments.list = MagicMock(
return_value=MockListResponse()
)
self.invoke('cfy deployments list')
self.invoke('cfy deployments list -t dummy_tenant')
self.invoke('cfy deployments list -a')
def test_deployments_list_of_blueprint(self):
deps = [
{
'blueprint_id': 'b1_blueprint',
'created_at': 'now',
'created_by': 'admin',
'updated_at': 'now',
'id': 'id',
'visibility': 'private',
'tenant_name': DEFAULT_TENANT_NAME
},
{
'blueprint_id': 'b1_blueprint',
'created_at': 'now',
'created_by': 'admin',
'updated_at': 'now',
'id': 'id',
'visibility': 'private',
'tenant_name': DEFAULT_TENANT_NAME
},
{
'blueprint_id': 'b2_blueprint',
'created_at': 'now',
'created_by': 'admin',
'updated_at': 'now',
'id': 'id',
'visibility': 'private',
'tenant_name': DEFAULT_TENANT_NAME
}
]
self.client.deployments.list = MagicMock(
return_value=MockListResponse(items=deps)
)
outcome = self.invoke('cfy deployments list -b b1_blueprint -v')
self.assertNotIn('b2_blueprint', outcome.logs)
self.assertIn('b1_blueprint', outcome.logs)
def test_deployments_execute_nonexistent_operation(self):
# Verifying that the CLI allows for arbitrary operation names,
# while also ensuring correct error-handling of nonexistent
# operations
expected_error = "operation nonexistent-operation doesn't exist"
self.client.executions.start = MagicMock(
side_effect=CloudifyClientError(expected_error))
command = \
'cfy executions start nonexistent-operation -d a-deployment-id'
self.invoke(
command,
err_str_segment=expected_error,
exception=CloudifyClientError)
def test_deployments_outputs(self):
outputs = deployments.DeploymentOutputs({
'deployment_id': 'dep1',
'outputs': {
'port': 8080
}
})
deployment = deployments.Deployment({
'outputs': {
'port': {
'description': 'Webserver port.',
'value': '...'
}
}
})
self.client.deployments.get = MagicMock(return_value=deployment)
self.client.deployments.outputs.get = MagicMock(return_value=outputs)
self.invoke('cfy deployments outputs dep1')
def test_deployments_outputs_json(self):
outputs = deployments.DeploymentOutputs({
'deployment_id': 'dep1',
'outputs': {
'port': 8080
}
})
deployment = deployments.Deployment({
'outputs': {
'port': {
'description': 'Webserver port.',
'value': '...'
}
}
})
self.client.deployments.get = MagicMock(return_value=deployment)
self.client.deployments.outputs.get = MagicMock(return_value=outputs)
outcome = self.invoke('cfy deployments outputs dep1 --json')
parsed = json.loads(outcome.output)
self.assertEqual(parsed, {
'port': {
'value': 8080,
'description': 'Webserver port.'
}
})
def test_deployments_inputs(self):
deployment = deployments.Deployment({
'deployment_id': 'deployment_id',
'inputs': {'key1': 'val1', 'key2': 'val2'}
})
expected_outputs = [
'Retrieving inputs for deployment deployment_id...',
'- "key1":',
'Value: val1',
'- "key2":',
'Value: val2',
]
self.client.deployments.get = MagicMock(return_value=deployment)
outcome = self.invoke('cfy deployments inputs deployment_id')
outcome = [o.strip() for o in outcome.logs.split('\n')]
for output in expected_outputs:
self.assertIn(output, outcome)
def test_deployments_inputs_json(self):
deployment = deployments.Deployment({
'deployment_id': 'deployment_id',
'inputs': {'key1': 'val1', 'key2': 'val2'}
})
self.client.deployments.get = MagicMock(return_value=deployment)
outcome = self.invoke('cfy deployments inputs deployment_id --json')
parsed = json.loads(outcome.output)
self.assertEqual(parsed, {'key1': 'val1', 'key2': 'val2'})
def test_missing_required_inputs(self):
self._test_deployment_inputs(
MissingRequiredDeploymentInputError,
{'input1': 'value1'},
['Unable to create deployment']
)
def test_invalid_input(self):
self._test_deployment_inputs(
UnknownDeploymentInputError,
{'input1': 'value1',
'input2': 'value2',
'input3': 'value3'},
['Unable to create deployment']
)
def test_deployments_set_visibility(self):
self.client.deployments.set_visibility = MagicMock()
self.invoke('cfy deployments set-visibility a-deployment-id -l '
'tenant')
self.invoke('cfy deployments set-visibility a-deployment-id -l '
'global')
def test_deployments_set_visibility_invalid_argument(self):
self.invoke(
'cfy deployments set-visibility a-deployment-id -l private',
err_str_segment='Invalid visibility: `private`',
exception=CloudifyCliError
)
self.invoke(
'cfy deployments set-visibility a-deployment-id -l bla',
err_str_segment='Invalid visibility: `bla`',
exception=CloudifyCliError
)
def test_deployments_set_visibility_missing_argument(self):
outcome = self.invoke(
'cfy deployments set-visibility a-deployment-id',
err_str_segment='2',
exception=SystemExit
)
self.assertIn('missing option', outcome.output.lower())
self.assertIn('--visibility', outcome.output)
def test_deployments_set_visibility_wrong_argument(self):
outcome = self.invoke(
'cfy deployments set-visibility a-deployment-id -g',
err_str_segment='2', # Exit code
exception=SystemExit
)
self.assertIn('Error: no such option: -g', outcome.output)
def test_deployments_create_mutually_exclusive_arguments(self):
outcome = self.invoke(
'cfy deployments create deployment -b a-blueprint-id -l tenant '
'--private-resource',
err_str_segment='2', # Exit code
exception=SystemExit
)
self.assertIn('mutually exclusive with arguments:', outcome.output)
def test_deployments_create_invalid_argument(self):
self.invoke(
'cfy deployments create deployment -b a-blueprint-id -l bla',
err_str_segment='Invalid visibility: `bla`',
exception=CloudifyCliError
)
def test_deployments_create_with_visibility(self):
self.client.deployments.create = MagicMock()
self.invoke('cfy deployments create deployment -b a-blueprint-id '
'-l private')
def test_deployments_set_site_with_site_name(self):
self.client.deployments.set_site = MagicMock()
self.invoke('cfy deployments set-site deployment_1 --site-name site')
call_args = list(self.client.deployments.set_site.call_args)
self.assertEqual(call_args[0][0], 'deployment_1')
self.assertEqual(call_args[1]['site_name'], 'site')
self.assertFalse(call_args[1]['detach_site'])
def test_deployments_set_site_without_options(self):
error_msg = 'Must provide either a `--site-name` of a valid site ' \
'or `--detach-site`'
self.invoke('cfy deployments set-site deployment_1',
err_str_segment=error_msg,
exception=CloudifyCliError)
def test_deployments_set_site_with_detach(self):
self.client.deployments.set_site = MagicMock()
self.invoke('cfy deployments set-site deployment_1 --detach-site')
call_args = list(self.client.deployments.set_site.call_args)
self.assertEqual(call_args[0][0], 'deployment_1')
self.assertIsNone(call_args[1]['site_name'])
self.assertTrue(call_args[1]['detach_site'])
def test_deployments_set_site_mutually_exclusive(self):
outcome = self.invoke(
'cfy deployments set-site deployment_1 -s site --detach-site',
err_str_segment='2', # Exit code
exception=SystemExit
)
error_msg = 'Error: Illegal usage: `detach_site` is ' \
'mutually exclusive with arguments: [site_name]'
self.assertIn(error_msg, outcome.output)
def test_deployment_set_site_no_deployment_id(self):
outcome = self.invoke('cfy deployments set-site',
err_str_segment='2', # Exit code
exception=SystemExit)
self.assertIn('missing argument', outcome.output.lower())
self.assertIn('DEPLOYMENT_ID', outcome.output)
def test_deployment_set_site_invalid_site_name(self):
error_msg = 'The `site_name` argument contains illegal characters'
self.invoke('cfy deployments set-site deployment_1 --site-name :site',
err_str_segment=error_msg,
exception=CloudifyValidationError)
def _test_deployment_inputs(self, exception_type,
inputs, expected_outputs=None):
def raise_error(*args, **kwargs):
raise exception_type('no inputs')
blueprint = blueprints.Blueprint({
'plan': {
'inputs': {
'input1': {'description': 'val1'},
'input2': {'description': 'val2'}
}
}
})
self.client.blueprints.get = MagicMock(return_value=blueprint)
self.client.deployments.create = raise_error
inputs_line = ' '.join(
['-i {0}={1}'.format(key, value) for
key, value in inputs.items()])
outcome = self.invoke(
'cfy deployments create deployment -b a-blueprint-id {0}'.format(
inputs_line),
exception=exceptions.SuppressedCloudifyCliError,
err_str_segment='no inputs'
)
outcome = [o.strip() for o in outcome.logs.split('\n')]
if not expected_outputs:
expected_outputs = []
for output in expected_outputs:
found = False
for outcome_line in outcome:
if output in outcome_line:
found = True
break
            self.assertTrue(found, "String '{0}' not found in outcome {1}"
.format(output, outcome))
def test_create_deployment_with_display_name(self):
dep_display_name = 'Depl\xf3yment'
self.client.deployments.create = Mock()
self.invoke('cfy deployments create -b bp1 -n {0} '
'dep1'.format(dep_display_name))
call_args = list(self.client.deployments.create.call_args)
self.assertEqual(call_args[1]['display_name'], dep_display_name)
def test_create_deployment_display_name_defaults_to_id(self):
dep_id = 'dep1'
self.client.deployments.create = Mock()
self.invoke('cfy deployments create -b bp1 {0}'.format(dep_id))
call_args = list(self.client.deployments.create.call_args)
self.assertEqual(call_args[1]['display_name'], dep_id)
def test_create_deployment_with_generated_id(self):
self.client.deployments.create = Mock()
self.invoke('cfy deployments create -b bp1 --generate-id')
call_args = list(self.client.deployments.create.call_args)
try:
UUID(call_args[0][1], version=4)
except ValueError:
raise Exception('The deployment was not created with a valid UUID')
def test_create_deployment_with_id_and_generate_id_fails(self):
self.invoke('cfy deployments create -b bp1 --generate-id dep1',
err_str_segment='cannot be provided',
exception=CloudifyCliError)
def test_list_deployments_with_search_name(self):
search_name_pattern = 'De#pl\xf3yment 1'
self.client.deployments.list = Mock(return_value=MockListResponse())
self.invoke('cfy deployments list --search-name '
'"{0}"'.format(search_name_pattern))
call_args = list(self.client.deployments.list.call_args)
self.assertEqual(call_args[1].get('_search_name'), search_name_pattern)
class DeploymentModificationsTest(CliCommandTest):
def _mock_wait_for_executions(self, value):
patcher = patch(
'cloudify_cli.execution_events_fetcher.wait_for_execution',
MagicMock(return_value=PropertyMock(error=value))
)
self.addCleanup(patcher.stop)
patcher.start()
def setUp(self):
super(DeploymentModificationsTest, self).setUp()
self.use_manager()
self._deployment_modifications = [
DeploymentModification({
'id': '0229a7d4-0bef-4d95-910d-a341663172e1',
'deployment_id': 'dep1',
'context': {
'workflow_id': 'scale',
'execution_id': '842686d6-e960-48a6-95b5-250fc26a7ed4',
},
'status': 'finished',
'tenant_name': 'default_tenant',
'created_at': datetime.datetime(2019, 8, 27, 16, 5, 24),
'visibility': 'tenant'
}),
DeploymentModification({
'id': 'e8962cbd-6645-4c60-9d6d-ee3215b39808',
'deployment_id': 'dep1',
'context': {
'workflow_id': 'scale',
'execution_id': 'c6bfc3de-ca19-4335-be77-b12edccba582',
},
'status': 'started',
'tenant_name': 'default_tenant',
'created_at': datetime.datetime(2019, 8, 27, 16, 35, 24),
'visibility': 'tenant'
}),
]
def test_deployment_modifications_list(self):
self.client.deployment_modifications.list = Mock(
return_value=ListResponse(
items=self._deployment_modifications,
metadata=Metadata({'pagination': {'total': 2}})
)
)
dps = self.invoke('cfy deployments modifications list dep1')
assert dps.logs == """Listing modifications of the deployment dep1...
Showing 2 of 2 deployment modifications"""
output_lines = dps.output.split('\n')
deployment_modification_found = 0
for line in output_lines:
if '0229a7d4-0bef-4d95-910d-a341663172e1' in line:
deployment_modification_found += 1
assert 'scale' in line
assert '842686d6-e960-48a6-95b5-250fc26a7ed4' in line
assert 'finished' in line
assert 'default_tenant' in line
assert '2019-08-27 16:05:24' in line
if 'e8962cbd-6645-4c60-9d6d-ee3215b39808' in line:
deployment_modification_found += 1
assert 'scale' in line
assert 'c6bfc3de-ca19-4335-be77-b12edccba582' in line
assert 'started' in line
assert 'default_tenant' in line
assert '2019-08-27 16:35:24' in line
assert deployment_modification_found == 2
def test_deployment_modifications_no_context(self):
deployment_modification = self._deployment_modifications[0]
deployment_modification.pop('context')
self.client.deployment_modifications.list = Mock(
return_value=ListResponse(
items=[deployment_modification],
metadata=Metadata({'pagination': {'total': 1}})
)
)
dps = self.invoke('cfy deployments modifications list dep1')
assert dps.logs == """Listing modifications of the deployment dep1...
Showing 1 of 1 deployment modifications"""
output_lines = dps.output.split('\n')
deployment_modification_found = 0
for line in output_lines:
if '0229a7d4-0bef-4d95-910d-a341663172e1' in line:
deployment_modification_found += 1
assert 'N/A' in line
assert 'finished' in line
assert 'default_tenant' in line
assert '2019-08-27 16:05:24' in line
assert deployment_modification_found == 1
def test_deployment_modifications_get(self):
deployment_modification = self._deployment_modifications[0]
deployment_modification.update(
{
'modified_nodes': {
'node1': []
},
'node_instances': {
'before_modification': [
{'id': 'node1_18fda8', 'node_id': 'node1'},
{'id': 'node2_z3t4uc', 'node_id': 'node2'},
],
'added_and_related': [
{'id': 'node2_z3t4uc', 'node_id': 'node2'},
{'id': 'node1_olbbe0', 'node_id': 'node1',
'modification': 'added'},
]
},
}
)
self.client.deployment_modifications.get = Mock(
return_value=deployment_modification
)
dps = self.invoke('cfy deployments modifications get '
'0229a7d4-0bef-4d95-910d-a341663172e1')
assert dps.logs == 'Retrieving deployment modification ' \
'0229a7d4-0bef-4d95-910d-a341663172e1...'
output_lines = dps.output.split('\n')
assert 'Modified nodes:' in output_lines
assert 'Node instances before modifications:' in output_lines
assert 'Added node instances:' in output_lines
assert 'Node instances before rollback:' not in output_lines
assert 'Removed node instances:' not in output_lines
added_title_idx = output_lines.index('Added node instances:')
assert 'node1_olbbe0 (node1)' in output_lines[added_title_idx + 1]
class DeploymentScheduleTest(CliCommandTest):
def setUp(self):
super(DeploymentScheduleTest, self).setUp()
self.use_manager()
def test_deployment_schedule_create(self):
self.client.execution_schedules.create = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
self.invoke('cfy deployments schedule create dep1 backup '
'-s "12:00" -u "+1w +1d" -r 2d --tz EST')
now = datetime.datetime.utcnow()
expected_since = now.replace(
hour=17, minute=0, second=0, microsecond=0)
expected_until = now.replace(second=0, microsecond=0) + \
datetime.timedelta(days=8)
call_args = list(self.client.execution_schedules.create.call_args)
assert call_args[0][0] == 'backup'
assert call_args[1]['since'] == expected_since
assert call_args[1]['until'] == expected_until
assert call_args[1]['recurrence'] == '2d'
def test_deployment_schedule_create_with_schedule_name(self):
self.client.execution_schedules.create = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
self.invoke('cfy deployments schedule create dep1 backup '
'-n back_me_up -s "1905-6-13 12:00" --tz GMT')
expected_since = \
datetime.datetime.strptime('1905-6-13 12:00', '%Y-%m-%d %H:%M')
call_args = list(self.client.execution_schedules.create.call_args)
assert call_args[0][0] == 'back_me_up'
assert call_args[1]['since'] == expected_since
assert not call_args[1]['recurrence']
assert not call_args[1]['until']
def test_deployment_schedule_create_missing_since(self):
outcome = self.invoke(
'cfy deployments schedule create dep1 backup',
err_str_segment='2', # Exit code
exception=SystemExit
)
self.assertIn("Missing option '-s' / '--since'", outcome.output)
def test_deployment_schedule_create_missing_workflow_id(self):
outcome = self.invoke(
'cfy deployments schedule create dep1 -s "12:33"',
err_str_segment='2', # Exit code
exception=SystemExit
)
self.assertIn("Missing argument 'WORKFLOW_ID'", outcome.output)
def test_deployment_schedule_create_bad_time_expressions(self):
self.client.execution_schedules.create = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
command = 'cfy deployments schedule create dep1 install -s "{}"'
error_msg = '{} is not a legal time format. accepted formats are ' \
'YYYY-MM-DD HH:MM | HH:MM'
illegal_time_formats = ['blah', '15:33:18', '99:99',
'2000/1/1 09:17', '-1 min']
for time_format in illegal_time_formats:
self.invoke(
command.format(time_format),
err_str_segment=error_msg.format(time_format),
exception=NonRecoverableError)
illegal_time_deltas = ['+10 dobosh', '+rez']
for delta in illegal_time_deltas:
self.invoke(
command.format(delta),
err_str_segment='{} is not a legal time delta'.format(
delta.strip('+')),
exception=NonRecoverableError)
def test_deployment_schedule_create_bad_timezone(self):
self.invoke('cfy deployments schedule create dep1 install '
'-s "7:15" --tz Mars/SpaceX',
err_str_segment='Mars/SpaceX is not a recognized timezone',
exception=NonRecoverableError)
def test_deployment_schedule_create_months_delta(self):
self.client.execution_schedules.create = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
self.invoke('cfy deployments schedule create dep backup -s "+13mo"')
call_args = list(self.client.execution_schedules.create.call_args)
now = datetime.datetime.utcnow()
current_month = now.month
current_year = now.year
current_day = now.day
expected_month = 1 if current_month == 12 else current_month + 1
expected_year = current_year + (2 if current_month == 12 else 1)
expected_since = now.replace(
second=0, microsecond=0,
year=expected_year, month=expected_month, day=1)
expected_since += datetime.timedelta(days=current_day - 1)
assert call_args[1]['since'] == expected_since
def test_deployment_schedule_create_years_delta(self):
self.client.execution_schedules.create = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
self.invoke('cfy deployments schedule create dep backup -s "+2y"')
call_args = list(self.client.execution_schedules.create.call_args)
now = datetime.datetime.utcnow()
expected_since = now.replace(second=0, microsecond=0, year=now.year+2)
assert call_args[1]['since'] == expected_since
def test_deployment_schedule_create_hours_minutes_delta(self):
self.client.execution_schedules.create = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
self.invoke('cfy deployments schedule create dep backup '
'-s "+25 hours+119min"')
call_args = list(self.client.execution_schedules.create.call_args)
expected_since = \
(datetime.datetime.utcnow().replace(second=0, microsecond=0) +
datetime.timedelta(days=1, hours=2, minutes=59))
assert call_args[1]['since'] == expected_since
def test_deployment_schedule_update(self):
self.client.execution_schedules.update = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
self.invoke('cfy deployments schedule update dep sched-1 -r "3 weeks" '
'-u "22:00" --tz "Asia/Shanghai"')
expected_until = datetime.datetime.utcnow().replace(
hour=14, minute=0, second=0, microsecond=0)
call_args = list(self.client.execution_schedules.update.call_args)
assert call_args[0][0] == 'sched-1'
assert call_args[1]['recurrence'] == '3 weeks'
assert call_args[1]['until'] == expected_until
def test_deployment_schedule_enable(self):
mock_schedule = MagicMock()
mock_schedule.enabled = False
self.client.execution_schedules.get = MagicMock(
return_value=mock_schedule)
self.client.execution_schedules.update = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
self.invoke('cfy deployments schedule enable dep sched-1')
call_args = list(self.client.execution_schedules.update.call_args)
assert call_args[1]['enabled']
def test_deployment_schedule_enable_already_enabled(self):
mock_schedule = MagicMock()
mock_schedule.enabled = True
self.client.execution_schedules.get = MagicMock(
return_value=mock_schedule)
self.invoke(
'cfy deployments schedule enable dep sched-1',
err_str_segment='Schedule `sched-1` on deployment `dep` is '
'already enabled',
exception=CloudifyCliError)
def test_deployment_schedule_disable(self):
mock_schedule = MagicMock()
mock_schedule.enabled = True
self.client.execution_schedules.get = MagicMock(
return_value=mock_schedule)
self.client.execution_schedules.update = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
self.invoke('cfy deployments schedule disable dep sched-1')
call_args = list(self.client.execution_schedules.update.call_args)
assert not call_args[1]['enabled']
def test_deployment_schedule_disable_already_disabled(self):
mock_schedule = MagicMock()
mock_schedule.enabled = False
self.client.execution_schedules.get = MagicMock(
return_value=mock_schedule)
self.invoke(
'cfy deployments schedule disable dep sched-1',
err_str_segment='Schedule `sched-1` on deployment `dep` is '
'already disabled',
exception=CloudifyCliError)
def test_deployment_schedule_delete(self):
self.client.execution_schedules.delete = MagicMock(
return_value=execution_schedules.ExecutionSchedule({}))
self.invoke('cfy deployments schedule delete dep sched-1')
def test_deployment_schedule_list(self):
self.client.execution_schedules.list = \
self._get_deployment_schedules_list()
output = json.loads(
self.invoke('cfy deployments schedule list --json').output)
assert len(output) == 3
def test_deployment_schedule_list_filter_since(self):
self.client.execution_schedules.list = \
self._get_deployment_schedules_list()
# jan1 will be excluded: has no occurrences at/after Jan 2nd
output = json.loads(
self.invoke('cfy deployments schedule list -s "1900-1-2 0:00" '
'--tz GMT --json').output)
assert len(output) == 2
def test_deployment_schedule_list_filter_until(self):
self.client.execution_schedules.list = \
self._get_deployment_schedules_list()
# jan2_jan3 will be excluded: has no occurrences before Jan 2nd
output = json.loads(
self.invoke('cfy deployments schedule list -u "1900-1-2 0:00" '
'--tz GMT --json').output)
assert len(output) == 2
@staticmethod
def _get_deployment_schedules_list():
schedules = [
{
'id': 'jan1_jan2',
'deployment_id': 'dep1',
'all_next_occurrences': ['1900-1-1 12:00:00',
'1900-1-2 12:00:00'],
},
{
'id': 'jan2_jan3',
'deployment_id': 'dep1',
'all_next_occurrences': ['1900-1-2 12:00:00',
'1900-1-3 12:00:00'],
},
{
'id': 'jan1',
'deployment_id': 'dep2',
'all_next_occurrences': ['1900-1-1 12:00:00'],
}
]
return MagicMock(return_value=MockListResponse(items=schedules))
@staticmethod
def _get_deployment_schedule_detailed(enabled=True):
return MagicMock(
return_value=execution_schedules.ExecutionSchedule({
'id': 'sched_get',
'deployment_id': 'dep3',
'rule': {},
'execution_arguments': {},
'parameters': {},
'enabled': enabled,
'all_next_occurrences': ['1900-1-1 12:00:00',
'1900-1-2 12:00:00',
'1900-1-3 12:00:00']
}))
def test_deployment_schedule_get(self):
self.client.execution_schedules.get = \
self._get_deployment_schedule_detailed()
output = self.invoke('cfy deployments schedule get dep sched_get '
'--preview 2')
self.assertIn('Computed 3 upcoming occurrences. Listing first 2:',
output.output)
self.assertIn('| sched_get | dep3 |', output.output)
self.assertIn('1 1900-1-1 12:00:00', output.output)
self.assertIn('2 1900-1-2 12:00:00', output.output)
def test_deployment_schedule_get_no_preview(self):
self.client.execution_schedules.get = \
self._get_deployment_schedule_detailed()
output = self.invoke('cfy deployments schedule get dep sched_get')
self.assertIn('| sched_get | dep3 |', output.output)
self.assertNotIn('Computed 3 upcoming occurrences', output.output)
def test_deployment_schedule_get_no_preview_because_disabled(self):
self.client.execution_schedules.get = \
self._get_deployment_schedule_detailed(enabled=False)
output = self.invoke(
'cfy deployments schedule get dep sched_get --preview 1',
err_str_segment='Deployment schedule sched_get is disabled, '
'no upcoming occurrences',
exception=CloudifyCliError)
self.assertIn('| sched_get | dep3 |', output.output)
| apache-2.0 | -2,392,071,210,047,672,000 | 40.339897 | 79 | 0.589334 | false |
Scille/parsec-cloud | parsec/backend/vlob.py | 1 | 13190 | # Parsec Cloud (https://parsec.cloud) Copyright (c) AGPLv3 2016-2021 Scille SAS
from typing import List, Tuple, Dict, Optional
from uuid import UUID
import pendulum
from parsec.utils import timestamps_in_the_ballpark
from parsec.api.protocol import (
DeviceID,
OrganizationID,
vlob_create_serializer,
vlob_read_serializer,
vlob_update_serializer,
vlob_poll_changes_serializer,
vlob_list_versions_serializer,
vlob_maintenance_get_reencryption_batch_serializer,
vlob_maintenance_save_reencryption_batch_serializer,
)
from parsec.backend.utils import catch_protocol_errors, api
class VlobError(Exception):
pass
class VlobAccessError(VlobError):
pass
class VlobVersionError(VlobError):
pass
class VlobTimestampError(VlobError):
pass
class VlobNotFoundError(VlobError):
pass
class VlobAlreadyExistsError(VlobError):
pass
class VlobEncryptionRevisionError(VlobError):
pass
class VlobInMaintenanceError(VlobError):
pass
class VlobNotInMaintenanceError(VlobError):
pass
class VlobMaintenanceError(VlobError):
pass
class BaseVlobComponent:
@api("vlob_create")
@catch_protocol_errors
async def api_vlob_create(self, client_ctx, msg):
msg = vlob_create_serializer.req_load(msg)
now = pendulum.now()
if not timestamps_in_the_ballpark(msg["timestamp"], now):
return {"status": "bad_timestamp", "reason": f"Timestamp is out of date."}
try:
await self.create(client_ctx.organization_id, client_ctx.device_id, **msg)
except VlobAlreadyExistsError as exc:
return vlob_create_serializer.rep_dump({"status": "already_exists", "reason": str(exc)})
except VlobAccessError:
return vlob_create_serializer.rep_dump({"status": "not_allowed"})
except VlobEncryptionRevisionError:
return vlob_create_serializer.rep_dump({"status": "bad_encryption_revision"})
except VlobInMaintenanceError:
return vlob_create_serializer.rep_dump({"status": "in_maintenance"})
return vlob_create_serializer.rep_dump({"status": "ok"})
@api("vlob_read")
@catch_protocol_errors
async def api_vlob_read(self, client_ctx, msg):
msg = vlob_read_serializer.req_load(msg)
try:
version, blob, author, created_on = await self.read(
client_ctx.organization_id, client_ctx.device_id, **msg
)
except VlobNotFoundError as exc:
return vlob_read_serializer.rep_dump({"status": "not_found", "reason": str(exc)})
except VlobAccessError:
return vlob_read_serializer.rep_dump({"status": "not_allowed"})
except VlobVersionError:
return vlob_read_serializer.rep_dump({"status": "bad_version"})
except VlobTimestampError:
return vlob_read_serializer.rep_dump({"status": "bad_timestamp"})
except VlobEncryptionRevisionError:
return vlob_create_serializer.rep_dump({"status": "bad_encryption_revision"})
except VlobInMaintenanceError:
return vlob_read_serializer.rep_dump({"status": "in_maintenance"})
return vlob_read_serializer.rep_dump(
{
"status": "ok",
"blob": blob,
"version": version,
"author": author,
"timestamp": created_on,
}
)
@api("vlob_update")
@catch_protocol_errors
async def api_vlob_update(self, client_ctx, msg):
msg = vlob_update_serializer.req_load(msg)
now = pendulum.now()
if not timestamps_in_the_ballpark(msg["timestamp"], now):
return {"status": "bad_timestamp", "reason": f"Timestamp is out of date."}
try:
await self.update(client_ctx.organization_id, client_ctx.device_id, **msg)
except VlobNotFoundError as exc:
return vlob_update_serializer.rep_dump({"status": "not_found", "reason": str(exc)})
except VlobAccessError:
return vlob_update_serializer.rep_dump({"status": "not_allowed"})
except VlobVersionError:
return vlob_update_serializer.rep_dump({"status": "bad_version"})
except VlobTimestampError:
return vlob_update_serializer.rep_dump({"status": "bad_timestamp"})
except VlobEncryptionRevisionError:
return vlob_create_serializer.rep_dump({"status": "bad_encryption_revision"})
except VlobInMaintenanceError:
return vlob_update_serializer.rep_dump({"status": "in_maintenance"})
return vlob_update_serializer.rep_dump({"status": "ok"})
@api("vlob_poll_changes")
@catch_protocol_errors
async def api_vlob_poll_changes(self, client_ctx, msg):
msg = vlob_poll_changes_serializer.req_load(msg)
# TODO: raise error if too many events since offset ?
try:
checkpoint, changes = await self.poll_changes(
client_ctx.organization_id,
client_ctx.device_id,
msg["realm_id"],
msg["last_checkpoint"],
)
except VlobAccessError:
return vlob_poll_changes_serializer.rep_dump({"status": "not_allowed"})
except VlobNotFoundError as exc:
return vlob_poll_changes_serializer.rep_dump(
{"status": "not_found", "reason": str(exc)}
)
except VlobInMaintenanceError:
return vlob_poll_changes_serializer.rep_dump({"status": "in_maintenance"})
return vlob_poll_changes_serializer.rep_dump(
{"status": "ok", "current_checkpoint": checkpoint, "changes": changes}
)
@api("vlob_list_versions")
@catch_protocol_errors
async def api_vlob_list_versions(self, client_ctx, msg):
msg = vlob_list_versions_serializer.req_load(msg)
try:
versions_dict = await self.list_versions(
client_ctx.organization_id, client_ctx.device_id, msg["vlob_id"]
)
except VlobAccessError:
return vlob_list_versions_serializer.rep_dump({"status": "not_allowed"})
except VlobNotFoundError as exc:
return vlob_list_versions_serializer.rep_dump(
{"status": "not_found", "reason": str(exc)}
)
except VlobInMaintenanceError:
return vlob_list_versions_serializer.rep_dump({"status": "in_maintenance"})
return vlob_list_versions_serializer.rep_dump({"status": "ok", "versions": versions_dict})
@api("vlob_maintenance_get_reencryption_batch")
@catch_protocol_errors
async def api_vlob_maintenance_get_reencryption_batch(self, client_ctx, msg):
msg = vlob_maintenance_get_reencryption_batch_serializer.req_load(msg)
try:
batch = await self.maintenance_get_reencryption_batch(
client_ctx.organization_id, client_ctx.device_id, **msg
)
except VlobAccessError:
return vlob_maintenance_get_reencryption_batch_serializer.rep_dump(
{"status": "not_allowed"}
)
except VlobNotFoundError as exc:
return vlob_maintenance_get_reencryption_batch_serializer.rep_dump(
{"status": "not_found", "reason": str(exc)}
)
except VlobNotInMaintenanceError as exc:
return vlob_maintenance_get_reencryption_batch_serializer.rep_dump(
{"status": "not_in_maintenance", "reason": str(exc)}
)
except VlobEncryptionRevisionError:
return vlob_create_serializer.rep_dump({"status": "bad_encryption_revision"})
except VlobMaintenanceError as exc:
return vlob_maintenance_get_reencryption_batch_serializer.rep_dump(
{"status": "maintenance_error", "reason": str(exc)}
)
return vlob_maintenance_get_reencryption_batch_serializer.rep_dump(
{
"status": "ok",
"batch": [
{"vlob_id": vlob_id, "version": version, "blob": blob}
for vlob_id, version, blob in batch
],
}
)
@api("vlob_maintenance_save_reencryption_batch")
@catch_protocol_errors
async def api_vlob_maintenance_save_reencryption_batch(self, client_ctx, msg):
msg = vlob_maintenance_save_reencryption_batch_serializer.req_load(msg)
try:
total, done = await self.maintenance_save_reencryption_batch(
client_ctx.organization_id,
client_ctx.device_id,
realm_id=msg["realm_id"],
encryption_revision=msg["encryption_revision"],
batch=[(x["vlob_id"], x["version"], x["blob"]) for x in msg["batch"]],
)
except VlobAccessError:
return vlob_maintenance_save_reencryption_batch_serializer.rep_dump(
{"status": "not_allowed"}
)
except VlobNotFoundError as exc:
return vlob_maintenance_save_reencryption_batch_serializer.rep_dump(
{"status": "not_found", "reason": str(exc)}
)
except VlobNotInMaintenanceError as exc:
return vlob_maintenance_get_reencryption_batch_serializer.rep_dump(
{"status": "not_in_maintenance", "reason": str(exc)}
)
except VlobEncryptionRevisionError:
return vlob_create_serializer.rep_dump({"status": "bad_encryption_revision"})
except VlobMaintenanceError as exc:
return vlob_maintenance_save_reencryption_batch_serializer.rep_dump(
{"status": "maintenance_error", "reason": str(exc)}
)
return vlob_maintenance_save_reencryption_batch_serializer.rep_dump(
{"status": "ok", "total": total, "done": done}
)
async def create(
self,
organization_id: OrganizationID,
author: DeviceID,
realm_id: UUID,
encryption_revision: int,
vlob_id: UUID,
timestamp: pendulum.DateTime,
blob: bytes,
) -> None:
"""
Raises:
VlobAlreadyExistsError
VlobEncryptionRevisionError: if encryption_revision mismatch
VlobInMaintenanceError
"""
raise NotImplementedError()
async def read(
self,
organization_id: OrganizationID,
author: DeviceID,
encryption_revision: int,
vlob_id: UUID,
version: Optional[int] = None,
timestamp: Optional[pendulum.DateTime] = None,
) -> Tuple[int, bytes, DeviceID, pendulum.DateTime]:
"""
Raises:
VlobAccessError
VlobVersionError
VlobNotFoundError
VlobEncryptionRevisionError: if encryption_revision mismatch
VlobInMaintenanceError
"""
raise NotImplementedError()
async def update(
self,
organization_id: OrganizationID,
author: DeviceID,
encryption_revision: int,
vlob_id: UUID,
version: int,
timestamp: pendulum.DateTime,
blob: bytes,
) -> None:
"""
Raises:
VlobAccessError
VlobVersionError
VlobTimestampError
VlobNotFoundError
VlobEncryptionRevisionError: if encryption_revision mismatch
VlobInMaintenanceError
"""
raise NotImplementedError()
async def poll_changes(
self, organization_id: OrganizationID, author: DeviceID, realm_id: UUID, checkpoint: int
) -> Tuple[int, Dict[UUID, int]]:
"""
Raises:
VlobInMaintenanceError
VlobNotFoundError
VlobAccessError
"""
raise NotImplementedError()
async def list_versions(
self, organization_id: OrganizationID, author: DeviceID, vlob_id: UUID
) -> Dict[int, Tuple[pendulum.DateTime, DeviceID]]:
"""
Raises:
VlobInMaintenanceError
VlobNotFoundError
VlobAccessError
"""
raise NotImplementedError()
async def maintenance_get_reencryption_batch(
self,
organization_id: OrganizationID,
author: DeviceID,
realm_id: UUID,
encryption_revision: int,
size: int,
) -> List[Tuple[UUID, int, bytes]]:
"""
Raises:
VlobNotFoundError
VlobAccessError
VlobEncryptionRevisionError
VlobMaintenanceError: not in maintenance
"""
raise NotImplementedError()
async def maintenance_save_reencryption_batch(
self,
organization_id: OrganizationID,
author: DeviceID,
realm_id: UUID,
encryption_revision: int,
batch: List[Tuple[UUID, int, bytes]],
) -> Tuple[int, int]:
"""
Raises:
VlobNotFoundError
VlobAccessError
VlobEncryptionRevisionError
VlobMaintenanceError: not in maintenance
"""
raise NotImplementedError()
| agpl-3.0 | -1,780,366,703,321,259,500 | 31.407862 | 100 | 0.605762 | false |
nakagami/reportlab | src/reportlab/pdfbase/ttfonts.py | 1 | 45587 | #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
__version__ = '$Id: ttfonts.py 3959 2012-09-27 14:39:39Z robin $'
__doc__="""TrueType font support
This defines classes to represent TrueType fonts. They know how to calculate
their own width and how to write themselves into PDF files. They support
subsetting and embedding and can represent all 16-bit Unicode characters.
Note on dynamic fonts
---------------------
Usually a Font in ReportLab corresponds to a fixed set of PDF objects (Font,
FontDescriptor, Encoding). But with dynamic font subsetting a single TTFont
will result in a number of Font/FontDescriptor/Encoding object sets, and the
contents of those will depend on the actual characters used for printing.
To support dynamic font subsetting a concept of "dynamic font" was introduced.
Dynamic Fonts have a _dynamicFont attribute set to 1.
Dynamic fonts have the following additional functions::
def splitString(self, text, doc):
'''Splits text into a number of chunks, each of which belongs to a
single subset. Returns a list of tuples (subset, string). Use
subset numbers with getSubsetInternalName. Doc is used to identify
a document so that different documents may have different dynamically
constructed subsets.'''
def getSubsetInternalName(self, subset, doc):
'''Returns the name of a PDF Font object corresponding to a given
subset of this dynamic font. Use this function instead of
PDFDocument.getInternalFontName.'''
You must never call PDFDocument.getInternalFontName for dynamic fonts.
If you have a traditional static font, mapping to PDF text output operators
is simple::
'%s 14 Tf (%s) Tj' % (getInternalFontName(psfontname), text)
If you have a dynamic font, use this instead::
for subset, chunk in font.splitString(text, doc):
'%s 14 Tf (%s) Tj' % (font.getSubsetInternalName(subset, doc), chunk)
(Tf is a font setting operator and Tj is a text ouput operator. You should
also escape invalid characters in Tj argument, see TextObject._formatText.
Oh, and that 14 up there is font size.)
Canvas and TextObject have special support for dynamic fonts.
"""
import sys
import string
from struct import pack, unpack, error as structError
from reportlab.lib.utils import getBytesIO, isUnicodeType
from reportlab.pdfbase import pdfmetrics, pdfdoc
from reportlab import rl_config
class TTFError(pdfdoc.PDFError):
"TrueType font exception"
pass
if sys.version_info[0] == 3:
def SUBSETN(n,table=bytes.maketrans(b'0123456789',b'ABCDEFGHIJ')):
return ('%6.6d'%n).translate(table)
else:
def SUBSETN(n,table=string.maketrans('0123456789','ABCDEFGHIJ')):
return ('%6.6d'%n).translate(table)
#
# Helpers
#
from codecs import utf_8_encode, latin_1_decode
def latin1_to_utf8(text):
"helper to convert when needed from latin input"
return utf_8_encode(latin_1_decode(text)[0])[0]
def makeToUnicodeCMap(fontname, subset):
"""Creates a ToUnicode CMap for a given subset. See Adobe
_PDF_Reference (ISBN 0-201-75839-3) for more information."""
cmap = [
"/CIDInit /ProcSet findresource begin",
"12 dict begin",
"begincmap",
"/CIDSystemInfo",
"<< /Registry (%s)" % fontname,
"/Ordering (%s)" % fontname,
"/Supplement 0",
">> def",
"/CMapName /%s def" % fontname,
"/CMapType 2 def",
"1 begincodespacerange",
"<00> <%02X>" % (len(subset) - 1),
"endcodespacerange",
"%d beginbfchar" % len(subset)
] + ["<%02X> <%04X>" % (i,v) for i,v in enumerate(subset)] + [
"endbfchar",
"endcmap",
"CMapName currentdict /CMap defineresource pop",
"end",
"end"
]
return '\n'.join(cmap)
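# The CMap text returned above is intended to be embedded as the /ToUnicode
# stream of each generated subset font, which lets PDF viewers map the
# single-byte subset codes back to Unicode when extracting or searching text.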
def splice(stream, offset, value):
"""Splices the given value into stream at the given offset and
returns the resulting stream (the original is unchanged)"""
return stream[:offset] + value + stream[offset + len(value):]
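# For example, splice(b"ABCDEF", 2, b"xy") returns b"ABxyEF"; the input
# bytes object itself is never modified.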
def _set_ushort(stream, offset, value):
"""Writes the given unsigned short value into stream at the given
offset and returns the resulting stream (the original is unchanged)"""
return splice(stream, offset, pack(">H", value))
try:
import _rl_accel
except ImportError:
try:
from reportlab.lib import _rl_accel
except ImportError:
_rl_accel = None
try:
hex32 = _rl_accel.hex32
except:
def hex32(i):
return '0X%8.8X' % (i&0xFFFFFFFF)
try:
add32 = _rl_accel.add32L
calcChecksum = _rl_accel.calcChecksumL
except:
def add32(x, y):
"Calculate (x + y) modulo 2**32"
return (x+y) & 0xFFFFFFFF
def calcChecksum(data):
"""Calculates TTF-style checksums"""
if len(data)&3: data = data + (4-(len(data)&3))*b"\0"
return sum(unpack(">%dl" % (len(data)>>2), data)) & 0xFFFFFFFF
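# TrueType checksums are sums of big-endian unsigned longs modulo 2**32.
# The checkSumAdjustment field of the 'head' table is chosen so that the
# checksum of the entire font file equals 0xB1B0AFBA; checksumFile() below
# relies on this, and TTFontMaker.makeStream() recomputes the adjustment for
# the subset fonts it writes.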
del _rl_accel
#
# TrueType font handling
#
GF_ARG_1_AND_2_ARE_WORDS = 1 << 0
GF_ARGS_ARE_XY_VALUES = 1 << 1
GF_ROUND_XY_TO_GRID = 1 << 2
GF_WE_HAVE_A_SCALE = 1 << 3
GF_RESERVED = 1 << 4
GF_MORE_COMPONENTS = 1 << 5
GF_WE_HAVE_AN_X_AND_Y_SCALE = 1 << 6
GF_WE_HAVE_A_TWO_BY_TWO = 1 << 7
GF_WE_HAVE_INSTRUCTIONS = 1 << 8
GF_USE_MY_METRICS = 1 << 9
GF_OVERLAP_COMPOUND = 1 << 10
GF_SCALED_COMPONENT_OFFSET = 1 << 11
GF_UNSCALED_COMPONENT_OFFSET = 1 << 12
def TTFOpenFile(fn):
'''Opens a TTF file possibly after searching TTFSearchPath
returns (filename,file)
'''
from reportlab.lib.utils import rl_isfile, open_for_read
try:
f = open_for_read(fn,'rb')
return fn, f
except IOError:
import os
if not os.path.isabs(fn):
for D in rl_config.TTFSearchPath:
tfn = os.path.join(D,fn)
if rl_isfile(tfn):
f = open_for_read(tfn,'rb')
return tfn, f
raise TTFError('Can\'t open file "%s"' % fn)
class TTFontParser:
"Basic TTF file parser"
ttfVersions = (0x00010000,0x74727565,0x74746366)
ttcVersions = (0x00010000,0x00020000)
fileKind='TTF'
def __init__(self, file, validate=0,subfontIndex=0):
"""Loads and parses a TrueType font file. file can be a filename or a
        file object. If validate is set to a false value, skips checksum
validation. This can save time, especially if the font is large.
"""
self.validate = validate
self.readFile(file)
isCollection = self.readHeader()
if isCollection:
self.readTTCHeader()
self.getSubfont(subfontIndex)
else:
if self.validate: self.checksumFile()
self.readTableDirectory()
self.subfontNameX = ''
def readTTCHeader(self):
self.ttcVersion = self.read_ulong()
self.fileKind = 'TTC'
self.ttfVersions = self.ttfVersions[:-1]
if self.ttcVersion not in self.ttcVersions:
raise TTFError('"%s" is not a %s file: can\'t read version 0x%8.8x' %(self.filename,self.fileKind,self.ttcVersion))
self.numSubfonts = self.read_ulong()
self.subfontOffsets = []
a = self.subfontOffsets.append
for i in range(self.numSubfonts):
a(self.read_ulong())
def getSubfont(self,subfontIndex):
if self.fileKind!='TTC':
raise TTFError('"%s" is not a TTC file: use this method' % (self.filename,self.fileKind))
try:
pos = self.subfontOffsets[subfontIndex]
except IndexError:
raise TTFError('TTC file "%s": bad subfontIndex %s not in [0,%d]' % (self.filename,subfontIndex,self.numSubfonts-1))
self.seek(pos)
self.readHeader()
self.readTableDirectory()
self.subfontNameX = '-'+str(subfontIndex)
def readTableDirectory(self):
try:
self.numTables = self.read_ushort()
self.searchRange = self.read_ushort()
self.entrySelector = self.read_ushort()
self.rangeShift = self.read_ushort()
# Read table directory
self.table = {}
self.tables = []
for n in range(self.numTables):
record = {}
record['tag'] = self.read_tag()
record['checksum'] = self.read_ulong()
record['offset'] = self.read_ulong()
record['length'] = self.read_ulong()
self.tables.append(record)
self.table[record['tag']] = record
except:
raise TTFError('Corrupt %s file "%s" cannot read Table Directory' % (self.fileKind, self.filename))
if self.validate: self.checksumTables()
def readHeader(self):
'''read the sfnt header at the current position'''
try:
self.version = version = self.read_ulong()
except:
raise TTFError('"%s" is not a %s file: can\'t read version' %(self.filename,self.fileKind))
if version==0x4F54544F:
raise TTFError('%s file "%s": postscript outlines are not supported'%(self.fileKind,self.filename))
if version not in self.ttfVersions:
raise TTFError('Not a TrueType font: version=0x%8.8X' % version)
return version==self.ttfVersions[-1]
def readFile(self,f):
if hasattr(f,'read'):
self.filename = '(ttf)'
else:
self.filename, f = TTFOpenFile(f)
self._ttf_data = f.read()
self._pos = 0
def checksumTables(self):
# Check the checksums for all tables
for t in self.tables:
table = self.get_chunk(t['offset'], t['length'])
checksum = calcChecksum(table)
if t['tag'] == 'head':
adjustment = unpack('>l', table[8:8+4])[0]
checksum = add32(checksum, -adjustment)
xchecksum = t['checksum']
if xchecksum != checksum:
raise TTFError('TTF file "%s": invalid checksum %s table: %s (expected %s)' % (self.filename,hex32(checksum),t['tag'],hex32(xchecksum)))
def checksumFile(self):
# Check the checksums for the whole file
checksum = calcChecksum(self._ttf_data)
if 0xB1B0AFBA!=checksum:
raise TTFError('TTF file "%s": invalid checksum %s (expected 0xB1B0AFBA) len: %d &3: %d' % (self.filename,hex32(checksum),len(self._ttf_data),(len(self._ttf_data)&3)))
def get_table_pos(self, tag):
"Returns the offset and size of a given TTF table."
offset = self.table[tag]['offset']
length = self.table[tag]['length']
return (offset, length)
def seek(self, pos):
"Moves read pointer to a given offset in file."
self._pos = pos
def skip(self, delta):
"Skip the given number of bytes."
self._pos = self._pos + delta
def seek_table(self, tag, offset_in_table = 0):
"""Moves read pointer to the given offset within a given table and
returns absolute offset of that position in the file."""
self._pos = self.get_table_pos(tag)[0] + offset_in_table
return self._pos
def read_tag(self):
"Read a 4-character tag"
self._pos += 4
s = self._ttf_data[self._pos - 4:self._pos]
        if sys.version_info[0]==3 and not isinstance(s, str):
s = s.decode('utf-8')
return s
def read_ushort(self):
"Reads an unsigned short"
self._pos += 2
return unpack('>H',self._ttf_data[self._pos-2:self._pos])[0]
def read_ulong(self):
"Reads an unsigned long"
self._pos += 4
return unpack('>L',self._ttf_data[self._pos - 4:self._pos])[0]
def read_short(self):
"Reads a signed short"
self._pos += 2
try:
return unpack('>h',self._ttf_data[self._pos-2:self._pos])[0]
except structError as error:
raise TTFError(error)
def get_ushort(self, pos):
"Return an unsigned short at given position"
return unpack('>H',self._ttf_data[pos:pos+2])[0]
def get_ulong(self, pos):
"Return an unsigned long at given position"
return unpack('>L',self._ttf_data[pos:pos+4])[0]
def get_chunk(self, pos, length):
"Return a chunk of raw data at given position"
return self._ttf_data[pos:pos+length]
def get_table(self, tag):
"Return the given TTF table"
pos, length = self.get_table_pos(tag)
return self._ttf_data[pos:pos+length]
class TTFontMaker:
"Basic TTF file generator"
def __init__(self):
"Initializes the generator."
self.tables = {}
def add(self, tag, data):
"Adds a table to the TTF file."
if tag == 'head':
data = splice(data, 8, b'\0\0\0\0')
self.tables[tag] = data
def makeStream(self):
"Finishes the generation and returns the TTF file as a string"
stm = getBytesIO()
write = stm.write
numTables = len(self.tables)
searchRange = 1
entrySelector = 0
while searchRange * 2 <= numTables:
searchRange = searchRange * 2
entrySelector = entrySelector + 1
searchRange = searchRange * 16
rangeShift = numTables * 16 - searchRange
# Header
write(pack(">lHHHH", 0x00010000, numTables, searchRange,
entrySelector, rangeShift))
# Table directory
tables = list(self.tables.items())
tables.sort() # XXX is this the correct order?
offset = 12 + numTables * 16
for tag, data in tables:
if tag == 'head':
head_start = offset
checksum = calcChecksum(data)
if isUnicodeType(tag):
tag = tag.encode('utf-8')
write(tag)
write(pack(">LLL", checksum, offset, len(data)))
paddedLength = (len(data)+3)&~3
offset = offset + paddedLength
# Table data
for tag, data in tables:
data += b"\0\0\0"
write(data[:len(data)&~3])
checksum = calcChecksum(stm.getvalue())
checksum = add32(0xB1B0AFBA, -checksum)
stm.seek(head_start + 8)
write(pack('>L', checksum))
return stm.getvalue()
class TTFontFile(TTFontParser):
"TTF file parser and generator"
def __init__(self, file, charInfo=1, validate=0,subfontIndex=0):
"""Loads and parses a TrueType font file.
file can be a filename or a file object. If validate is set to a false
values, skips checksum validation. This can save time, especially if
the font is large. See TTFontFile.extractInfo for more information.
"""
TTFontParser.__init__(self, file, validate=validate,subfontIndex=subfontIndex)
self.extractInfo(charInfo)
def extractInfo(self, charInfo=1):
"""
Extract typographic information from the loaded font file.
The following attributes will be set::
name PostScript font name
flags Font flags
ascent Typographic ascender in 1/1000ths of a point
descent Typographic descender in 1/1000ths of a point
capHeight Cap height in 1/1000ths of a point (0 if not available)
bbox Glyph bounding box [l,t,r,b] in 1/1000ths of a point
_bbox Glyph bounding box [l,t,r,b] in unitsPerEm
unitsPerEm Glyph units per em
italicAngle Italic angle in degrees ccw
stemV stem weight in 1/1000ths of a point (approximate)
If charInfo is true, the following will also be set::
defaultWidth default glyph width in 1/1000ths of a point
charWidths dictionary of character widths for every supported UCS character
code
This will only work if the font has a Unicode cmap (platform 3,
encoding 1, format 4 or platform 0 any encoding format 4). Setting
charInfo to false avoids this requirement
"""
# name - Naming table
name_offset = self.seek_table("name")
format = self.read_ushort()
if format != 0:
raise TTFError("Unknown name table format (%d)" % format)
numRecords = self.read_ushort()
string_data_offset = name_offset + self.read_ushort()
names = {1:None,2:None,3:None,4:None,6:None}
K = names.keys()
nameCount = len(names)
for i in range(numRecords):
platformId = self.read_ushort()
encodingId = self.read_ushort()
languageId = self.read_ushort()
nameId = self.read_ushort()
length = self.read_ushort()
offset = self.read_ushort()
if nameId not in K: continue
N = None
if platformId == 3 and encodingId == 1 and languageId == 0x409: # Microsoft, Unicode, US English, PS Name
opos = self._pos
try:
self.seek(string_data_offset + offset)
if length % 2 != 0:
raise TTFError("PostScript name is UTF-16BE string of odd length")
                    length //= 2
N = []
A = N.append
while length > 0:
char = self.read_ushort()
A(chr(char))
length -= 1
N = ''.join(N)
finally:
self._pos = opos
elif platformId == 1 and encodingId == 0 and languageId == 0: # Macintosh, Roman, English, PS Name
# According to OpenType spec, if PS name exists, it must exist
# both in MS Unicode and Macintosh Roman formats. Apparently,
# you can find live TTF fonts which only have Macintosh format.
N = self.get_chunk(string_data_offset + offset, length)
if N and names[nameId]==None:
                if sys.version_info[0]==3 and not isinstance(N, str):
N = N.decode('utf-8')
names[nameId] = N
nameCount -= 1
if nameCount==0: break
if names[6] is not None:
psName = names[6].replace(" ", "-") #Dinu Gherman's fix for font names with spaces
elif names[4] is not None:
psName = names[4].replace(" ", "-")
# Fine, one last try before we bail.
elif names[1] is not None:
psName = names[1].replace(" ", "-")
else:
psName = None
# Don't just assume, check for None since some shoddy fonts cause crashes here...
if not psName:
raise TTFError("Could not find PostScript font name")
for c in psName:
oc = ord(c)
if oc>126 or c in ' [](){}<>/%':
raise TTFError("psName=%r contains invalid character '%s' ie U+%04X" % (psName,c,ord(c)))
self.name = psName
self.familyName = names[1] or psName
self.styleName = names[2] or 'Regular'
self.fullName = names[4] or psName
self.uniqueFontID = names[3] or psName
# head - Font header table
self.seek_table("head")
ver_maj, ver_min = self.read_ushort(), self.read_ushort()
if ver_maj != 1:
raise TTFError('Unknown head table version %d.%04x' % (ver_maj, ver_min))
self.fontRevision = self.read_ushort(), self.read_ushort()
self.skip(4)
magic = self.read_ulong()
if magic != 0x5F0F3CF5:
raise TTFError('Invalid head table magic %04x' % magic)
self.skip(2)
self.unitsPerEm = unitsPerEm = self.read_ushort()
scale = lambda x, unitsPerEm=unitsPerEm: x * 1000. / unitsPerEm
self.skip(16)
xMin = self.read_short()
yMin = self.read_short()
xMax = self.read_short()
yMax = self.read_short()
self.bbox = [scale(i) for i in [xMin, yMin, xMax, yMax]]
self.skip(3*2)
indexToLocFormat = self.read_ushort()
glyphDataFormat = self.read_ushort()
# OS/2 - OS/2 and Windows metrics table
# (needs data from head table)
if "OS/2" in self.table:
self.seek_table("OS/2")
version = self.read_ushort()
self.skip(2)
usWeightClass = self.read_ushort()
self.skip(2)
fsType = self.read_ushort()
if fsType == 0x0002 or (fsType & 0x0300) != 0:
raise TTFError('Font does not allow subsetting/embedding (%04X)' % fsType)
self.skip(58) #11*2 + 10 + 4*4 + 4 + 3*2
sTypoAscender = self.read_short()
sTypoDescender = self.read_short()
self.ascent = scale(sTypoAscender) # XXX: for some reason it needs to be multiplied by 1.24--1.28
self.descent = scale(sTypoDescender)
if version > 1:
self.skip(16) #3*2 + 2*4 + 2
sCapHeight = self.read_short()
self.capHeight = scale(sCapHeight)
else:
self.capHeight = self.ascent
else:
# Microsoft TTFs require an OS/2 table; Apple ones do not. Try to
# cope. The data is not very important anyway.
usWeightClass = 500
self.ascent = scale(yMax)
self.descent = scale(yMin)
self.capHeight = self.ascent
# There's no way to get stemV from a TTF file short of analyzing actual outline data
# This fuzzy formula is taken from pdflib sources, but we could just use 0 here
self.stemV = 50 + int((usWeightClass / 65.0) ** 2)
# post - PostScript table
# (needs data from OS/2 table)
self.seek_table("post")
ver_maj, ver_min = self.read_ushort(), self.read_ushort()
if ver_maj not in (1, 2, 3, 4):
# Adobe/MS documents 1, 2, 2.5, 3; Apple also has 4.
# From Apple docs it seems that we do not need to care
# about the exact version, so if you get this error, you can
# try to remove this check altogether.
raise TTFError('Unknown post table version %d.%04x' % (ver_maj, ver_min))
self.italicAngle = self.read_short() + self.read_ushort() / 65536.0
self.underlinePosition = self.read_short()
self.underlineThickness = self.read_short()
isFixedPitch = self.read_ulong()
self.flags = FF_SYMBOLIC # All fonts that contain characters
# outside the original Adobe character
# set are considered "symbolic".
if self.italicAngle!= 0:
self.flags = self.flags | FF_ITALIC
if usWeightClass >= 600: # FW_REGULAR == 500, FW_SEMIBOLD == 600
self.flags = self.flags | FF_FORCEBOLD
if isFixedPitch:
self.flags = self.flags | FF_FIXED
# XXX: FF_SERIF? FF_SCRIPT? FF_ALLCAP? FF_SMALLCAP?
# hhea - Horizontal header table
self.seek_table("hhea")
ver_maj, ver_min = self.read_ushort(), self.read_ushort()
if ver_maj != 1:
raise TTFError('Unknown hhea table version %d.%04x' % (ver_maj, ver_min))
self.skip(28)
metricDataFormat = self.read_ushort()
if metricDataFormat != 0:
raise TTFError('Unknown horizontal metric data format (%d)' % metricDataFormat)
numberOfHMetrics = self.read_ushort()
if numberOfHMetrics == 0:
raise TTFError('Number of horizontal metrics is 0')
# maxp - Maximum profile table
self.seek_table("maxp")
ver_maj, ver_min = self.read_ushort(), self.read_ushort()
if ver_maj != 1:
raise TTFError('Unknown maxp table version %d.%04x' % (ver_maj, ver_min))
numGlyphs = self.read_ushort()
if not charInfo:
self.charToGlyph = None
self.defaultWidth = None
self.charWidths = None
return
if glyphDataFormat != 0:
raise TTFError('Unknown glyph data format (%d)' % glyphDataFormat)
# cmap - Character to glyph index mapping table
cmap_offset = self.seek_table("cmap")
self.skip(2)
cmapTableCount = self.read_ushort()
unicode_cmap_offset = None
for n in range(cmapTableCount):
platformID = self.read_ushort()
encodingID = self.read_ushort()
offset = self.read_ulong()
if platformID == 3 and encodingID == 1: # Microsoft, Unicode
format = self.get_ushort(cmap_offset + offset)
if format == 4:
unicode_cmap_offset = cmap_offset + offset
break
elif platformID == 0: # Unicode -- assume all encodings are compatible
format = self.get_ushort(cmap_offset + offset)
if format == 4:
unicode_cmap_offset = cmap_offset + offset
break
if unicode_cmap_offset is None:
raise TTFError('Font does not have cmap for Unicode (platform 3, encoding 1, format 4 or platform 0 any encoding format 4)')
self.seek(unicode_cmap_offset + 2)
length = self.read_ushort()
limit = unicode_cmap_offset + length
self.skip(2)
segCount = int(self.read_ushort() / 2.0)
self.skip(6)
endCount = list(map(lambda x, self=self: self.read_ushort(), range(segCount)))
self.skip(2)
startCount = list(map(lambda x, self=self: self.read_ushort(), range(segCount)))
idDelta =list(map(lambda x, self=self: self.read_short(), range(segCount)))
idRangeOffset_start = self._pos
idRangeOffset = list(map(lambda x, self=self: self.read_ushort(), range(segCount)))
# Now it gets tricky.
glyphToChar = {}
charToGlyph = {}
for n in range(segCount):
for unichar in range(startCount[n], endCount[n] + 1):
if idRangeOffset[n] == 0:
glyph = (unichar + idDelta[n]) & 0xFFFF
else:
offset = (unichar - startCount[n]) * 2 + idRangeOffset[n]
offset = idRangeOffset_start + 2 * n + offset
if offset >= limit:
# workaround for broken fonts (like Thryomanes)
glyph = 0
else:
glyph = self.get_ushort(offset)
if glyph != 0:
glyph = (glyph + idDelta[n]) & 0xFFFF
charToGlyph[unichar] = glyph
if glyph in glyphToChar:
glyphToChar[glyph].append(unichar)
else:
glyphToChar[glyph] = [unichar]
self.charToGlyph = charToGlyph
# hmtx - Horizontal metrics table
# (needs data from hhea, maxp, and cmap tables)
self.seek_table("hmtx")
aw = None
self.charWidths = {}
self.hmetrics = []
for glyph in range(numberOfHMetrics):
# advance width and left side bearing. lsb is actually signed
# short, but we don't need it anyway (except for subsetting)
aw, lsb = self.read_ushort(), self.read_ushort()
self.hmetrics.append((aw, lsb))
aw = scale(aw)
if glyph == 0:
self.defaultWidth = aw
if glyph in glyphToChar:
for char in glyphToChar[glyph]:
self.charWidths[char] = aw
for glyph in range(numberOfHMetrics, numGlyphs):
# the rest of the table only lists advance left side bearings.
# so we reuse aw set by the last iteration of the previous loop
lsb = self.read_ushort()
self.hmetrics.append((aw, lsb))
if glyph in glyphToChar:
for char in glyphToChar[glyph]:
self.charWidths[char] = aw
# loca - Index to location
self.seek_table('loca')
self.glyphPos = []
if indexToLocFormat == 0:
for n in range(numGlyphs + 1):
self.glyphPos.append(self.read_ushort() << 1)
elif indexToLocFormat == 1:
for n in range(numGlyphs + 1):
self.glyphPos.append(self.read_ulong())
else:
raise TTFError('Unknown location table format (%d)' % indexToLocFormat)
# Subsetting
def makeSubset(self, subset):
"""Create a subset of a TrueType font"""
output = TTFontMaker()
# Build a mapping of glyphs in the subset to glyph numbers in
# the original font. Also build a mapping of UCS codes to
# glyph values in the new font.
# Start with 0 -> 0: "missing character"
glyphMap = [0] # new glyph index -> old glyph index
glyphSet = {0:0} # old glyph index -> new glyph index
codeToGlyph = {} # unicode -> new glyph index
for code in subset:
if code in self.charToGlyph:
originalGlyphIdx = self.charToGlyph[code]
else:
originalGlyphIdx = 0
if originalGlyphIdx not in glyphSet:
glyphSet[originalGlyphIdx] = len(glyphMap)
glyphMap.append(originalGlyphIdx)
codeToGlyph[code] = glyphSet[originalGlyphIdx]
# Also include glyphs that are parts of composite glyphs
start = self.get_table_pos('glyf')[0]
n = 0
while n < len(glyphMap):
originalGlyphIdx = glyphMap[n]
glyphPos = self.glyphPos[originalGlyphIdx]
glyphLen = self.glyphPos[originalGlyphIdx + 1] - glyphPos
n += 1
if not glyphLen: continue
self.seek(start + glyphPos)
numberOfContours = self.read_short()
if numberOfContours < 0:
# composite glyph
self.skip(8)
flags = GF_MORE_COMPONENTS
while flags & GF_MORE_COMPONENTS:
flags = self.read_ushort()
glyphIdx = self.read_ushort()
if glyphIdx not in glyphSet:
glyphSet[glyphIdx] = len(glyphMap)
glyphMap.append(glyphIdx)
if flags & GF_ARG_1_AND_2_ARE_WORDS:
self.skip(4)
else:
self.skip(2)
if flags & GF_WE_HAVE_A_SCALE:
self.skip(2)
elif flags & GF_WE_HAVE_AN_X_AND_Y_SCALE:
self.skip(4)
elif flags & GF_WE_HAVE_A_TWO_BY_TWO:
self.skip(8)
numGlyphs = n = len(glyphMap)
while n > 1 and self.hmetrics[n][0] == self.hmetrics[n - 1][0]:
n -= 1
numberOfHMetrics = n
# The following tables are simply copied from the original
for tag in ('name', 'OS/2', 'cvt ', 'fpgm', 'prep'):
try:
output.add(tag, self.get_table(tag))
except KeyError:
# Apparently some of the tables are optional (cvt, fpgm, prep).
# The lack of the required ones (name, OS/2) would have already
# been caught before.
pass
# post - PostScript
post = b"\x00\x03\x00\x00" + self.get_table('post')[4:16] + b"\x00" * 16
output.add('post', post)
# hhea - Horizontal Header
hhea = self.get_table('hhea')
hhea = _set_ushort(hhea, 34, numberOfHMetrics)
output.add('hhea', hhea)
# maxp - Maximum Profile
maxp = self.get_table('maxp')
maxp = _set_ushort(maxp, 4, numGlyphs)
output.add('maxp', maxp)
# cmap - Character to glyph mapping
# XXX maybe use format 0 if possible, not 6?
entryCount = len(subset)
length = 10 + entryCount * 2
cmap = [0, 1, # version, number of tables
1, 0, 0,12, # platform, encoding, offset (hi,lo)
6, length, 0, # format, length, language
0,
entryCount] + \
[codeToGlyph.get(code) for code in subset]
cmap = pack(*([">%dH" % len(cmap)] + cmap))
output.add('cmap', cmap)
# hmtx - Horizontal Metrics
hmtx = []
for n in range(numGlyphs):
originalGlyphIdx = glyphMap[n]
aw, lsb = self.hmetrics[originalGlyphIdx]
if n < numberOfHMetrics:
hmtx.append(int(aw))
hmtx.append(int(lsb))
hmtx = pack(*([">%dH" % len(hmtx)] + hmtx))
output.add('hmtx', hmtx)
# glyf - Glyph data
glyphData = self.get_table('glyf')
offsets = []
glyf = []
pos = 0
for n in range(numGlyphs):
offsets.append(pos)
originalGlyphIdx = glyphMap[n]
glyphPos = self.glyphPos[originalGlyphIdx]
glyphLen = self.glyphPos[originalGlyphIdx + 1] - glyphPos
data = glyphData[glyphPos:glyphPos+glyphLen]
# Fix references in composite glyphs
if glyphLen > 2 and unpack(">h", data[:2])[0] < 0:
# composite glyph
pos_in_glyph = 10
flags = GF_MORE_COMPONENTS
while flags & GF_MORE_COMPONENTS:
flags = unpack(">H", data[pos_in_glyph:pos_in_glyph+2])[0]
glyphIdx = unpack(">H", data[pos_in_glyph+2:pos_in_glyph+4])[0]
data = _set_ushort(data, pos_in_glyph + 2, glyphSet[glyphIdx])
pos_in_glyph = pos_in_glyph + 4
if flags & GF_ARG_1_AND_2_ARE_WORDS:
pos_in_glyph = pos_in_glyph + 4
else:
pos_in_glyph = pos_in_glyph + 2
if flags & GF_WE_HAVE_A_SCALE:
pos_in_glyph = pos_in_glyph + 2
elif flags & GF_WE_HAVE_AN_X_AND_Y_SCALE:
pos_in_glyph = pos_in_glyph + 4
elif flags & GF_WE_HAVE_A_TWO_BY_TWO:
pos_in_glyph = pos_in_glyph + 8
glyf.append(data)
pos = pos + glyphLen
if pos % 4 != 0:
padding = 4 - pos % 4
glyf.append(b'\0' * padding)
pos = pos + padding
offsets.append(pos)
output.add('glyf', b"".join(glyf))
# loca - Index to location
loca = []
if (pos + 1) >> 1 > 0xFFFF:
indexToLocFormat = 1 # long format
for offset in offsets:
loca.append(offset)
loca = pack(*([">%dL" % len(loca)] + loca))
else:
indexToLocFormat = 0 # short format
for offset in offsets:
loca.append(offset >> 1)
loca = pack(*([">%dH" % len(loca)] + loca))
output.add('loca', loca)
# head - Font header
head = self.get_table('head')
head = _set_ushort(head, 50, indexToLocFormat)
output.add('head', head)
return output.makeStream()
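# Illustrative sketch (not part of the original module): building a subset
# stream directly from a TTFontFile.  In normal operation the ``subset`` list
# comes from TTFont.State.subsets, where the list index becomes the new
# single-byte code and the value is the Unicode code point; the font path
# passed in here is a placeholder.
def _example_subset_stream(font_path, text):
    font = TTFontFile(font_path)
    subset = [32] + [ord(c) for c in text if ord(c) != 32]
    return font.makeSubset(subset)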
#
# TrueType font embedding
#
# PDF font flags (see PDF Reference Guide table 5.19)
FF_FIXED = 1 << 1-1
FF_SERIF = 1 << 2-1
FF_SYMBOLIC = 1 << 3-1
FF_SCRIPT = 1 << 4-1
FF_NONSYMBOLIC = 1 << 6-1
FF_ITALIC = 1 << 7-1
FF_ALLCAP = 1 << 17-1
FF_SMALLCAP = 1 << 18-1
FF_FORCEBOLD = 1 << 19-1
class TTFontFace(TTFontFile, pdfmetrics.TypeFace):
"""TrueType typeface.
Conceptually similar to a single byte typeface, but the glyphs are
identified by UCS character codes instead of glyph names."""
def __init__(self, filename, validate=0, subfontIndex=0):
"Loads a TrueType font from filename."
pdfmetrics.TypeFace.__init__(self, None)
TTFontFile.__init__(self, filename, validate=validate, subfontIndex=subfontIndex)
def getCharWidth(self, code):
"Returns the width of character U+<code>"
return self.charWidths.get(code, self.defaultWidth)
def addSubsetObjects(self, doc, fontname, subset):
"""Generate a TrueType font subset and add it to the PDF document.
Returns a PDFReference to the new FontDescriptor object."""
fontFile = pdfdoc.PDFStream()
fontFile.content = self.makeSubset(subset)
fontFile.dictionary['Length1'] = len(fontFile.content)
if doc.compression:
fontFile.filters = [pdfdoc.PDFZCompress]
fontFileRef = doc.Reference(fontFile, 'fontFile:%s(%s)' % (self.filename, fontname))
flags = self.flags & ~ FF_NONSYMBOLIC
flags = flags | FF_SYMBOLIC
fontDescriptor = pdfdoc.PDFDictionary({
'Type': '/FontDescriptor',
'Ascent': self.ascent,
'CapHeight': self.capHeight,
'Descent': self.descent,
'Flags': flags,
'FontBBox': pdfdoc.PDFArray(self.bbox),
'FontName': pdfdoc.PDFName(fontname),
'ItalicAngle': self.italicAngle,
'StemV': self.stemV,
'FontFile2': fontFileRef,
})
return doc.Reference(fontDescriptor, 'fontDescriptor:' + fontname)
class TTEncoding:
"""Encoding for TrueType fonts (always UTF-8).
TTEncoding does not directly participate in PDF object creation, since
we need a number of different 8-bit encodings for every generated font
subset. TTFont itself cares about that."""
def __init__(self):
self.name = "UTF-8"
class TTFont:
"""Represents a TrueType font.
Its encoding is always UTF-8.
Note: you cannot use the same TTFont object for different documents
at the same time.
Example of usage:
font = ttfonts.TTFont('PostScriptFontName', '/path/to/font.ttf')
pdfmetrics.registerFont(font)
canvas.setFont('PostScriptFontName', size)
canvas.drawString(x, y, "Some text encoded in UTF-8")
"""
class State:
namePrefix = 'F'
def __init__(self,asciiReadable=None):
self.assignments = {}
self.nextCode = 0
self.internalName = None
self.frozen = 0
if asciiReadable is None:
asciiReadable = rl_config.ttfAsciiReadable
if asciiReadable:
# Let's add the first 128 unicodes to the 0th subset, so ' '
# always has code 32 (for word spacing to work) and the ASCII
# output is readable
subset0 = list(range(128))
self.subsets = [subset0]
for n in subset0:
self.assignments[n] = n
self.nextCode = 128
else:
self.subsets = [[32]*33]
self.assignments[32] = 32
_multiByte = 1 # We want our own stringwidth
_dynamicFont = 1 # We want dynamic subsetting
def __init__(self, name, filename, validate=0, subfontIndex=0,asciiReadable=None):
"""Loads a TrueType font from filename.
If validate is set to a false values, skips checksum validation. This
can save time, especially if the font is large.
"""
self.fontName = name
self.face = TTFontFace(filename, validate=validate, subfontIndex=subfontIndex)
self.encoding = TTEncoding()
from weakref import WeakKeyDictionary
self.state = WeakKeyDictionary()
if asciiReadable is None:
asciiReadable = rl_config.ttfAsciiReadable
self._asciiReadable = asciiReadable
def _py_stringWidth(self, text, size, encoding='utf-8'):
"Calculate text width"
if not isUnicodeType(text):
text = text.decode(encoding or 'utf-8') # encoding defaults to utf-8
g = self.face.charWidths.get
dw = self.face.defaultWidth
return 0.001*size*sum([g(ord(u),dw) for u in text])
stringWidth = _py_stringWidth
def _assignState(self,doc,asciiReadable=None,namePrefix=None):
'''convenience function for those wishing to roll their own state properties'''
if asciiReadable is None:
asciiReadable = self._asciiReadable
try:
state = self.state[doc]
except KeyError:
state = self.state[doc] = TTFont.State(asciiReadable)
if namePrefix is not None:
state.namePrefix = namePrefix
return state
def splitString(self, text, doc, encoding='utf-8'):
"""Splits text into a number of chunks, each of which belongs to a
single subset. Returns a list of tuples (subset, string). Use subset
numbers with getSubsetInternalName. Doc is needed for distinguishing
subsets when building different documents at the same time."""
asciiReadable = self._asciiReadable
try: state = self.state[doc]
except KeyError: state = self.state[doc] = TTFont.State(asciiReadable)
curSet = -1
cur = []
results = []
if not isUnicodeType(text):
text = text.decode(encoding or 'utf-8') # encoding defaults to utf-8
assignments = state.assignments
subsets = state.subsets
for code in map(ord,text):
if code in assignments:
n = assignments[code]
else:
if state.frozen:
raise pdfdoc.PDFError("Font %s is already frozen, cannot add new character U+%04X" % (self.fontName, code))
n = state.nextCode
if n&0xFF==32:
# make code 32 always be a space character
if n!=32: subsets[n >> 8].append(32)
state.nextCode += 1
n = state.nextCode
state.nextCode += 1
assignments[code] = n
if n>32:
if not(n&0xFF): subsets.append([])
subsets[n >> 8].append(code)
else:
subsets[0][n] = code
if (n >> 8) != curSet:
if cur:
results.append((curSet, ''.join(map(chr,cur))))
curSet = (n >> 8)
cur = []
cur.append(n & 0xFF)
if cur:
results.append((curSet,''.join(map(chr,cur))))
return results
def getSubsetInternalName(self, subset, doc):
"""Returns the name of a PDF Font object corresponding to a given
subset of this dynamic font. Use this function instead of
PDFDocument.getInternalFontName."""
try: state = self.state[doc]
except KeyError: state = self.state[doc] = TTFont.State(self._asciiReadable)
if subset < 0 or subset >= len(state.subsets):
raise IndexError('Subset %d does not exist in font %s' % (subset, self.fontName))
if state.internalName is None:
state.internalName = state.namePrefix +repr(len(doc.fontMapping) + 1)
doc.fontMapping[self.fontName] = '/' + state.internalName
doc.delayedFonts.append(self)
return '/%s+%d' % (state.internalName, subset)
def addObjects(self, doc):
"""Makes one or more PDF objects to be added to the document. The
caller supplies the internal name to be used (typically F1, F2, ... in
sequence).
This method creates a number of Font and FontDescriptor objects. Every
FontDescriptor is a (no more than) 256 character subset of the original
TrueType font."""
try: state = self.state[doc]
except KeyError: state = self.state[doc] = TTFont.State(self._asciiReadable)
state.frozen = 1
for n,subset in enumerate(state.subsets):
internalName = self.getSubsetInternalName(n, doc)[1:]
baseFontName = "%s+%s%s" % (SUBSETN(n),self.face.name,self.face.subfontNameX)
pdfFont = pdfdoc.PDFTrueTypeFont()
pdfFont.__Comment__ = 'Font %s subset %d' % (self.fontName, n)
pdfFont.Name = internalName
pdfFont.BaseFont = baseFontName
pdfFont.FirstChar = 0
pdfFont.LastChar = len(subset) - 1
widths = map(self.face.getCharWidth, subset)
pdfFont.Widths = pdfdoc.PDFArray(widths)
cmapStream = pdfdoc.PDFStream()
cmapStream.content = makeToUnicodeCMap(baseFontName, subset)
if doc.compression:
cmapStream.filters = [pdfdoc.PDFZCompress]
pdfFont.ToUnicode = doc.Reference(cmapStream, 'toUnicodeCMap:' + baseFontName)
pdfFont.FontDescriptor = self.face.addSubsetObjects(doc, baseFontName, subset)
# link it in
ref = doc.Reference(pdfFont, internalName)
fontDict = doc.idToObject['BasicFonts'].dict
fontDict[internalName] = pdfFont
del self.state[doc]
try:
from _rl_accel import _instanceStringWidthTTF
import new
TTFont.stringWidth = new.instancemethod(_instanceStringWidthTTF,None,TTFont)
except ImportError:
pass
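# Illustrative usage sketch (not part of the original module): register a
# TrueType font under a PostScript name and measure a string, as described in
# the TTFont docstring above.  The font path is a placeholder and must point
# to a real .ttf file; ``pdfmetrics`` is the module already imported at the
# top of this file.
def _example_register_and_measure(font_path='DejaVuSans.ttf'):
    font = TTFont('ExampleTT', font_path)
    pdfmetrics.registerFont(font)
    return font.stringWidth(u'Some text encoded in UTF-8', 12)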
| bsd-3-clause | 468,004,839,853,736,800 | 38.67537 | 179 | 0.566653 | false |
lightalchemist/ML-algorithms | dim_reduction/spectral_embedding.py | 1 | 1410 | # -*- coding: utf-8 -*-
import numpy as np
def compute_pairwise_distance_matrix(X, k):
"""Compute pairwise distances between each point in X
and its k-nearest neighbors."""
from scipy.spatial import KDTree
kdtree = KDTree(X)
A = np.zeros((X.shape[0], X.shape[0]), dtype=np.float)
for i, x in enumerate(X):
distances, idxs = kdtree.query(x, k+1) # k+1 as one pt is the pt itself.
for d, j in zip(distances, idxs):
A[i, j] = d**2 # Store squared euclidean distance
return A
def transform(X, k=2, n_neighbors=5, sigma=1, eps=0.0001):
"""Perform dimension reduction using the eigenvectors
of the graph laplacian computed from a given set of data.
Project data X from original dimension n to k,
where X is a numpy array of dimension [m x n] and k <= n.
"""
W = compute_pairwise_distance_matrix(X, n_neighbors)
W = np.maximum(W, W.T) # Ensure W symmetric.
W[W > 0] = np.exp(- W[W > 0] / (2 * sigma**2)) # Apply gaussian kernel
D = np.diag(np.sum(W, axis=1)) # Row sum of W
L = D - W
# L = L + eps * np.eye(len(X)) # Improve the condition of the graph laplacian
Dinvsqrt = np.sqrt(np.linalg.pinv(D))
L = Dinvsqrt.dot(L).dot(Dinvsqrt) # Normalized graph laplacian
V, U = np.linalg.eigh(L)
idx = np.argsort(V) # Sort in ascending order
V, U = V[idx], U[:, idx]
return U[:, :k]
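# Illustrative usage sketch (not part of the original module): embed a small
# random point cloud into two dimensions.  The data and parameter values are
# arbitrary placeholders.
def _example_spectral_embedding():
    X = np.random.rand(100, 10)  # 100 points in 10-D
    Y = transform(X, k=2, n_neighbors=5, sigma=1.0)
    return Y.shape  # -> (100, 2)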
| mit | 6,210,406,793,321,975,000 | 33.390244 | 82 | 0.614184 | false |
mick-d/nipype | nipype/interfaces/nipy/model.py | 7 | 13161 | # -*- coding: utf-8 -*-
from __future__ import print_function, division, unicode_literals, absolute_import
from builtins import range, str, bytes
import os
import nibabel as nb
import numpy as np
from ...utils.misc import package_check
from ...utils import NUMPY_MMAP
from ..base import (BaseInterface, TraitedSpec, traits, File, OutputMultiPath,
BaseInterfaceInputSpec, isdefined)
have_nipy = True
try:
package_check('nipy')
except Exception as e:
have_nipy = False
else:
import nipy.modalities.fmri.design_matrix as dm
import nipy.modalities.fmri.glm as GLM
if have_nipy:
try:
BlockParadigm = dm.BlockParadigm
except AttributeError:
from nipy.modalities.fmri.experimental_paradigm import BlockParadigm
class FitGLMInputSpec(BaseInterfaceInputSpec):
session_info = traits.List(minlen=1, maxlen=1, mandatory=True,
desc=('Session specific information generated by'
' ``modelgen.SpecifyModel``, FitGLM does '
'not support multiple runs uless they are '
'concatenated (see SpecifyModel options)'))
hrf_model = traits.Enum('Canonical', 'Canonical With Derivative', 'FIR',
desc=("that specifies the hemodynamic reponse "
"function it can be 'Canonical', 'Canonical "
"With Derivative' or 'FIR'"), usedefault=True)
drift_model = traits.Enum("Cosine", "Polynomial", "Blank",
desc=("string that specifies the desired drift "
"model, to be chosen among 'Polynomial', "
"'Cosine', 'Blank'"), usedefault=True)
TR = traits.Float(mandatory=True)
model = traits.Enum("ar1", "spherical",
desc=("autoregressive mode is available only for the "
"kalman method"), usedefault=True)
method = traits.Enum("kalman", "ols",
desc=("method to fit the model, ols or kalma; kalman "
"is more time consuming but it supports "
"autoregressive model"), usedefault=True)
mask = traits.File(exists=True,
desc=("restrict the fitting only to the region defined "
"by this mask"))
normalize_design_matrix = traits.Bool(False,
desc=("normalize (zscore) the "
"regressors before fitting"),
usedefault=True)
save_residuals = traits.Bool(False, usedefault=True)
plot_design_matrix = traits.Bool(False, usedefault=True)
class FitGLMOutputSpec(TraitedSpec):
beta = File(exists=True)
nvbeta = traits.Any()
s2 = File(exists=True)
dof = traits.Any()
constants = traits.Any()
axis = traits.Any()
reg_names = traits.List()
residuals = traits.File()
a = File(exists=True)
class FitGLM(BaseInterface):
'''
Fit GLM model based on the specified design. Supports only single or concatenated runs.
'''
input_spec = FitGLMInputSpec
output_spec = FitGLMOutputSpec
def _run_interface(self, runtime):
session_info = self.inputs.session_info
functional_runs = self.inputs.session_info[0]['scans']
if isinstance(functional_runs, (str, bytes)):
functional_runs = [functional_runs]
nii = nb.load(functional_runs[0])
data = nii.get_data()
if isdefined(self.inputs.mask):
mask = nb.load(self.inputs.mask).get_data() > 0
else:
mask = np.ones(nii.shape[:3]) == 1
timeseries = data.copy()[mask, :]
del data
for functional_run in functional_runs[1:]:
nii = nb.load(functional_run, mmap=NUMPY_MMAP)
data = nii.get_data()
npdata = data.copy()
del data
timeseries = np.concatenate((timeseries, npdata[mask, :]), axis=1)
del npdata
nscans = timeseries.shape[1]
if 'hpf' in list(session_info[0].keys()):
hpf = session_info[0]['hpf']
drift_model = self.inputs.drift_model
else:
hpf = 0
drift_model = "Blank"
reg_names = []
for reg in session_info[0]['regress']:
reg_names.append(reg['name'])
reg_vals = np.zeros((nscans, len(reg_names)))
for i in range(len(reg_names)):
reg_vals[:, i] = np.array(session_info[0]['regress'][i]['val']).reshape(1, -1)
frametimes = np.linspace(0, (nscans - 1) * self.inputs.TR, nscans)
conditions = []
onsets = []
duration = []
for i, cond in enumerate(session_info[0]['cond']):
onsets += cond['onset']
conditions += [cond['name']] * len(cond['onset'])
if len(cond['duration']) == 1:
duration += cond['duration'] * len(cond['onset'])
else:
duration += cond['duration']
if conditions:
paradigm = BlockParadigm(con_id=conditions, onset=onsets, duration=duration)
else:
paradigm = None
design_matrix, self._reg_names = dm.dmtx_light(frametimes, paradigm, drift_model=drift_model, hfcut=hpf,
hrf_model=self.inputs.hrf_model,
add_regs=reg_vals,
add_reg_names=reg_names
)
if self.inputs.normalize_design_matrix:
for i in range(len(self._reg_names) - 1):
design_matrix[:, i] = ((design_matrix[:, i] -
design_matrix[:, i].mean()) /
design_matrix[:, i].std())
if self.inputs.plot_design_matrix:
import pylab
pylab.pcolor(design_matrix)
pylab.savefig("design_matrix.pdf")
pylab.close()
pylab.clf()
glm = GLM.GeneralLinearModel()
glm.fit(timeseries.T, design_matrix, method=self.inputs.method, model=self.inputs.model)
self._beta_file = os.path.abspath("beta.nii")
beta = np.zeros(mask.shape + (glm.beta.shape[0],))
beta[mask, :] = glm.beta.T
nb.save(nb.Nifti1Image(beta, nii.affine), self._beta_file)
self._s2_file = os.path.abspath("s2.nii")
s2 = np.zeros(mask.shape)
s2[mask] = glm.s2
nb.save(nb.Nifti1Image(s2, nii.affine), self._s2_file)
if self.inputs.save_residuals:
explained = np.dot(design_matrix, glm.beta)
residuals = np.zeros(mask.shape + (nscans,))
residuals[mask, :] = timeseries - explained.T
self._residuals_file = os.path.abspath("residuals.nii")
nb.save(nb.Nifti1Image(residuals, nii.affine), self._residuals_file)
self._nvbeta = glm.nvbeta
self._dof = glm.dof
self._constants = glm._constants
self._axis = glm._axis
if self.inputs.model == "ar1":
self._a_file = os.path.abspath("a.nii")
a = np.zeros(mask.shape)
a[mask] = glm.a.squeeze()
nb.save(nb.Nifti1Image(a, nii.affine), self._a_file)
self._model = glm.model
self._method = glm.method
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
outputs["beta"] = self._beta_file
outputs["nvbeta"] = self._nvbeta
outputs["s2"] = self._s2_file
outputs["dof"] = self._dof
outputs["constants"] = self._constants
outputs["axis"] = self._axis
outputs["reg_names"] = self._reg_names
if self.inputs.model == "ar1":
outputs["a"] = self._a_file
if self.inputs.save_residuals:
outputs["residuals"] = self._residuals_file
return outputs
class EstimateContrastInputSpec(BaseInterfaceInputSpec):
contrasts = traits.List(
traits.Either(traits.Tuple(traits.Str,
traits.Enum('T'),
traits.List(traits.Str),
traits.List(traits.Float)),
traits.Tuple(traits.Str,
traits.Enum('T'),
traits.List(traits.Str),
traits.List(traits.Float),
traits.List(traits.Float)),
traits.Tuple(traits.Str,
traits.Enum('F'),
traits.List(traits.Either(traits.Tuple(traits.Str,
traits.Enum('T'),
traits.List(traits.Str),
traits.List(traits.Float)),
traits.Tuple(traits.Str,
traits.Enum('T'),
traits.List(traits.Str),
traits.List(traits.Float),
traits.List(traits.Float)))))),
desc="""List of contrasts with each contrast being a list of the form:
[('name', 'stat', [condition list], [weight list], [session list])]. if
session list is None or not provided, all sessions are used. For F
contrasts, the condition list should contain previously defined
T-contrasts.""", mandatory=True)
beta = File(exists=True, desc="beta coefficients of the fitted model", mandatory=True)
nvbeta = traits.Any(mandatory=True)
s2 = File(exists=True, desc="squared variance of the residuals", mandatory=True)
dof = traits.Any(desc="degrees of freedom", mandatory=True)
constants = traits.Any(mandatory=True)
axis = traits.Any(mandatory=True)
reg_names = traits.List(mandatory=True)
mask = traits.File(exists=True)
class EstimateContrastOutputSpec(TraitedSpec):
stat_maps = OutputMultiPath(File(exists=True))
z_maps = OutputMultiPath(File(exists=True))
p_maps = OutputMultiPath(File(exists=True))
class EstimateContrast(BaseInterface):
'''
Estimate contrast of a fitted model.
'''
input_spec = EstimateContrastInputSpec
output_spec = EstimateContrastOutputSpec
def _run_interface(self, runtime):
beta_nii = nb.load(self.inputs.beta)
if isdefined(self.inputs.mask):
mask = nb.load(self.inputs.mask).get_data() > 0
else:
mask = np.ones(beta_nii.shape[:3]) == 1
glm = GLM.GeneralLinearModel()
nii = nb.load(self.inputs.beta)
glm.beta = beta_nii.get_data().copy()[mask, :].T
glm.nvbeta = self.inputs.nvbeta
glm.s2 = nb.load(self.inputs.s2).get_data().copy()[mask]
glm.dof = self.inputs.dof
glm._axis = self.inputs.axis
glm._constants = self.inputs.constants
reg_names = self.inputs.reg_names
self._stat_maps = []
self._p_maps = []
self._z_maps = []
for contrast_def in self.inputs.contrasts:
name = contrast_def[0]
_ = contrast_def[1]
contrast = np.zeros(len(reg_names))
for i, reg_name in enumerate(reg_names):
if reg_name in contrast_def[2]:
idx = contrast_def[2].index(reg_name)
contrast[i] = contrast_def[3][idx]
est_contrast = glm.contrast(contrast)
stat_map = np.zeros(mask.shape)
stat_map[mask] = est_contrast.stat().T
stat_map_file = os.path.abspath(name + "_stat_map.nii")
nb.save(nb.Nifti1Image(stat_map, nii.affine), stat_map_file)
self._stat_maps.append(stat_map_file)
p_map = np.zeros(mask.shape)
p_map[mask] = est_contrast.pvalue().T
p_map_file = os.path.abspath(name + "_p_map.nii")
nb.save(nb.Nifti1Image(p_map, nii.affine), p_map_file)
self._p_maps.append(p_map_file)
z_map = np.zeros(mask.shape)
z_map[mask] = est_contrast.zscore().T
z_map_file = os.path.abspath(name + "_z_map.nii")
nb.save(nb.Nifti1Image(z_map, nii.affine), z_map_file)
self._z_maps.append(z_map_file)
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
outputs["stat_maps"] = self._stat_maps
outputs["p_maps"] = self._p_maps
outputs["z_maps"] = self._z_maps
return outputs
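# Illustrative usage sketch (not part of the original module): running the two
# interfaces back to back outside a nipype workflow.  ``session_info`` would
# normally be produced by modelgen.SpecifyModel; the contrast definition below
# is a placeholder and must reference an actual condition/regressor name.
def _example_fit_and_contrast(session_info, tr=2.0):
    fit = FitGLM()
    fit.inputs.session_info = session_info
    fit.inputs.TR = tr
    fit_res = fit.run().outputs
    est = EstimateContrast()
    est.inputs.contrasts = [('task>baseline', 'T', ['task'], [1.0])]
    for name in ('beta', 'nvbeta', 's2', 'dof', 'constants', 'axis', 'reg_names'):
        setattr(est.inputs, name, getattr(fit_res, name))
    return est.run().outputs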
| bsd-3-clause | 6,562,482,735,676,853,000 | 39.74613 | 112 | 0.52458 | false |
X-dark/Flexget | flexget/__init__.py | 1 | 1135 | #!/usr/bin/python
from __future__ import unicode_literals, division, absolute_import
import os
import logging
from flexget import logger
from flexget.options import get_parser
from flexget import plugin
from flexget.manager import Manager
__version__ = '{git}'
log = logging.getLogger('main')
def main(args=None):
"""Main entry point for Command Line Interface"""
logger.initialize()
plugin.load_plugins()
options = get_parser().parse_args(args)
manager = Manager(options)
log_level = logging.getLevelName(options.loglevel.upper())
log_file = os.path.expanduser(manager.options.logfile)
# If an absolute path is not specified, use the config directory.
if not os.path.isabs(log_file):
log_file = os.path.join(manager.config_base, log_file)
logger.start(log_file, log_level)
if options.profile:
try:
import cProfile as profile
except ImportError:
import profile
profile.runctx('manager.start()', globals(), locals(),
os.path.join(manager.config_base, options.profile))
else:
manager.start()
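# Illustrative sketch (not part of the original module): programmatic callers
# can pass an explicit argument list instead of relying on sys.argv; the
# option shown in the comment is only an example.
def _example_run_cli(arg_list=None):
    main(arg_list)  # e.g. _example_run_cli(['--loglevel', 'debug'])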
| mit | -4,021,792,851,769,180,700 | 26.682927 | 74 | 0.672247 | false |
spcui/virt-test | virttest/libvirt_xml/vol_xml.py | 1 | 3408 | """
Module simplifying manipulation of XML described at
http://libvirt.org/
"""
from virttest.libvirt_xml import base, accessors
class VolXMLBase(base.LibvirtXMLBase):
"""
Accessor methods for VolXML class.
Properties:
name: string, operates on XML name tag
uuid: string, operates on uuid tag
type: string, operates on type tag
capacity: integer, operates on capacity attribute of capacity tag
allocation: integer, operates on allocation attribute of allocation
available: integer, operates on available attribute of available
source: nothing
"""
__slots__ = base.LibvirtXMLBase.__slots__ + ('name', 'key',
'capacity', 'allocation',
'format', 'path')
__uncompareable__ = base.LibvirtXMLBase.__uncompareable__
__schema_name__ = "storagevol"
def __init__(self, virsh_instance=base.virsh):
accessors.XMLElementText('name', self, parent_xpath='/',
tag_name='name')
accessors.XMLElementText('key', self, parent_xpath='/',
tag_name='key')
accessors.XMLElementInt('capacity', self, parent_xpath='/',
tag_name='capacity')
accessors.XMLElementInt('allocation', self, parent_xpath='/',
tag_name='allocation')
accessors.XMLAttribute('format', self, parent_xpath='/target',
tag_name='format', attribute='type')
accessors.XMLElementText('path', self, parent_xpath='/target',
tag_name='path')
super(VolXMLBase, self).__init__(virsh_instance=virsh_instance)
class VolXML(VolXMLBase):
"""
Manipulators of a Virtual Vol through it's XML definition.
"""
__slots__ = VolXMLBase.__slots__
def __init__(self, vol_name='default', virsh_instance=base.virsh):
"""
Initialize new instance with empty XML
"""
super(VolXML, self).__init__(virsh_instance=virsh_instance)
self.xml = u"<volume><name>%s</name></volume>" % vol_name
@staticmethod
def new_from_vol_dumpxml(vol_name, pool_name, virsh_instance=base.virsh):
"""
Return new VolXML instance from virsh vol-dumpxml command
:param vol_name: Name of vol to vol-dumpxml
:param virsh_instance: virsh module or instance to use
:return: New initialized VolXML instance
"""
volxml = VolXML(virsh_instance=virsh_instance)
volxml['xml'] = virsh_instance.vol_dumpxml(vol_name, pool_name)\
.stdout.strip()
return volxml
@staticmethod
def get_vol_details_by_name(vol_name, pool_name, virsh_instance=base.virsh):
"""
Return Vol's uuid by Vol's name.
:param vol_name: Vol's name
:return: Vol's uuid
"""
volume_xml = {}
vol_xml = VolXML.new_from_vol_dumpxml(vol_name, pool_name,
virsh_instance)
volume_xml['key'] = vol_xml.key
volume_xml['path'] = vol_xml.path
volume_xml['format'] = vol_xml.format
volume_xml['capacity'] = vol_xml.capacity
volume_xml['allocation'] = vol_xml.allocation
return volume_xml
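# Illustrative usage sketch (not part of the original module): fetch details of
# an existing storage volume.  The pool and volume names are placeholders and
# must already exist on the libvirt host.
def _example_volume_details(vol_name='vol1', pool_name='default'):
    details = VolXML.get_vol_details_by_name(vol_name, pool_name)
    return details['path'], details['capacity']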
| gpl-2.0 | -8,600,053,957,075,828,000 | 35.645161 | 80 | 0.569542 | false |
switowski/invenio | invenio/modules/submit/models.py | 1 | 19892 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2011, 2012, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""WebSubmit database models."""
from invenio.ext.sqlalchemy import db
from sqlalchemy.dialects import mysql
from sqlalchemy.schema import Index
class SbmACTION(db.Model):
"""Represent a SbmACTION record."""
__tablename__ = 'sbmACTION'
lactname = db.Column(db.Text, nullable=True)
sactname = db.Column(db.Char(3), nullable=False, server_default='',
primary_key=True)
dir = db.Column(db.Text, nullable=True)
cd = db.Column(db.Date, nullable=True)
md = db.Column(db.Date, nullable=True)
actionbutton = db.Column(db.Text, nullable=True)
statustext = db.Column(db.Text, nullable=True)
class SbmALLFUNCDESCR(db.Model):
"""Represent a SbmALLFUNCDESCR record."""
__tablename__ = 'sbmALLFUNCDESCR'
# FIX ME pk
function = db.Column(db.String(40), nullable=False, server_default='',
primary_key=True)
description = db.Column(db.TinyText, nullable=True)
class SbmAPPROVAL(db.Model):
"""Represent a SbmAPPROVAL record."""
__tablename__ = 'sbmAPPROVAL'
doctype = db.Column(db.String(10), nullable=False,
server_default='')
categ = db.Column(db.String(50), nullable=False,
server_default='')
rn = db.Column(db.String(50), nullable=False, server_default='',
primary_key=True)
status = db.Column(db.String(10), nullable=False,
server_default='')
dFirstReq = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dLastReq = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dAction = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
access = db.Column(db.String(20), nullable=False,
server_default='0')
note = db.Column(db.Text, nullable=False)
class SbmCATEGORIES(db.Model):
"""Represent a SbmCATEGORIES record."""
__tablename__ = 'sbmCATEGORIES'
doctype = db.Column(db.String(10), nullable=False, server_default='',
primary_key=True, index=True)
sname = db.Column(db.String(75), nullable=False, server_default='',
primary_key=True, index=True)
lname = db.Column(db.String(75), nullable=False,
server_default='')
score = db.Column(db.TinyInteger(3, unsigned=True), nullable=False,
server_default='0')
class SbmCHECKS(db.Model):
"""Represent a SbmCHECKS record."""
__tablename__ = 'sbmCHECKS'
chname = db.Column(db.String(15), nullable=False, server_default='',
primary_key=True)
chdesc = db.Column(db.Text, nullable=True)
cd = db.Column(db.Date, nullable=True)
md = db.Column(db.Date, nullable=True)
chefi1 = db.Column(db.Text, nullable=True)
chefi2 = db.Column(db.Text, nullable=True)
class SbmCOLLECTION(db.Model):
"""Represents a SbmCOLLECTION record."""
__tablename__ = 'sbmCOLLECTION'
id = db.Column(db.Integer(11), nullable=False,
primary_key=True,
autoincrement=True)
name = db.Column(db.String(100), nullable=False,
server_default='')
class SbmCOLLECTIONSbmCOLLECTION(db.Model):
"""Represents a SbmCOLLECTIONSbmCOLLECTION record."""
__tablename__ = 'sbmCOLLECTION_sbmCOLLECTION'
id = db.Column(db.Integer(11), nullable=False, autoincrement=True,
primary_key=True)
_id_father = db.Column(db.Integer(11), db.ForeignKey(SbmCOLLECTION.id),
nullable=True, name='id_father')
id_son = db.Column(db.Integer(11), db.ForeignKey(SbmCOLLECTION.id),
nullable=False)
catalogue_order = db.Column(db.Integer(11), nullable=False,
server_default='0')
son = db.relationship(
SbmCOLLECTION,
backref=db.backref('father', uselist=False),
single_parent=True,
primaryjoin="and_("
"SbmCOLLECTIONSbmCOLLECTION.id_son==SbmCOLLECTION.id) "
)
father = db.relationship(
SbmCOLLECTION,
backref=db.backref('son', uselist=False),
single_parent=True,
primaryjoin="and_("
"SbmCOLLECTIONSbmCOLLECTION.id_father==SbmCOLLECTION.id) "
)
@db.hybrid_property
def id_father(self):
"""Get id_father."""
return self._id_father
@id_father.setter
def id_father(self, value):
"""Set id_father."""
self._id_father = value or None
class SbmDOCTYPE(db.Model):
"""Represents a SbmDOCTYPE record."""
__tablename__ = 'sbmDOCTYPE'
ldocname = db.Column(db.Text, nullable=True)
sdocname = db.Column(db.String(10), nullable=True,
primary_key=True)
cd = db.Column(db.Date, nullable=True)
md = db.Column(db.Date, nullable=True)
description = db.Column(db.Text, nullable=True)
class SbmCOLLECTIONSbmDOCTYPE(db.Model):
"""Represents a SbmCOLLECTIONSbmDOCTYPE record."""
__tablename__ = 'sbmCOLLECTION_sbmDOCTYPE'
id = db.Column(db.Integer(11), nullable=False, autoincrement=True,
primary_key=True)
_id_father = db.Column(db.Integer(11), db.ForeignKey(SbmCOLLECTION.id),
nullable=True, name="id_father")
id_son = db.Column(db.Char(10), db.ForeignKey(SbmDOCTYPE.sdocname),
nullable=False)
catalogue_order = db.Column(db.Integer(11), nullable=False,
server_default='0')
father = db.relationship(
SbmCOLLECTION,
backref=db.backref('sonDoctype', uselist=False),
)
@db.hybrid_property
def id_father(self):
"""Get id_father."""
return self._id_father
@id_father.setter
def id_father(self, value):
"""Set id_father."""
self._id_father = value or None
class SbmCOOKIES(db.Model):
"""Represents a SbmCOOKIES record."""
__tablename__ = 'sbmCOOKIES'
id = db.Column(db.Integer(15, unsigned=True), nullable=False,
primary_key=True, autoincrement=True)
name = db.Column(db.String(100), nullable=False)
value = db.Column(db.Text, nullable=True)
uid = db.Column(db.Integer(15), nullable=False)
class SbmCPLXAPPROVAL(db.Model):
"""Represents a SbmCPLXAPPROVAL record."""
__tablename__ = 'sbmCPLXAPPROVAL'
doctype = db.Column(db.String(10), nullable=False,
server_default='')
categ = db.Column(db.String(50), nullable=False,
server_default='')
rn = db.Column(db.String(50), nullable=False, server_default='',
primary_key=True)
type = db.Column(db.String(10), nullable=False,
primary_key=True)
status = db.Column(db.String(10), nullable=False)
id_group = db.Column(db.Integer(15, unsigned=True), nullable=False,
server_default='0')
id_bskBASKET = db.Column(db.Integer(15, unsigned=True), nullable=False,
server_default='0')
id_EdBoardGroup = db.Column(db.Integer(15, unsigned=True), nullable=False,
server_default='0')
dFirstReq = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dLastReq = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dEdBoardSel = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dRefereeSel = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dRefereeRecom = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dEdBoardRecom = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dPubComRecom = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dProjectLeaderAction = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
class SbmFIELD(db.Model):
"""Represents a SbmFIELD record."""
__tablename__ = 'sbmFIELD'
subname = db.Column(db.String(13), nullable=True,
primary_key=True)
pagenb = db.Column(db.Integer(11), nullable=True,
primary_key=True, autoincrement=False)
fieldnb = db.Column(db.Integer(11), nullable=True)
fidesc = db.Column(db.String(15), nullable=True,
primary_key=True)
fitext = db.Column(db.Text, nullable=True)
level = db.Column(db.Char(1), nullable=True)
sdesc = db.Column(db.Text, nullable=True)
checkn = db.Column(db.Text, nullable=True)
cd = db.Column(db.Date, nullable=True)
md = db.Column(db.Date, nullable=True)
fiefi1 = db.Column(db.Text, nullable=True)
fiefi2 = db.Column(db.Text, nullable=True)
class SbmFIELDDESC(db.Model):
"""Represents a SbmFIELDDESC record."""
__tablename__ = 'sbmFIELDDESC'
name = db.Column(db.String(15), # db.ForeignKey(SbmFIELD.fidesc),
nullable=False, server_default='', primary_key=True)
alephcode = db.Column(db.String(50), nullable=True)
marccode = db.Column(db.String(50), nullable=False, server_default='')
type = db.Column(db.Char(1), nullable=True)
size = db.Column(db.Integer(11), nullable=True)
rows = db.Column(db.Integer(11), nullable=True)
cols = db.Column(db.Integer(11), nullable=True)
maxlength = db.Column(db.Integer(11), nullable=True)
val = db.Column(db.Text, nullable=True)
fidesc = db.Column(db.Text, nullable=True)
cd = db.Column(db.Date, nullable=True)
md = db.Column(db.Date, nullable=True)
modifytext = db.Column(db.Text, nullable=True)
fddfi2 = db.Column(db.Text, nullable=True)
cookie = db.Column(db.Integer(11), nullable=True,
server_default='0')
# field = db.relationship(SbmFIELD, backref='fielddescs')
class SbmFORMATEXTENSION(db.Model):
"""Represents a SbmFORMATEXTENSION record."""
__tablename__ = 'sbmFORMATEXTENSION'
id = db.Column(db.Integer(), nullable=False,
primary_key=True, autoincrement=True)
FILE_FORMAT = db.Column(
db.Text().with_variant(mysql.TEXT(50), 'mysql'),
nullable=False)
FILE_EXTENSION = db.Column(
db.Text().with_variant(mysql.TEXT(10), 'mysql'),
nullable=False)
Index('sbmformatextension_file_format_idx',
SbmFORMATEXTENSION.FILE_FORMAT, mysql_length=50)
Index('sbmformatextension_file_extension_idx',
SbmFORMATEXTENSION.FILE_EXTENSION, mysql_length=10)
class SbmFUNCTIONS(db.Model):
"""Represents a SbmFUNCTIONS record."""
__tablename__ = 'sbmFUNCTIONS'
action = db.Column(db.String(10), nullable=False,
server_default='', primary_key=True)
doctype = db.Column(db.String(10), nullable=False,
server_default='', primary_key=True)
function = db.Column(db.String(40), nullable=False,
server_default='', primary_key=True)
score = db.Column(db.Integer(11), nullable=False,
server_default='0', primary_key=True)
step = db.Column(db.TinyInteger(4), nullable=False,
server_default='1', primary_key=True)
class SbmFUNDESC(db.Model):
"""Represents a SbmFUNDESC record."""
__tablename__ = 'sbmFUNDESC'
function = db.Column(db.String(40), nullable=False,
server_default='', primary_key=True)
param = db.Column(db.String(40), primary_key=True)
class SbmGFILERESULT(db.Model):
"""Represents a SbmGFILERESULT record."""
__tablename__ = 'sbmGFILERESULT'
id = db.Column(db.Integer(), nullable=False,
primary_key=True, autoincrement=True)
FORMAT = db.Column(
db.Text().with_variant(db.Text(50), 'mysql'),
nullable=False)
RESULT = db.Column(
db.Text().with_variant(db.Text(50), 'mysql'),
nullable=False)
Index('sbmgfileresult_format_idx', SbmGFILERESULT.FORMAT, mysql_length=50)
Index('sbmgfileresult_result_idx', SbmGFILERESULT.RESULT, mysql_length=50)
class SbmIMPLEMENT(db.Model):
"""Represents a SbmIMPLEMENT record."""
__tablename__ = 'sbmIMPLEMENT'
docname = db.Column(db.String(10), nullable=True)
actname = db.Column(db.Char(3), nullable=True)
displayed = db.Column(db.Char(1), nullable=True)
subname = db.Column(db.String(13), nullable=True, primary_key=True)
nbpg = db.Column(db.Integer(11), nullable=True, primary_key=True,
autoincrement=False)
cd = db.Column(db.Date, nullable=True)
md = db.Column(db.Date, nullable=True)
buttonorder = db.Column(db.Integer(11), nullable=True)
statustext = db.Column(db.Text, nullable=True)
level = db.Column(db.Char(1), nullable=False, server_default='')
score = db.Column(db.Integer(11), nullable=False, server_default='0')
stpage = db.Column(db.Integer(11), nullable=False, server_default='0')
endtxt = db.Column(db.String(100), nullable=False, server_default='')
class SbmPARAMETERS(db.Model):
"""Represents a SbmPARAMETERS record."""
__tablename__ = 'sbmPARAMETERS'
doctype = db.Column(db.String(10), nullable=False,
server_default='', primary_key=True)
name = db.Column(db.String(40), nullable=False,
server_default='', primary_key=True)
value = db.Column(db.Text, nullable=False)
class SbmPUBLICATION(db.Model):
"""Represents a SbmPUBLICATION record."""
__tablename__ = 'sbmPUBLICATION'
doctype = db.Column(db.String(10), nullable=False,
server_default='', primary_key=True)
categ = db.Column(db.String(50), nullable=False,
server_default='', primary_key=True)
rn = db.Column(db.String(50), nullable=False, server_default='',
primary_key=True)
status = db.Column(db.String(10), nullable=False, server_default='')
dFirstReq = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dLastReq = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dAction = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
accessref = db.Column(db.String(20), nullable=False, server_default='')
accessedi = db.Column(db.String(20), nullable=False, server_default='')
access = db.Column(db.String(20), nullable=False, server_default='')
referees = db.Column(db.String(50), nullable=False, server_default='')
authoremail = db.Column(db.String(50), nullable=False,
server_default='')
dRefSelection = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dRefRec = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
dEdiRec = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
accessspo = db.Column(db.String(20), nullable=False, server_default='')
journal = db.Column(db.String(100), nullable=True)
class SbmPUBLICATIONCOMM(db.Model):
"""Represents a SbmPUBLICATIONCOMM record."""
__tablename__ = 'sbmPUBLICATIONCOMM'
id = db.Column(db.Integer(11), nullable=False,
primary_key=True, autoincrement=True)
id_parent = db.Column(db.Integer(11), server_default='0', nullable=True)
rn = db.Column(db.String(100), nullable=False, server_default='')
firstname = db.Column(db.String(100), nullable=True)
secondname = db.Column(db.String(100), nullable=True)
email = db.Column(db.String(100), nullable=True)
date = db.Column(db.String(40), nullable=False, server_default='')
synopsis = db.Column(db.String(255), nullable=False, server_default='')
commentfulltext = db.Column(db.Text, nullable=True)
class SbmPUBLICATIONDATA(db.Model):
"""Represents a SbmPUBLICATIONDATA record."""
__tablename__ = 'sbmPUBLICATIONDATA'
doctype = db.Column(db.String(10), nullable=False,
server_default='', primary_key=True)
editoboard = db.Column(db.String(250), nullable=False, server_default='')
base = db.Column(db.String(10), nullable=False, server_default='')
logicalbase = db.Column(db.String(10), nullable=False, server_default='')
spokesperson = db.Column(db.String(50), nullable=False, server_default='')
class SbmREFEREES(db.Model):
"""Represents a SbmREFEREES record."""
__tablename__ = 'sbmREFEREES'
doctype = db.Column(db.String(10), nullable=False, server_default='')
categ = db.Column(db.String(10), nullable=False, server_default='')
name = db.Column(db.String(50), nullable=False, server_default='')
address = db.Column(db.String(50), nullable=False, server_default='')
rid = db.Column(db.Integer(11), nullable=False, primary_key=True,
autoincrement=True)
class SbmSUBMISSIONS(db.Model):
"""Represents a SbmSUBMISSIONS record."""
__tablename__ = 'sbmSUBMISSIONS'
email = db.Column(db.String(50), nullable=False,
server_default='')
doctype = db.Column(db.String(10), nullable=False,
server_default='')
action = db.Column(db.String(10), nullable=False,
server_default='')
status = db.Column(db.String(10), nullable=False,
server_default='')
id = db.Column(db.String(30), nullable=False,
server_default='')
reference = db.Column(db.String(40), nullable=False,
server_default='')
cd = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
md = db.Column(db.DateTime, nullable=False,
server_default='1900-01-01 00:00:00')
log_id = db.Column(db.Integer(11), nullable=False,
primary_key=True,
autoincrement=True)
__all__ = ('SbmACTION',
'SbmALLFUNCDESCR',
'SbmAPPROVAL',
'SbmCATEGORIES',
'SbmCHECKS',
'SbmCOLLECTION',
'SbmCOLLECTIONSbmCOLLECTION',
'SbmDOCTYPE',
'SbmCOLLECTIONSbmDOCTYPE',
'SbmCOOKIES',
'SbmCPLXAPPROVAL',
'SbmFIELD',
'SbmFIELDDESC',
'SbmFORMATEXTENSION',
'SbmFUNCTIONS',
'SbmFUNDESC',
'SbmGFILERESULT',
'SbmIMPLEMENT',
'SbmPARAMETERS',
'SbmPUBLICATION',
'SbmPUBLICATIONCOMM',
'SbmPUBLICATIONDATA',
'SbmREFEREES',
'SbmSUBMISSIONS')
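# Illustrative query sketch (not part of the original module): these
# Flask-SQLAlchemy models expose a ``query`` attribute, so a document type can
# be looked up as below (assumes an application context and a populated
# database; the doctype name is a placeholder).
def _example_get_doctype(sdocname='DEMOART'):
    return SbmDOCTYPE.query.filter_by(sdocname=sdocname).first()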
| gpl-2.0 | 2,390,014,930,960,900,600 | 36.674242 | 78 | 0.61477 | false |
rhiever/sklearn-benchmarks | model_code/grid_search/SGDClassifier.py | 1 | 1597 | import sys
import pandas as pd
import numpy as np
import itertools
from sklearn.preprocessing import RobustScaler
from sklearn.linear_model import SGDClassifier
from evaluate_model import evaluate_model
dataset = sys.argv[1]
pipeline_components = [RobustScaler, SGDClassifier]
pipeline_parameters = {}
loss_values = ['hinge', 'log', 'modified_huber', 'squared_hinge', 'perceptron']
penalty_values = ['l2', 'l1', 'elasticnet']
alpha_values = [0.000001, 0.00001, 0.0001, 0.001, 0.01]
learning_rate_values = ['constant', 'optimal', 'invscaling']
fit_intercept_values = [True, False]
l1_ratio_values = [0., 0.1, 0.15, 0.25, 0.5, 0.75, 0.9, 1.]
eta0_values = [0.0, 0.01, 0.1, 0.5, 1., 10., 50., 100.]
power_t_values = [0., 0.1, 0.5, 1., 10., 50., 100.]
random_state = [324089]
all_param_combinations = itertools.product(loss_values, penalty_values, alpha_values, learning_rate_values, fit_intercept_values, l1_ratio_values, eta0_values, power_t_values, random_state)
pipeline_parameters[SGDClassifier] = \
[{'loss': loss, 'penalty': penalty, 'alpha': alpha, 'learning_rate': learning_rate, 'fit_intercept': fit_intercept, 'l1_ratio': l1_ratio, 'eta0': eta0, 'power_t': power_t, 'random_state': random_state}
for (loss, penalty, alpha, learning_rate, fit_intercept, l1_ratio, eta0, power_t, random_state) in all_param_combinations
if not (penalty != 'elasticnet' and l1_ratio != 0.15) and not (learning_rate not in ['constant', 'invscaling'] and eta0 != 0.0) and not (learning_rate != 'invscaling' and power_t != 0.5)]
evaluate_model(dataset, pipeline_components, pipeline_parameters)
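# Illustrative note (not part of the original script): the script is meant to
# be launched as ``python SGDClassifier.py <dataset>``.  The helper below only
# reports how many parameter combinations survive the filtering above.
def _example_grid_size():
    return len(pipeline_parameters[SGDClassifier])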
| mit | 2,270,912,534,851,505,000 | 52.233333 | 204 | 0.705698 | false |
sitexa/foobnix | foobnix/gui/top.py | 1 | 2239 | #-*- coding: utf-8 -*-
'''
Created on 22 Sep. 2010
@author: ivan
'''
from gi.repository import Gtk
from foobnix.gui.model.signal import FControl
from foobnix.gui.state import LoadSave
from foobnix.gui.menu import MenuBarWidget
from foobnix.helpers.my_widgets import ImageButton
from foobnix.helpers.menu import Popup
from foobnix.fc.fc import FC
from foobnix.util.widget_utils import MenuStyleDecorator
class TopWidgets(FControl, LoadSave, Gtk.HBox):
def __init__(self, controls):
FControl.__init__(self, controls)
Gtk.HBox.__init__(self, False, 0)
self.old_menu = MenuBarWidget(controls)
self.pack_start(self.old_menu.widget, False, False, 0)
self.new_menu_button = ImageButton(Gtk.STOCK_PREFERENCES)
self.new_menu_button.connect("button-press-event", self.on_button_press)
self.pack_start(self.new_menu_button, False, False, 0)
self.pack_start(controls.playback, False, False, 0)
self.pack_start(controls.os, False, False, 0)
self.pack_start(controls.volume, False, False, 0)
self.pack_start(Gtk.SeparatorToolItem.new(), False, False, 0)
self.pack_start(controls.record, False, False, 0)
self.pack_start(controls.seek_bar, True, True, 0)
"""menu init"""
menu = Popup()
decorator = MenuStyleDecorator()
MenuBarWidget(self.controls, menu)
menu.add_separator()
menu.add_item(_("Preferences"), Gtk.STOCK_PREFERENCES, self.controls.show_preferences)
menu.add_separator()
menu.add_item(_("Quit"), Gtk.STOCK_QUIT, self.controls.quit)
decorator.apply(menu)
self.menu = menu
def update_menu_style(self):
if FC().menu_style == "new":
self.old_menu.widget.hide()
self.new_menu_button.show()
else:
self.old_menu.widget.show()
self.new_menu_button.hide()
def on_save(self):
self.controls.volume.on_save()
self.old_menu.on_save()
def on_load(self):
self.controls.volume.on_load()
self.old_menu.on_load()
self.controls.os.on_load()
self.update_menu_style()
def on_button_press(self, w, e):
self.menu.show(e)
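# Illustrative sketch (not part of the original module): FC().menu_style decides
# which menu widget is visible, so switching styles at runtime amounts to
# changing the setting and calling update_menu_style() (any value other than
# "new" selects the classic bar).  Assumes an already initialised TopWidgets.
def _example_toggle_menu(top_widgets):
    FC().menu_style = "new" if FC().menu_style != "new" else "old"
    top_widgets.update_menu_style()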
| gpl-3.0 | 2,132,427,195,450,941,200 | 30.928571 | 94 | 0.640268 | false |
inkhey/Transvoyage.py | transvoyage.py | 1 | 13138 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# transvoyage.py
# Version 0.3
#
# Copyright 2014 Guénaël Muller <contact@inkey-art.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#TODO
# - conform to PEP 8
# - bilingual comments and TODOs.
# - optimisations
# - improved reverse translation
# - new languages
# - debugging of some regexes and other fixes
import sys
import os
import subprocess
import re
import urllib
import argparse
# translations of the article types
listTypeFr=('Ville','Région continentale','Région','Pays' ,'Quartier','Itinéraire','Parc')
listTypeEn=('city' ,'continent' ,'region','country','district','itinerary' ,'park')
#Section equivalences
listSectionFr=["Comprendre","Aller" ,"Circuler" ,"Voir","Faire","Acheter","Manger","Boire un verre / Sortir","Se loger","Aux environs","Travailler","Apprendre","Gérer le Quotidien","Sécurité","Communiquer"]
listSectionEn=["Understand","Get in","Get around","See" ,"Do" ,"Buy" ,"Eat" ,"Drink" ,"Sleep" ,"Go next","Work" ,"Learn" ,"Cope" ,"Stay safe", "Connect" ]
listSectionFr.extend(["Respecter","Parler","Éléctricité"])
listSectionEn.extend(["Respect","Talk","Electricity"])
listSectionFr.extend(["Se préparer","Étapes","Autres destinations","Lire","Douanes","En taxi","Santé","Monnaie","Villes","Régions","Quartiers","Bureaux d'information touristique"])
listSectionEn.extend(["Prepare","Route","Other destinations","Read","Customs","By taxi","Stay healthy","Currency","Cities","Regions","Districts","Tourist office"])
listSectionFr.extend(['Histoire', 'Paysage', 'Flore et faune',"Climat","Randonnée","Droits d'accès","Droits d'accès","Activités","Météo","Nature"])
listSectionEn.extend(['History', 'Landscape', 'Flora and fauna',"Climate","Hiking","Fees/permits","Fees/Permits","Activities","Weather","Wildlife"])
listSectionFr.extend(['À pied', 'En train', 'En bus',"En avion","En ferry","En bateau","En voiture","En vélo","En vélo","En vélo","En motoneige","En stop"])
listSectionEn.extend(['By foot', 'By train', 'By bus',"By plane","By ferry","By boat","By car","By bicycle","By cycle","By bike","By snowmobile","By thumb"])
listSectionFr.extend(['Bon marché', 'Prix moyen','Prix moyen', 'Luxe','Hôtel','Logements','Dans la nature'])
listSectionEn.extend(['Budget', 'Mid-range','Mid range', 'Splurge','Hotel','Lodging','Backcountry'])
# Image equivalences
listImageFr=["[[Fichier:","[[Fichier:","gauche","droite","vignette","vignette"]
listImageEn=["[[Image:","[[File:","left","right","thumbnail","thumb"]
# Listing equivalences
# listing titles
listListingDebFr=["Listing","Faire","Voir","Acheter","Manger","Sortir","Se loger","Destination","Aller","Circuler"]
listListingDebEn=["listing","do" ,"see","buy","eat","drink","sleep","listing","listing","listing"]
# parameters
listListingFr=["nom=","adresse=","téléphone","latitude=","longitude=","email=","direction=","numéro gratuit=","fax=","prix=","description=<!-- ","-->}}","arrivée=","départ=","horaire="]
listListingEn=["name=","address=" ,"phone","lat=","long=","email=","directions=","tollfree=","fax=","price=","content=","}}","checkin=","checkout=","hours="]
# Itinerary equivalences
listItineraireFr=["Jour ",": De"," à "]
listItineraireEn=["Day ",":"," to "]
# "Dans" (IsPartOf) equivalences
listDansFr=["Dans"]
listDansEn=["IsPartOf"]
# Region map equivalences
# Start
listMapDebFr=["ListeRegions","carte=","taillecarte="]
listMapDebEn=["Regionlist","regionmap=","regionmapsize="]
# Parameters
listMapFr=["nomregion0=","couleurregion0=","elementsregion0=","descriptionregion0="]
listMapEn=["region0name=","region0color=","region0items=","region0description="]
# All the regexes as strings, per destination language
RegSFr=["(.*)\[\[(Image|Fichier):(.*)\s*$","(=+)(.*)(=+)(.*)","(.*){{(Listing|Faire|Voir|Acheter|Manger|Boire|Sortir|Se loger|Destination|Aller|Circuler)\s(.*)\s*$","(.*)}}[.\s]*$","{{Dans\|(.*)}}\s*$"]
# 0 1 2 3 4
RegSFr.extend(["^(=+)(.*) à (.*)(=+)\s*$","(.*){{ListeRegions(.*)","(.*)region([0-9]+)=(.*)","{{Avancement\|statut=(ébauche|esquisse|utilisable|guide|étoile)\|type=0}}(.*)","(.*){{Climat(.*)","(.*){{Représentation diplomatique"])
# 5 6 7 8 9 10
RegSEn=["(.*)\[\[(Image|File):(.*)\s*$", "(=+)(.*)(=+)(.*)","(.*){{(listing|do|see|buy|eat|drink|sleep)\s(.*)\s*$","(.*)}}[.\s]*$","{{IsPartOf\|(.*)}}\s*$"]
# 0 1 2 3 4
RegSEn.extend(["^(=+)(.*) to (.*)(=+)\s*$","(.*){{Regionlist(.*)","(.*)region(.*)name=(.*)","{{(outline|usable|guide|stub|star)0}}(.*)","(.*){{Climate(.*)","(.*){{flag|(.*)}}(.*){{Listing(.*)"])
# 5 6 7 8 9 10
# Article status banner
avFr="{{Avancement|statut=esquisse|type=0}}\n"
avEn="{{outline0}}\n"
# Climate equivalences
listMoisFr=["jan","fev","mar","avr","mai","jui","jul","aou","sep","oct","nov","dec"]
listMoisEn=["jan","feb","mar","apr","may","jun","jul","aug","sep","oct","nov","dec"]
listClimatFr=["Climat","description"]
listClimatEn=["Climate","description"]
for mois in listMoisFr :
listClimatFr.append("tmin-"+mois)
listClimatFr.append("tmax-"+mois)
listClimatFr.append("prec-"+mois)
for mois in listMoisEn :
listClimatEn.append(mois+"low")
listClimatEn.append(mois+"high")
listClimatEn.append(mois+"precip")
# Word kits, per language
ListFr=(listTypeFr,listSectionFr,listImageFr,listListingDebFr,listListingFr,listItineraireFr,listDansFr,listMapDebFr,listMapFr,RegSFr,avFr,listClimatFr)
ListEn=(listTypeEn,listSectionEn,listImageEn,listListingDebEn,listListingEn,listItineraireEn,listDansEn,listMapDebEn,listMapEn,RegSEn,avEn,listClimatEn)
# 0 1 2 3 4 5 6 7 8 9 10 11
ListingsSpecialFr={"Villes":"Ville","Autres destinations":"Destinations","Aux environs":"Destinations"}
# language to word-kit mapping
ListLang ={"fr":ListFr, "en":ListEn}
# Source and destination languages, and fetched content
src=ListEn
dest=ListFr
lang="en"
content=""
section=""
# Fetch the article type (city, itinerary, district, etc.)
def recupTypeArticle() :
typeArticle = dest[0][0]
listRegex = list()
for mot in src[0] :
s=src[9][8].replace("0",mot)
listRegex.append(re.compile(s))
bOk=True
for line in content:
if (not bOk) :
break
for i in range (len(listRegex)) :
if listRegex[i].search(line) :
typeArticle=dest[0][i]
bOk=False
break
return typeArticle
# Build the header
def creationEntete (typeArticle,titre) :
s=""
if dest==ListFr : # Si on traduit en français
s="""{{Bannière page}}
{{Info """+typeArticle+"""
| nom=
| nom local=
| région=
| image=
| légende image=
| rivière=
| superficie=
| population=
| population agglomération=
| année population=
| altitude=
| latitude=
| longitude=
| zoom=
| code postal=
| indicatif=
| adresse OT=
| horaire OT=
| téléphone OT=
| numéro gratuit OT=
| email OT=
| facebook OT=
| twitter OT=
| URL OT=
| URL officiel=
| URL touristique=
}}
"""
return s
# Fetch the images (and translate them)
def recupImage(line) :
s=line
for i in range (len(src[2])) :
s=s.replace(src[2][i],dest[2][i])
return s
# Fetch the sections and subsections
def recupSection(line) :
s=line
for i in range (len(src[1])) :
s=s.replace(src[1][i],dest[1][i])
return s
# Fetch the listings
def recupListing(line,debut) :
s=line
if debut :
for i in range (len(src[3])) :
s=s.replace(src[3][i],dest[3][i])
for i in range (len(src[4])) :
s=s.replace(src[4][i],dest[4][i])
return s
# Fetch the itinerary stage sections
def recupItineraire(line) :
s=line
for i in range (len(src[5])) :
s=s.replace(src[5][i],dest[5][i])
return s
# Fetch the categorization
def recupDans(line) :
s=line
for i in range (len(src[6])) :
s=s.replace(src[6][i],dest[6][i])
return s
# Fetch the maps with regions
def recupMap(line,numMap) :
s=line
if numMap == 0 :
for i in range (len(src[7])) :
s=s.replace(src[7][i],dest[7][i])
numPrec=str(numMap-1)
sNumMap=str(numMap)
for i in range (len(src[8])):
src[8][i]=src[8][i].replace(numPrec,sNumMap)
dest[8][i]=dest[8][i].replace(numPrec,sNumMap)
if numMap > 0 :
for i in range (len(src[8])) :
s=s.replace(src[8][i],dest[8][i])
return s
def recupClimat(line) :
s=line
for i in range (len(src[11])):
s=s.replace(src[11][i],dest[11][i])
return s
# The program itself
parser = argparse.ArgumentParser()
parser.add_argument('title',help="nom de la page à convertir" )
parser.add_argument('--src',help="langage source : fr,en,… par défault fr ")
parser.add_argument('--dest',help="langage destination : fr,en,… par défault en ")
parser.add_argument('-d','--debug',action='store_true' ,help="mode debugage : récupération du fichier source en même temps que le résultat")
parser.add_argument('-C','--nocomment',action='store_true' ,help="commentaires désactivé dans le résultat ")
args = parser.parse_args()
bAv=False # whether the article status banner has been placed
result="" # to store the result
#arguments
title=args.title
if args.src and args.src.lower() in ListLang.keys() :
src=ListLang[args.src.lower()]
lang=args.src.lower()
if args.dest and args.dest.lower() in ListLang.keys() :
dest=ListLang[args.dest.lower()]
url="https://"+lang+".wikivoyage.org/w/index.php?title="+title+"&action=raw"
content=urllib.urlopen(url).readlines()
# fetch the article type and build the header
TypeArticle=recupTypeArticle()
result +=creationEntete(TypeArticle,title)
# the various regexes
regImg =re.compile(src[9][0])
regSection =re.compile(src[9][1])
regListing =re.compile(src[9][2])
regListingEnd =re.compile(src[9][3])
regDans =re.compile(src[9][4])
regItineraire =re.compile(src[9][5])
regMap =re.compile(src[9][6])
regNomRegion =re.compile(src[9][7])
regClimat =re.compile(src[9][9])
regDiplomat =re.compile(src[9][10])
# Open and read
i=0
numMap=-1
bClimat=False
bListing=False
for line in content:
i=i+1
if numMap>-1 :
if regNomRegion.search(line) :
numMap=numMap+1
result+=recupMap(line,numMap)
if regListingEnd.search(line) :
sNumMap=str(numMap)
for i in range (len(src[8])):
src[8][i]=src[8][i].replace(sNumMap,"0")
dest[8][i]=dest[8][i].replace(sNumMap,"0")
numMap=-1
if bClimat or regClimat.search(line):
result+=recupClimat(line)
bClimat=True
if regListingEnd.search(line) :
bClimat=False
elif bListing :
s=recupListing(line,False)
if regListingEnd.search(line) :
bListing=False
if not regListingEnd.search(s) :
s+="}}"
result+=s
elif regDiplomat.search(line) and dest==ListFr :
s="* {{Représentation diplomatique"
bListing=True
result+=s
elif regMap.search(line) :
numMap=0
result+=recupMap(line,numMap)
elif regItineraire.search(line) :
result+=recupItineraire(line)
elif regListing.search(line) :
s=recupListing(line,True)
if dest==ListFr and section in ListingsSpecialFr.keys() :
s=s.replace('Listing',ListingsSpecialFr[section])
result+=s
bListing=True
elif regImg.search(line) :
result+=recupImage(line)
elif regSection.search(line) :
s=recupSection(line)
if len(s)>3 and s[2] !="=" :
section=s.replace("==","").replace("\n","")
result+=s
elif regDans.search(line) :
s=dest[10].replace("0",TypeArticle.lower()) #avancement
result+=s
bAv=True
result+=recupDans(line)
if (not bAv) : # if the status banner still has not been placed
s=dest[10].replace("0",TypeArticle.lower())
result+=s
# Write the files
title=title.replace("/","-")
title=title.replace(".","-")
if args.nocomment is True :
result=re.sub(r'<!--(.*)(.|\n)(.*)-->',r'\2',result)
with open("./"+title+".txt", "w") as fichier:
fichier.write(result)
if args.debug is True :
with open("./"+title+"_src.txt", "w") as fichier:
fichier.writelines(content)
| gpl-3.0 | -8,850,327,259,524,849,000 | 35.261838 | 229 | 0.639038 | false |
MultiNet-80211/Hostapd | wpa_supplicant/examples/wpas-dbus-new-signals.py | 1 | 6275 | #!/usr/bin/python
import dbus
import sys, os
import time
import gobject
from dbus.mainloop.glib import DBusGMainLoop
WPAS_DBUS_SERVICE = "fi.w1.wpa_supplicant1"
WPAS_DBUS_INTERFACE = "fi.w1.wpa_supplicant1"
WPAS_DBUS_OPATH = "/fi/w1/wpa_supplicant1"
WPAS_DBUS_INTERFACES_INTERFACE = "fi.w1.wpa_supplicant1.Interface"
WPAS_DBUS_INTERFACES_OPATH = "/fi/w1/wpa_supplicant1/Interfaces"
WPAS_DBUS_BSS_INTERFACE = "fi.w1.wpa_supplicant1.BSS"
WPAS_DBUS_NETWORK_INTERFACE = "fi.w1.wpa_supplicant1.Network"
def byte_array_to_string(s):
import urllib
r = ""
for c in s:
if c >= 32 and c < 127:
r += "%c" % c
else:
r += urllib.quote(chr(c))
return r
def list_interfaces(wpas_obj):
ifaces = wpas_obj.Get(WPAS_DBUS_INTERFACE, 'Interfaces',
dbus_interface=dbus.PROPERTIES_IFACE)
for path in ifaces:
if_obj = bus.get_object(WPAS_DBUS_SERVICE, path)
ifname = if_obj.Get(WPAS_DBUS_INTERFACES_INTERFACE, 'Ifname',
dbus_interface=dbus.PROPERTIES_IFACE)
print ifname
def interfaceAdded(interface, properties):
print "InterfaceAdded(%s): Ifname=%s" % (interface, properties['Ifname'])
def interfaceRemoved(interface):
print "InterfaceRemoved(%s)" % (interface)
def propertiesChanged(properties):
for i in properties:
print "PropertiesChanged: %s=%s" % (i, properties[i])
def showBss(bss):
net_obj = bus.get_object(WPAS_DBUS_SERVICE, bss)
net = dbus.Interface(net_obj, WPAS_DBUS_BSS_INTERFACE)
# Convert the byte-array for SSID and BSSID to printable strings
val = net_obj.Get(WPAS_DBUS_BSS_INTERFACE, 'BSSID',
dbus_interface=dbus.PROPERTIES_IFACE)
bssid = ""
for item in val:
bssid = bssid + ":%02x" % item
bssid = bssid[1:]
val = net_obj.Get(WPAS_DBUS_BSS_INTERFACE, 'SSID',
dbus_interface=dbus.PROPERTIES_IFACE)
ssid = byte_array_to_string(val)
val = net_obj.Get(WPAS_DBUS_BSS_INTERFACE, 'WPAIE',
dbus_interface=dbus.PROPERTIES_IFACE)
wpa = "no"
if val != None:
wpa = "yes"
val = net_obj.Get(WPAS_DBUS_BSS_INTERFACE, 'RSNIE',
dbus_interface=dbus.PROPERTIES_IFACE)
wpa2 = "no"
if val != None:
wpa2 = "yes"
freq = net_obj.Get(WPAS_DBUS_BSS_INTERFACE, 'Frequency',
dbus_interface=dbus.PROPERTIES_IFACE)
signal = net_obj.Get(WPAS_DBUS_BSS_INTERFACE, 'Signal',
dbus_interface=dbus.PROPERTIES_IFACE)
val = net_obj.Get(WPAS_DBUS_BSS_INTERFACE, 'Rates',
dbus_interface=dbus.PROPERTIES_IFACE)
if len(val) > 0:
maxrate = val[0] / 1000000
else:
maxrate = 0
print " %s :: ssid='%s' wpa=%s wpa2=%s signal=%d rate=%d freq=%d" % (bssid, ssid, wpa, wpa2, signal, maxrate, freq)
def scanDone(success):
gobject.MainLoop().quit()
print "Scan done: success=%s" % success
def scanDone2(success, path=None):
print "Scan done: success=%s [path=%s]" % (success, path)
def bssAdded(bss, properties):
print "BSS added: %s" % (bss)
showBss(bss)
def bssRemoved(bss):
print "BSS removed: %s" % (bss)
def blobAdded(blob):
print "BlobAdded(%s)" % (blob)
def blobRemoved(blob):
print "BlobRemoved(%s)" % (blob)
def networkAdded(network, properties):
print "NetworkAdded(%s)" % (network)
def networkRemoved(network):
print "NetworkRemoved(%s)" % (network)
def networkSelected(network):
print "NetworkSelected(%s)" % (network)
def propertiesChangedInterface(properties):
for i in properties:
print "PropertiesChanged(interface): %s=%s" % (i, properties[i])
def propertiesChangedBss(properties):
for i in properties:
print "PropertiesChanged(BSS): %s=%s" % (i, properties[i])
def propertiesChangedNetwork(properties):
for i in properties:
print "PropertiesChanged(Network): %s=%s" % (i, properties[i])
def main():
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
global bus
bus = dbus.SystemBus()
wpas_obj = bus.get_object(WPAS_DBUS_SERVICE, WPAS_DBUS_OPATH)
if len(sys.argv) != 2:
list_interfaces(wpas_obj)
os._exit(1)
wpas = dbus.Interface(wpas_obj, WPAS_DBUS_INTERFACE)
bus.add_signal_receiver(interfaceAdded,
dbus_interface=WPAS_DBUS_INTERFACE,
signal_name="InterfaceAdded")
bus.add_signal_receiver(interfaceRemoved,
dbus_interface=WPAS_DBUS_INTERFACE,
signal_name="InterfaceRemoved")
bus.add_signal_receiver(propertiesChanged,
dbus_interface=WPAS_DBUS_INTERFACE,
signal_name="PropertiesChanged")
ifname = sys.argv[1]
path = wpas.GetInterface(ifname)
if_obj = bus.get_object(WPAS_DBUS_SERVICE, path)
iface = dbus.Interface(if_obj, WPAS_DBUS_INTERFACES_INTERFACE)
iface.connect_to_signal("ScanDone", scanDone2,
path_keyword='path')
bus.add_signal_receiver(scanDone,
dbus_interface=WPAS_DBUS_INTERFACES_INTERFACE,
signal_name="ScanDone",
path=path)
bus.add_signal_receiver(bssAdded,
dbus_interface=WPAS_DBUS_INTERFACES_INTERFACE,
signal_name="BSSAdded",
path=path)
bus.add_signal_receiver(bssRemoved,
dbus_interface=WPAS_DBUS_INTERFACES_INTERFACE,
signal_name="BSSRemoved",
path=path)
bus.add_signal_receiver(blobAdded,
dbus_interface=WPAS_DBUS_INTERFACES_INTERFACE,
signal_name="BlobAdded",
path=path)
bus.add_signal_receiver(blobRemoved,
dbus_interface=WPAS_DBUS_INTERFACES_INTERFACE,
signal_name="BlobRemoved",
path=path)
bus.add_signal_receiver(networkAdded,
dbus_interface=WPAS_DBUS_INTERFACES_INTERFACE,
signal_name="NetworkAdded",
path=path)
bus.add_signal_receiver(networkRemoved,
dbus_interface=WPAS_DBUS_INTERFACES_INTERFACE,
signal_name="NetworkRemoved",
path=path)
bus.add_signal_receiver(networkSelected,
dbus_interface=WPAS_DBUS_INTERFACES_INTERFACE,
signal_name="NetworkSelected",
path=path)
bus.add_signal_receiver(propertiesChangedInterface,
dbus_interface=WPAS_DBUS_INTERFACES_INTERFACE,
signal_name="PropertiesChanged",
path=path)
bus.add_signal_receiver(propertiesChangedBss,
dbus_interface=WPAS_DBUS_BSS_INTERFACE,
signal_name="PropertiesChanged")
bus.add_signal_receiver(propertiesChangedNetwork,
dbus_interface=WPAS_DBUS_NETWORK_INTERFACE,
signal_name="PropertiesChanged")
gobject.MainLoop().run()
if __name__ == "__main__":
main()
| gpl-2.0 | 1,071,992,416,003,939,100 | 28.91133 | 124 | 0.69259 | false |
itsCoder/itscoder.github.io | generate_post.py | 1 | 1594 | # coding=utf-8
import os
import re
FUCK_STR = ' '
PATTERN_PHASE_FILE = re.compile('\S+-weeklyblog-phase-(\d+)\.md')
PATTERN_POST = re.compile('-\s*\[(.+)\]\((https?://\S+)\)\s*\(\[@(.+)\]\((.+)\)\)')
PATTERN_CATEGORY = re.compile('#{5}\s*(.*?)\n')
BLOG_DIR = '_posts/'
def get_post(f, phase):
phase_summary = ''
with open(f, 'r') as md:
content = md.readline().replace(FUCK_STR, " ")
category = ''
while content:
if re.match(PATTERN_CATEGORY, content):
category = re.match(PATTERN_CATEGORY, content).group(1)
else:
post = re.match(PATTERN_POST, content)
if post:
                    # e.g. | article title | Android | author link |
phase_summary += '| [%s](%s) | %s | [%s](%s) |%s|\n' % (
post.group(1), post.group(2), category,
post.group(3), post.group(4), phase)
content = md.readline().replace(FUCK_STR, " ")
return phase_summary
if __name__ == '__main__':
with open('README.md', 'w') as post_md:
th = '| 标题 | 类别 | 作者 | 期数 |\n| :---- | :-----: | :--: | :-----: |\n'
post_md.write(th)
f_list = os.listdir(BLOG_DIR)
f_list.reverse()
for f_name in f_list:
f = os.path.join(BLOG_DIR, f_name)
if os.path.isfile(f):
result = re.match(PATTERN_PHASE_FILE, f_name)
if result:
phase_count = result.group(1)
post_md.write(get_post(f, phase_count)) | mit | -2,145,483,734,492,894,700 | 32.869565 | 83 | 0.466924 | false |
sdrogers/ms2ldaviz | ms2ldaviz/setup_feat_col.py | 1 | 2204 | import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ms2ldaviz.settings_simon")
import django
django.setup()
from basicviz.models import *
if __name__ == '__main__':
other_experiment_name = sys.argv[1]
fs,status = BVFeatureSet.objects.get_or_create(name = 'binned_005')
if status:
print("Created feature set")
else:
print("Featureset already exists")
mbe = Experiment.objects.get(name = 'massbank_binned_005')
print("Got " + str(mbe))
if mbe.featureset == None:
mbe.featureset = fs
mbe.save()
mbe_features = Feature.objects.filter(experiment = mbe)
print("Got {} features".format(len(mbe_features)))
mbe_features_sub = Feature.objects.filter(experiment = mbe,featureset = None)
print("{} have no featureset".format(len(mbe_features_sub)))
for f in mbe_features_sub:
f.featureset = fs
f.save()
# Now get the features as tied to the feature set
mbe_features = Feature.objects.filter(featureset = fs)
print("Got {} features".format(len(mbe_features)))
fnames = set([f.name for f in mbe_features])
# get another experiment
i93 = Experiment.objects.get(name = other_experiment_name)
i93.featureset = fs
i93.save()
print("Got " + str(i93))
i93_features = Feature.objects.filter(experiment = i93)
print("Got {} features".format(len(i93_features)))
for f in i93_features:
if f.name in fnames:
# Find all the instances
fis = FeatureInstance.objects.filter(feature = f)
gfeature = [g for g in mbe_features if g.name == f.name][0]
for fi in fis:
fi.feature = gfeature
fi.save()
mis = Mass2MotifInstance.objects.filter(feature = f)
for ms in mis:
ms.feature = gfeature
ms.save()
else:
new_feature = Feature.objects.create(name = f.name,featureset = fs,min_mz = f.min_mz,max_mz = f.max_mz)
fis = FeatureInstance.objects.filter(feature = f)
for fi in fis:
fi.feature = new_feature
fi.save()
mis = Mass2MotifInstance.objects.filter(feature = f)
for ms in mis:
ms.feature = new_feature
ms.save()
for f in i93_features:
if len(f.featureinstance_set.all()) == 0 and len(f.mass2motifinstance_set.all()) == 0 and len(f.featuremap_set.all()) == 0:
f.delete()
else:
print(f) | mit | 8,517,246,142,656,428,000 | 25.890244 | 125 | 0.685118 | false |
blue-yonder/pyscaffold | tests/test_update.py | 1 | 7140 | # -*- coding: utf-8 -*-
import logging
import os
import re
from os.path import join as path_join
from pathlib import Path
from pkg_resources import parse_version, working_set
import pytest
from pyscaffold import __version__, structure, update
from pyscaffold.utils import chdir
from .helpers import uniqstr
EDITABLE_PYSCAFFOLD = re.compile(r"^-e.+pyscaffold.*$", re.M | re.I)
def test_apply_update_rules_to_file(tmpfolder, caplog):
caplog.set_level(logging.INFO)
NO_OVERWRITE = structure.FileOp.NO_OVERWRITE
NO_CREATE = structure.FileOp.NO_CREATE
# When update is False (no project exists yet) always update
opts = {"update": False}
res = update.apply_update_rule_to_file("a", ("a", NO_CREATE), opts)
assert res == "a"
# When content is string always update
opts = {"update": True}
res = update.apply_update_rule_to_file("a", "a", opts)
assert res == "a"
# When force is True always update
opts = {"update": True, "force": True}
res = update.apply_update_rule_to_file("a", ("a", NO_CREATE), opts)
assert res == "a"
# When file exist, update is True, rule is NO_OVERWRITE, do nothing
opts = {"update": True}
fname = uniqstr()
tmpfolder.join(fname).write("content")
res = update.apply_update_rule_to_file(fname, (fname, NO_OVERWRITE), opts)
assert res is None
logs = caplog.text
assert re.search("skip.*" + fname, logs)
# When file does not exist, update is True, but rule is NO_CREATE, do
# nothing
opts = {"update": True}
fname = uniqstr()
res = update.apply_update_rule_to_file(fname, (fname, NO_CREATE), opts)
assert res is None
assert re.search("skip.*" + fname, caplog.text)
def test_apply_update_rules(tmpfolder):
NO_OVERWRITE = structure.FileOp.NO_OVERWRITE
NO_CREATE = structure.FileOp.NO_CREATE
opts = dict(update=True)
struct = {
"a": ("a", NO_OVERWRITE),
"b": "b",
"c": {"a": "a", "b": ("b", NO_OVERWRITE)},
"d": {"a": ("a", NO_OVERWRITE), "b": ("b", NO_CREATE)},
"e": ("e", NO_CREATE),
}
dir_struct = {"a": "a", "c": {"b": "b"}}
exp_struct = {"b": "b", "c": {"a": "a"}, "d": {"a": "a"}}
structure.create_structure(dir_struct, opts)
res_struct, _ = update.apply_update_rules(struct, opts)
assert res_struct == exp_struct
class VenvManager(object):
def __init__(self, tmpdir, venv, pytestconfig):
self.tmpdir = str(tmpdir) # convert Path to str
self.installed = False
self.venv = venv
self.venv_path = str(venv.virtualenv)
self.pytestconfig = pytestconfig
self.venv.install_package("install coverage", installer="pip")
self.running_version = parse_version(__version__)
def install_this_pyscaffold(self):
# Normally the following command should do the trick
# self.venv.install_package('PyScaffold')
# but sadly pytest-virtualenv chokes on the src-layout of PyScaffold
# ToDo: The following will fail on Windows...
if "TOXINIDIR" in os.environ:
# so py.test runs within tox
src_dir = os.environ["TOXINIDIR"]
else:
installed = [p for p in working_set if p.project_name == "PyScaffold"]
msg = "Install PyScaffold with python setup.py develop!"
assert installed, msg
src_dir = path_join(installed[0].location, "..")
cmd = "{python} setup.py -q develop".format(python=self.venv.python)
self.run(cmd, cwd=src_dir)
# Make sure pyscaffold was not installed using PyPI
assert self.running_version.public <= self.pyscaffold_version().public
pkg_list = self.run("{} -m pip freeze".format(self.venv.python))
assert EDITABLE_PYSCAFFOLD.findall(pkg_list)
self.installed = True
return self
def install_pyscaffold(self, major, minor):
ver = "pyscaffold>={major}.{minor},<{major}.{next_minor}a0".format(
major=major, minor=minor, next_minor=minor + 1
)
# we need the extra "" to protect from interpretation by the shell
self.venv.install_package('install "{}"'.format(ver), installer="pip")
installed_version = self.pyscaffold_version()._version.release[:2]
assert installed_version == (major, minor)
self.installed = True
return self
def uninstall_pyscaffold(self):
self.run("pip uninstall -y pyscaffold")
assert "PyScaffold" not in self.venv.installed_packages().keys()
self.installed = False
return self
def pyscaffold_version(self):
version = self.venv.installed_packages().get("PyScaffold", None)
if version:
return parse_version(version.version)
else:
return None
def putup(self, *args, with_coverage=False, **kwargs):
if with_coverage:
# need to pass here as list since its args to coverage.py
args = [subarg for arg in args for subarg in arg.split()]
putup_path = path_join(self.venv_path, "bin", "putup")
cmd = [putup_path] + args
else:
# need to pass here as string since it's the cmd itself
cmd = " ".join(["putup"] + list(args))
self.run(cmd, with_coverage=with_coverage, **kwargs)
return self
def run(self, cmd, with_coverage=False, **kwargs):
if with_coverage:
kwargs.setdefault("pytestconfig", self.pytestconfig)
# change to directory where .coverage needs to be created
kwargs.setdefault("cd", os.getcwd())
return self.venv.run_with_coverage(cmd, **kwargs).strip()
else:
with chdir(self.tmpdir):
kwargs.setdefault("cwd", self.tmpdir)
return self.venv.run(cmd, capture=True, **kwargs).strip()
def get_file(self, path):
with chdir(self.tmpdir):
return Path(path).read_text()
@pytest.fixture
def venv_mgr(tmpdir, venv, pytestconfig):
return VenvManager(tmpdir, venv, pytestconfig)
@pytest.mark.slow
def test_update_version_3_0_to_3_1(with_coverage, venv_mgr):
project = path_join(venv_mgr.venv_path, "my_old_project")
(
venv_mgr.install_pyscaffold(3, 0)
.putup(project)
.uninstall_pyscaffold()
.install_this_pyscaffold()
.putup("--update {}".format(project), with_coverage=with_coverage)
)
setup_cfg = venv_mgr.get_file(path_join(project, "setup.cfg"))
assert "[options.entry_points]" in setup_cfg
assert "setup_requires" in setup_cfg
@pytest.mark.slow
def test_update_version_3_0_to_3_1_pretend(with_coverage, venv_mgr):
project = path_join(venv_mgr.venv_path, "my_old_project")
(
venv_mgr.install_pyscaffold(3, 0)
.putup(project)
.uninstall_pyscaffold()
.install_this_pyscaffold()
.putup("--pretend --update {}".format(project), with_coverage=with_coverage)
)
setup_cfg = venv_mgr.get_file(path_join(project, "setup.cfg"))
assert "[options.entry_points]" not in setup_cfg
assert "setup_requires" not in setup_cfg
| mit | 422,592,667,080,846,660 | 36.578947 | 84 | 0.621429 | false |
uni-peter-zheng/tp-qemu | openvswitch/tests/ovs_basic.py | 4 | 10440 | import logging
import time
import os
from virttest import utils_misc, aexpect, utils_net, openvswitch, ovs_utils
from virttest import versionable_class, data_dir
from autotest.client.shared import error
def allow_iperf_firewall(machine):
machine.cmd("iptables -I INPUT -p tcp --dport 5001 --j ACCEPT")
machine.cmd("iptables -I INPUT -p udp --dport 5001 --j ACCEPT")
class MiniSubtest(object):
def __new__(cls, *args, **kargs):
self = super(MiniSubtest, cls).__new__(cls)
ret = None
if args is None:
args = []
try:
if hasattr(self, "setup"):
self.setup(*args, **kargs)
ret = self.test(*args, **kargs)
finally:
if hasattr(self, "clean"):
self.clean(*args, **kargs)
return ret
class InfrastructureInit(MiniSubtest):
def setup(self, test, params, env):
self.br0_name = "br0-%s" % (utils_misc.generate_random_string(3))
while self.br0_name in utils_net.get_net_if():
self.br0_name = "br0-%s" % (utils_misc.generate_random_string(3))
self.br0_ip = params.get("bridge_ip", "192.168.250.1")
self.ovs = None
error.context("Try to log into guest.")
self.vms = [env.get_vm(vm) for vm in params.get("vms").split()]
for vm in self.vms:
vm.verify_alive()
error.context("Start OpenVSwitch.")
self.ovs = versionable_class.factory(openvswitch.OpenVSwitchSystem)()
self.ovs.init_system()
self.ovs.check()
error.context("Add new bridge %s." % (self.br0_name))
self.ovs.add_br(self.br0_name)
utils_net.set_net_if_ip(self.br0_name, self.br0_ip)
utils_net.bring_up_ifname(self.br0_name)
self.dns_pidf = (utils_net.check_add_dnsmasq_to_br(self.br0_name,
test.tmpdir))
error.context("Add new ports from vms %s to bridge %s." %
(self.vms, self.br0_name))
for vm in self.vms:
utils_net.change_iface_bridge(vm.virtnet[1],
self.br0_name,
self.ovs)
logging.debug(self.ovs.status())
self.host = ovs_utils.Machine(src=test.srcdir)
self.mvms = [ovs_utils.Machine(vm) for vm in self.vms]
self.machines = [self.host] + self.mvms
        # ForAllP(self.mvms).cmd("dhclient")
time.sleep(5)
utils_misc.ForAllP(self.machines).fill_addrs()
def clean(self, test, params, env):
if self.ovs:
try:
if self.dns_pidf is not None:
utils_misc.signal_program(self.dns_pidf[0:-4],
pid_files_dir=test.tmpdir)
except:
pass
try:
self.ovs.del_br(self.br0_name)
except Exception:
pass
if self.ovs.cleanup:
self.ovs.clean()
@error.context_aware
def run(test, params, env):
"""
Run basic test of OpenVSwitch driver.
"""
class test_ping(InfrastructureInit):
def test(self, test, params, env):
count = params.get("ping_count", 10)
for mvm in self.mvms:
for p_mvm in self.mvms:
addr = None
if p_mvm.is_virtual():
addr = p_mvm.virtnet[1].ip["ipv6"][0]
else:
addr = p_mvm.addrs[self.br0_name]["ipv6"][0]
if p_mvm.is_virtual():
mvm.ping(addr, 1, count)
else:
mvm.ping(addr, self.br0_name, count)
class test_iperf(InfrastructureInit):
def start_servers(self):
utils_misc.ForAllP(
self.machines).cmd_in_src("%s -s &> /dev/null &" %
(self.iperf_b_path))
utils_misc.ForAllP(
self.machines).cmd_in_src("%s -s -u &> /dev/null &" %
(self.iperf_b_path))
def iperf_client(self, machine, server_ip, add_params):
out = machine.cmd_in_src("%s -c %s %s" %
(self.iperf_b_path,
server_ip,
add_params))
return " ".join(out.splitlines()[-1].split()[6:8])
def test_bandwidth(self, add_params=None):
if add_params is None:
add_params = ""
speeds = []
speeds.append(self.iperf_client(self.mvms[0],
self.host.addrs[
self.br0_name]["ipv4"][0],
add_params))
speeds.append(self.iperf_client(self.host,
self.mvms[0].virtnet[
1].ip["ipv4"][0],
add_params))
speeds.append(self.iperf_client(self.mvms[0],
self.mvms[1].virtnet[
1].ip["ipv4"][0],
add_params))
return speeds
def test(self, test, params, env):
iperf_src_path = os.path.join(data_dir.get_deps_dir(), "iperf")
self.iperf_b_path = os.path.join("iperf-2.0.4", "src", "iperf")
error.context("Install iperf to vms machine.")
utils_misc.ForAllP(
self.machines).compile_autotools_app_tar(iperf_src_path,
"iperf-2.0.4.tar.gz")
allow_iperf_firewall(self.host)
utils_misc.ForAllP(self.mvms).cmd("iptables -F")
self.start_servers()
# Test TCP bandwidth
error.context("Test iperf bandwidth tcp.")
speeds = self.test_bandwidth()
logging.info("TCP Bandwidth from vm->host: %s", speeds[0])
logging.info("TCP Bandwidth from host->vm: %s", speeds[1])
logging.info("TCP Bandwidth from vm->vm: %s", speeds[2])
# test udp bandwidth limited to 1Gb
error.context("Test iperf bandwidth udp.")
speeds = self.test_bandwidth("-u -b 1G")
logging.info("UDP Bandwidth from vm->host: %s", speeds[0])
logging.info("UDP Bandwidth from host->vm: %s", speeds[1])
logging.info("UDP Bandwidth from vm->vm: %s", speeds[2])
def clean(self, test, params, env):
self.host.cmd("killall -9 iperf")
super(test_iperf, self).clean(test, params, env)
class test_vlan_ping(InfrastructureInit):
def test(self, test, params, env):
count = params.get("ping_count", 10)
ret = utils_misc.ForAllPSE(self.mvms).ping(
self.host.addrs[self.br0_name]["ipv6"][0],
1, count)
for ret, vm in zip(ret, self.mvms):
if "exception" in ret:
raise error.TestError("VM %s can't ping host:\n %s" %
(vm.name, ret.exception))
error.context("Add OpenVSwitch device to vlan.")
self.ovs.add_port_tag(self.mvms[0].virtnet[1].ifname, "1")
self.ovs.add_port_tag(self.mvms[1].virtnet[1].ifname, "1")
self.ovs.add_port_tag(self.mvms[2].virtnet[1].ifname, "2")
self.ovs.add_port_tag(self.mvms[3].virtnet[1].ifname, "2")
error.context("Ping all devices in vlan.")
self.mvms[2].ping(self.mvms[3].virtnet[1].ip["ipv6"][0], 1, 2)
self.mvms[3].ping(self.mvms[2].virtnet[1].ip["ipv6"][0], 1, 2)
self.mvms[0].ping(self.mvms[1].virtnet[1].ip["ipv6"][0], 1, 1)
self.mvms[1].ping(self.mvms[0].virtnet[1].ip["ipv6"][0], 1, 1)
try:
self.mvms[0].ping(self.mvms[2].virtnet[1].ip["ipv6"][0],
1, 2)
raise error.TestError("VM %s can't ping host:\n %s" %
(vm.name, ret.exception))
except (error.CmdError, aexpect.ShellError):
pass
self.mvms[0].add_vlan_iface(self.mvms[0].virtnet[1].g_nic_name, 1)
self.mvms[0].add_vlan_iface(self.mvms[0].virtnet[1].g_nic_name, 2)
self.ovs.add_port_tag(self.mvms[0].virtnet[1].ifname, "[]")
self.ovs.add_port_trunk(self.mvms[0].virtnet[1].ifname, [1, 2])
time.sleep(1)
error.context("Ping all devices in vlan.")
self.mvms[0].ping(self.mvms[1].virtnet[1].ip["ipv6"][0], 1,
count, vlan=1)
self.mvms[0].ping(self.mvms[2].virtnet[1].ip["ipv6"][0], 1,
count, vlan=2)
self.mvms[1].ping(self.mvms[0].virtnet[1].ip["ipv6"][0], 1,
count)
self.mvms[2].ping(self.mvms[0].virtnet[1].ip["ipv6"][0], 1,
count)
try:
self.mvms[0].ping(self.mvms[2].virtnet[1].ip["ipv6"][0],
1, 2)
raise error.TestError("VM %s shouldn't be able to ping"
" host:\n %s" % (vm.name, ret.exception))
except (error.CmdError, aexpect.ShellError):
pass
for i in range(0, 4095, 10):
self.ovs.add_port_tag(self.mvms[0].virtnet[1].ifname, "[]")
self.ovs.add_port_trunk(self.mvms[0].virtnet[1].ifname, [i])
self.ovs.add_port_trunk(self.mvms[0].virtnet[1].ifname,
range(4095))
self.ovs.add_port_trunk(self.mvms[0].virtnet[1].ifname, [1])
self.mvms[0].ping(self.mvms[1].virtnet[1].ip["ipv6"][0], 1,
count, vlan=1)
test_type = "test_" + params.get("test_type")
if (test_type in locals()):
tests_group = locals()[test_type]
tests_group(test, params, env)
else:
raise error.TestFail("Test type '%s' is not defined in"
" OpenVSwitch basic test" % test_type)
| gpl-2.0 | -4,935,766,518,107,801,000 | 39.153846 | 79 | 0.484962 | false |
beyoungwoo/C_glibc_Sample | _Algorithm/ProjectEuler_python/euler_4.py | 1 | 1587 | #!/usr/bin/python -Wall
# -*- coding: utf-8 -*-
"""
Project Euler, Problem 4: Largest palindrome product
(published 16 November 2001, difficulty rating 5%)

A palindromic number reads the same both ways. The largest palindrome made
from the product of two 2-digit numbers is 9009 = 91 * 99.

Find the largest palindrome made from the product of two 3-digit numbers.
"""
# 999 * 999 = 998001
# 998 comp 100
def L_comp(n, s):
if (n == 2) :
if (s[0] == s[3] and s[1] == s[2]) :
return True
else :
return False
elif (n == 3) :
if (s[0] == s[5] and s[1] == s[4] and s[2] == s[3]) :
return True
else :
return False
def L_mutiple(n, max_num):
max_range = max_num -1
min_range = max_num /2
    best = -1
    for i in range(max_range, min_range, -1):
        for j in range(max_range, min_range, -1):
            ret = i * j
            s = "%d" % (ret)
            result = L_comp(n, s)
            # do not return on the first hit: the first palindrome found is not
            # necessarily the largest product, so keep the maximum instead
            if (result and ret > best):
                best = ret
    return best
def L_plaindrome(n):
if (n != 2 and n != 3):
print "invalid input"
return -1
max_num = 1
for i in range (0, n):
max_num *= 10
return L_mutiple(n, max_num)
print L_plaindrome(3)
| gpl-3.0 | -6,911,959,164,782,817,000 | 28.388889 | 198 | 0.555766 | false |
michal-ruzicka/archivematica | src/MCPClient/lib/clientScripts/restructureForComplianceMaildir.py | 1 | 1919 | #!/usr/bin/python -OO
# This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry <joseph@artefactual.com>
import os
import sys
import shutil
requiredDirectories = ["logs", "logs/fileMeta", "metadata", "metadata/submissionDocumentation", "objects", "objects/Maildir"]
optionalFiles = ["processingMCP.xml"]
def restructureMaildirDirectory(unitPath):
for dir in requiredDirectories:
dirPath = os.path.join(unitPath, dir)
if not os.path.isdir(dirPath):
os.mkdir(dirPath)
print "creating: ", dir
for item in os.listdir(unitPath):
dst = os.path.join(unitPath, "objects", "Maildir") + "/."
itemPath = os.path.join(unitPath, item)
if os.path.isdir(itemPath) and item not in requiredDirectories:
shutil.move(itemPath, dst)
print "moving directory to objects/Maildir: ", item
elif os.path.isfile(itemPath) and item not in optionalFiles:
shutil.move(itemPath, dst)
print "moving file to objects/Maildir: ", item
if __name__ == '__main__':
target = sys.argv[1]
restructureMaildirDirectory(target)
| agpl-3.0 | 8,630,748,113,008,248,000 | 36.627451 | 125 | 0.705576 | false |
gopaycommunity/gopay-python-api | tests/unit/utils.py | 1 | 1531 | from gopay.enums import PaymentInstrument, BankSwiftCode, Currency, Language
class Utils:
GO_ID = '8712700986'
CLIENT_ID = '1689337452'
CLIENT_SECRET = 'CKr7FyEE'
CLIENT_ID_EET = "1365575992"
CLIENT_SECRET_EET = "NUVsrv4W"
GO_ID_EET = '8289213768'
@staticmethod
def create_base_payment():
base_payment = {
'payer': {
'allowed_payment_instruments': [PaymentInstrument.BANK_ACCOUNT, PaymentInstrument.PAYMENT_CARD],
'allowed_swifts': [BankSwiftCode.CESKA_SPORITELNA, BankSwiftCode.RAIFFEISENBANK],
#'default_swift': BankSwiftCode.CESKA_SPORITELNA,
#'default_payment_instrument': PaymentInstrument.BANK_ACCOUNT,
'contact': {
'email': 'test.test@gopay.cz',
},
},
'order_number': '6789',
'amount': '1900',
'currency': Currency.CZECH_CROWNS,
'order_description': '6789Description',
'lang': Language.CZECH, # if lang is not specified, then default lang is used
'additional_params': [
{'name': 'AdditionalKey', 'value': 'AdditionalValue'}
],
'items': [
{'name': 'Item01', 'amount': '1900', 'count' : '1'},
],
'callback': {
'return_url': 'https://eshop123.cz/return',
'notification_url': 'https://eshop123.cz/notify'
},
}
return base_payment
| mit | -2,250,619,758,455,118,600 | 35.452381 | 112 | 0.536251 | false |
ludwiktrammer/odoo | addons/sale/sale.py | 1 | 42630 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta
from openerp import SUPERUSER_ID
from openerp import api, fields, models, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import UserError
from openerp.tools import float_is_zero, float_compare, DEFAULT_SERVER_DATETIME_FORMAT
class res_company(models.Model):
_inherit = "res.company"
sale_note = fields.Text(string='Default Terms and Conditions', translate=True)
class SaleOrder(models.Model):
_name = "sale.order"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_description = "Sales Order"
_order = 'date_order desc, id desc'
@api.depends('order_line.price_total')
def _amount_all(self):
"""
Compute the total amounts of the SO.
"""
for order in self:
amount_untaxed = amount_tax = 0.0
for line in order.order_line:
amount_untaxed += line.price_subtotal
amount_tax += line.price_tax
order.update({
'amount_untaxed': order.pricelist_id.currency_id.round(amount_untaxed),
'amount_tax': order.pricelist_id.currency_id.round(amount_tax),
'amount_total': amount_untaxed + amount_tax,
})
@api.depends('state', 'order_line.invoice_status')
def _get_invoiced(self):
"""
Compute the invoice status of a SO. Possible statuses:
- no: if the SO is not in status 'sale' or 'done', we consider that there is nothing to
          invoice. This is also the default value if the conditions for no other status are met.
- to invoice: if any SO line is 'to invoice', the whole SO is 'to invoice'
- invoiced: if all SO lines are invoiced, the SO is invoiced.
- upselling: if all SO lines are invoiced or upselling, the status is upselling.
The invoice_ids are obtained thanks to the invoice lines of the SO lines, and we also search
for possible refunds created directly from existing invoices. This is necessary since such a
refund is not directly linked to the SO.
"""
for order in self:
invoice_ids = order.order_line.mapped('invoice_lines').mapped('invoice_id')
# Search for refunds as well
refund_ids = self.env['account.invoice'].browse()
if invoice_ids:
refund_ids = refund_ids.search([('type', '=', 'out_refund'), ('origin', 'in', invoice_ids.mapped('number')), ('origin', '!=', False)])
line_invoice_status = [line.invoice_status for line in order.order_line]
if order.state not in ('sale', 'done'):
invoice_status = 'no'
elif any(invoice_status == 'to invoice' for invoice_status in line_invoice_status):
invoice_status = 'to invoice'
elif all(invoice_status == 'invoiced' for invoice_status in line_invoice_status):
invoice_status = 'invoiced'
elif all(invoice_status in ['invoiced', 'upselling'] for invoice_status in line_invoice_status):
invoice_status = 'upselling'
else:
invoice_status = 'no'
order.update({
'invoice_count': len(set(invoice_ids.ids + refund_ids.ids)),
'invoice_ids': invoice_ids.ids + refund_ids.ids,
'invoice_status': invoice_status
})
@api.model
def _default_note(self):
return self.env.user.company_id.sale_note
@api.model
def _get_default_team(self):
default_team_id = self.env['crm.team']._get_default_team_id()
return self.env['crm.team'].browse(default_team_id)
@api.onchange('fiscal_position_id')
def _compute_tax_id(self):
"""
Trigger the recompute of the taxes if the fiscal position is changed on the SO.
"""
for order in self:
order.order_line._compute_tax_id()
name = fields.Char(string='Order Reference', required=True, copy=False, readonly=True, index=True, default=lambda self: _('New'))
origin = fields.Char(string='Source Document', help="Reference of the document that generated this sales order request.")
client_order_ref = fields.Char(string='Customer Reference', copy=False)
state = fields.Selection([
('draft', 'Quotation'),
('sent', 'Quotation Sent'),
('sale', 'Sale Order'),
('done', 'Done'),
('cancel', 'Cancelled'),
], string='Status', readonly=True, copy=False, index=True, track_visibility='onchange', default='draft')
date_order = fields.Datetime(string='Order Date', required=True, readonly=True, index=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=False, default=fields.Datetime.now)
validity_date = fields.Date(string='Expiration Date', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]})
create_date = fields.Datetime(string='Creation Date', readonly=True, index=True, help="Date on which sales order is created.")
user_id = fields.Many2one('res.users', string='Salesperson', index=True, track_visibility='onchange', default=lambda self: self.env.user)
partner_id = fields.Many2one('res.partner', string='Customer', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, required=True, change_default=True, index=True, track_visibility='always')
partner_invoice_id = fields.Many2one('res.partner', string='Invoice Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Invoice address for current sales order.")
partner_shipping_id = fields.Many2one('res.partner', string='Delivery Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Delivery address for current sales order.")
pricelist_id = fields.Many2one('product.pricelist', string='Pricelist', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Pricelist for current sales order.")
currency_id = fields.Many2one("res.currency", related='pricelist_id.currency_id', string="Currency", readonly=True, required=True)
project_id = fields.Many2one('account.analytic.account', 'Analytic Account', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="The analytic account related to a sales order.", copy=False, domain=[('account_type', '=', 'normal')])
order_line = fields.One2many('sale.order.line', 'order_id', string='Order Lines', states={'cancel': [('readonly', True)], 'done': [('readonly', True)]}, copy=True)
invoice_count = fields.Integer(string='# of Invoices', compute='_get_invoiced', readonly=True)
invoice_ids = fields.Many2many("account.invoice", string='Invoices', compute="_get_invoiced", readonly=True, copy=False)
invoice_status = fields.Selection([
('upselling', 'Upselling Opportunity'),
('invoiced', 'Fully Invoiced'),
('to invoice', 'To Invoice'),
('no', 'Nothing to Invoice')
], string='Invoice Status', compute='_get_invoiced', store=True, readonly=True, default='no')
note = fields.Text('Terms and conditions', default=_default_note)
amount_untaxed = fields.Monetary(string='Untaxed Amount', store=True, readonly=True, compute='_amount_all', track_visibility='always')
amount_tax = fields.Monetary(string='Taxes', store=True, readonly=True, compute='_amount_all', track_visibility='always')
amount_total = fields.Monetary(string='Total', store=True, readonly=True, compute='_amount_all', track_visibility='always')
payment_term_id = fields.Many2one('account.payment.term', string='Payment Term', oldname='payment_term')
fiscal_position_id = fields.Many2one('account.fiscal.position', oldname='fiscal_position', string='Fiscal Position')
company_id = fields.Many2one('res.company', 'Company', default=lambda self: self.env['res.company']._company_default_get('sale.order'))
team_id = fields.Many2one('crm.team', 'Sales Team', change_default=True, default=_get_default_team, oldname='section_id')
procurement_group_id = fields.Many2one('procurement.group', 'Procurement Group', copy=False)
product_id = fields.Many2one('product.product', related='order_line.product_id', string='Product')
@api.model
def _get_customer_lead(self, product_tmpl_id):
return False
@api.multi
def button_dummy(self):
return True
@api.multi
def unlink(self):
for order in self:
if order.state != 'draft':
raise UserError(_('You can only delete draft quotations!'))
return super(SaleOrder, self).unlink()
@api.multi
def _track_subtype(self, init_values):
self.ensure_one()
if 'state' in init_values and self.state == 'sale':
return 'sale.mt_order_confirmed'
elif 'state' in init_values and self.state == 'sent':
return 'sale.mt_order_sent'
return super(SaleOrder, self)._track_subtype(init_values)
@api.multi
@api.onchange('partner_shipping_id')
def onchange_partner_shipping_id(self):
"""
Trigger the change of fiscal position when the shipping address is modified.
"""
fiscal_position = self.env['account.fiscal.position'].get_fiscal_position(self.partner_id.id, self.partner_shipping_id.id)
if fiscal_position:
self.fiscal_position_id = fiscal_position
return {}
@api.multi
@api.onchange('partner_id')
def onchange_partner_id(self):
"""
Update the following fields when the partner is changed:
- Pricelist
- Payment term
- Invoice address
- Delivery address
"""
if not self.partner_id:
self.update({
'partner_invoice_id': False,
'partner_shipping_id': False,
'payment_term_id': False,
'fiscal_position_id': False,
})
return
addr = self.partner_id.address_get(['delivery', 'invoice'])
values = {
'pricelist_id': self.partner_id.property_product_pricelist and self.partner_id.property_product_pricelist.id or False,
'payment_term_id': self.partner_id.property_payment_term_id and self.partner_id.property_payment_term_id.id or False,
'partner_invoice_id': addr['invoice'],
'partner_shipping_id': addr['delivery'],
'note': self.with_context(lang=self.partner_id.lang).env.user.company_id.sale_note,
}
if self.partner_id.user_id:
values['user_id'] = self.partner_id.user_id.id
if self.partner_id.team_id:
values['team_id'] = self.partner_id.team_id.id
self.update(values)
@api.model
def create(self, vals):
if vals.get('name', 'New') == 'New':
vals['name'] = self.env['ir.sequence'].next_by_code('sale.order') or 'New'
        # Makes sure 'partner_invoice_id', 'partner_shipping_id' and 'pricelist_id' are defined
if any(f not in vals for f in ['partner_invoice_id', 'partner_shipping_id', 'pricelist_id']):
partner = self.env['res.partner'].browse(vals.get('partner_id'))
addr = partner.address_get(['delivery', 'invoice'])
vals['partner_invoice_id'] = vals.setdefault('partner_invoice_id', addr['invoice'])
vals['partner_shipping_id'] = vals.setdefault('partner_shipping_id', addr['delivery'])
vals['pricelist_id'] = vals.setdefault('pricelist_id', partner.property_product_pricelist and partner.property_product_pricelist.id)
result = super(SaleOrder, self).create(vals)
return result
@api.multi
def _prepare_invoice(self):
"""
Prepare the dict of values to create the new invoice for a sales order. This method may be
overridden to implement custom invoice generation (making sure to call super() to establish
a clean extension chain).
"""
self.ensure_one()
journal_id = self.env['account.invoice'].default_get(['journal_id'])['journal_id']
if not journal_id:
raise UserError(_('Please define an accounting sale journal for this company.'))
invoice_vals = {
'name': self.client_order_ref or '',
'origin': self.name,
'type': 'out_invoice',
'account_id': self.partner_invoice_id.property_account_receivable_id.id,
'partner_id': self.partner_invoice_id.id,
'journal_id': journal_id,
'currency_id': self.pricelist_id.currency_id.id,
'comment': self.note,
'payment_term_id': self.payment_term_id.id,
'fiscal_position_id': self.fiscal_position_id.id or self.partner_invoice_id.property_account_position_id.id,
'company_id': self.company_id.id,
'user_id': self.user_id and self.user_id.id,
'team_id': self.team_id.id
}
return invoice_vals
@api.multi
def print_quotation(self):
self.filtered(lambda s: s.state == 'draft').write({'state': 'sent'})
return self.env['report'].get_action(self, 'sale.report_saleorder')
@api.multi
def action_view_invoice(self):
invoice_ids = self.mapped('invoice_ids')
imd = self.env['ir.model.data']
action = imd.xmlid_to_object('account.action_invoice_tree1')
list_view_id = imd.xmlid_to_res_id('account.invoice_tree')
form_view_id = imd.xmlid_to_res_id('account.invoice_form')
result = {
'name': action.name,
'help': action.help,
'type': action.type,
'views': [[list_view_id, 'tree'], [form_view_id, 'form'], [False, 'graph'], [False, 'kanban'], [False, 'calendar'], [False, 'pivot']],
'target': action.target,
'context': action.context,
'res_model': action.res_model,
}
if len(invoice_ids) > 1:
result['domain'] = "[('id','in',%s)]" % invoice_ids.ids
elif len(invoice_ids) == 1:
result['views'] = [(form_view_id, 'form')]
result['res_id'] = invoice_ids.ids[0]
else:
result = {'type': 'ir.actions.act_window_close'}
return result
@api.multi
def action_invoice_create(self, grouped=False, final=False):
"""
Create the invoice associated to the SO.
:param grouped: if True, invoices are grouped by SO id. If False, invoices are grouped by
(partner, currency)
:param final: if True, refunds will be generated if necessary
:returns: list of created invoices
"""
inv_obj = self.env['account.invoice']
precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
invoices = {}
for order in self:
group_key = order.id if grouped else (order.partner_id.id, order.currency_id.id)
for line in order.order_line.sorted(key=lambda l: l.qty_to_invoice < 0):
if float_is_zero(line.qty_to_invoice, precision_digits=precision):
continue
if group_key not in invoices:
inv_data = order._prepare_invoice()
invoice = inv_obj.create(inv_data)
invoices[group_key] = invoice
elif group_key in invoices:
vals = {}
if order.name not in invoices[group_key].origin.split(', '):
vals['origin'] = invoices[group_key].origin + ', ' + order.name
if order.client_order_ref and order.client_order_ref not in invoices[group_key].name.split(', '):
vals['name'] = invoices[group_key].name + ', ' + order.client_order_ref
invoices[group_key].write(vals)
if line.qty_to_invoice > 0:
line.invoice_line_create(invoices[group_key].id, line.qty_to_invoice)
elif line.qty_to_invoice < 0 and final:
line.invoice_line_create(invoices[group_key].id, line.qty_to_invoice)
for invoice in invoices.values():
if not invoice.invoice_line_ids:
raise UserError(_('There is no invoicable line.'))
# If invoice is negative, do a refund invoice instead
if invoice.amount_untaxed < 0:
invoice.type = 'out_refund'
for line in invoice.invoice_line_ids:
line.quantity = -line.quantity
# Use additional field helper function (for account extensions)
for line in invoice.invoice_line_ids:
line._set_additional_fields(invoice)
# Necessary to force computation of taxes. In account_invoice, they are triggered
# by onchanges, which are not triggered when doing a create.
invoice.compute_taxes()
return [inv.id for inv in invoices.values()]
@api.multi
def action_draft(self):
orders = self.filtered(lambda s: s.state in ['cancel', 'sent'])
orders.write({
'state': 'draft',
'procurement_group_id': False,
})
orders.mapped('order_line').mapped('procurement_ids').write({'sale_line_id': False})
@api.multi
def action_cancel(self):
self.write({'state': 'cancel'})
@api.multi
def action_quotation_send(self):
'''
This function opens a window to compose an email, with the edi sale template message loaded by default
'''
self.ensure_one()
ir_model_data = self.env['ir.model.data']
try:
template_id = ir_model_data.get_object_reference('sale', 'email_template_edi_sale')[1]
except ValueError:
template_id = False
try:
compose_form_id = ir_model_data.get_object_reference('mail', 'email_compose_message_wizard_form')[1]
except ValueError:
compose_form_id = False
ctx = dict()
ctx.update({
'default_model': 'sale.order',
'default_res_id': self.ids[0],
'default_use_template': bool(template_id),
'default_template_id': template_id,
'default_composition_mode': 'comment',
'mark_so_as_sent': True
})
return {
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'mail.compose.message',
'views': [(compose_form_id, 'form')],
'view_id': compose_form_id,
'target': 'new',
'context': ctx,
}
@api.multi
def force_quotation_send(self):
for order in self:
email_act = order.action_quotation_send()
if email_act and email_act.get('context'):
email_ctx = email_act['context']
email_ctx.update(default_email_from=order.company_id.email)
order.with_context(email_ctx).message_post_with_template(email_ctx.get('default_template_id'))
return True
@api.multi
def action_done(self):
self.write({'state': 'done'})
@api.model
def _prepare_procurement_group(self):
return {'name': self.name}
@api.multi
def action_confirm(self):
for order in self:
order.state = 'sale'
if self.env.context.get('send_email'):
self.force_quotation_send()
order.order_line._action_procurement_create()
if not order.project_id:
for line in order.order_line:
if line.product_id.invoice_policy == 'cost':
order._create_analytic_account()
break
if self.env['ir.values'].get_default('sale.config.settings', 'auto_done_setting'):
self.action_done()
return True
@api.multi
def _create_analytic_account(self, prefix=None):
for order in self:
name = order.name
if prefix:
name = prefix + ": " + order.name
analytic = self.env['account.analytic.account'].create({
'name': name,
'code': order.client_order_ref,
'company_id': order.company_id.id,
'partner_id': order.partner_id.id
})
order.project_id = analytic
@api.multi
def _notification_group_recipients(self, message, recipients, done_ids, group_data):
group_user = self.env.ref('base.group_user')
for recipient in recipients:
if recipient.id in done_ids:
continue
if not recipient.user_ids:
group_data['partner'] |= recipient
else:
group_data['user'] |= recipient
done_ids.add(recipient.id)
return super(SaleOrder, self)._notification_group_recipients(message, recipients, done_ids, group_data)
class SaleOrderLine(models.Model):
_name = 'sale.order.line'
_description = 'Sales Order Line'
_order = 'order_id desc, sequence, id'
@api.depends('state', 'product_uom_qty', 'qty_delivered', 'qty_to_invoice', 'qty_invoiced')
def _compute_invoice_status(self):
"""
Compute the invoice status of a SO line. Possible statuses:
- no: if the SO is not in status 'sale' or 'done', we consider that there is nothing to
          invoice. This is also the default value if the conditions for no other status are met.
- to invoice: we refer to the quantity to invoice of the line. Refer to method
`_get_to_invoice_qty()` for more information on how this quantity is calculated.
- upselling: this is possible only for a product invoiced on ordered quantities for which
          we delivered more than expected. This could arise if, for example, a project took more
          time than expected but we decided not to invoice the extra cost to the client. This
          occurs only in state 'sale', so that when a SO is set to done, the upselling opportunity
is removed from the list.
- invoiced: the quantity invoiced is larger or equal to the quantity ordered.
"""
precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
for line in self:
if line.state not in ('sale', 'done'):
line.invoice_status = 'no'
elif not float_is_zero(line.qty_to_invoice, precision_digits=precision):
line.invoice_status = 'to invoice'
elif line.state == 'sale' and line.product_id.invoice_policy == 'order' and\
float_compare(line.qty_delivered, line.product_uom_qty, precision_digits=precision) == 1:
line.invoice_status = 'upselling'
elif float_compare(line.qty_invoiced, line.product_uom_qty, precision_digits=precision) >= 0:
line.invoice_status = 'invoiced'
else:
line.invoice_status = 'no'
@api.depends('product_uom_qty', 'discount', 'price_unit', 'tax_id')
def _compute_amount(self):
"""
Compute the amounts of the SO line.
"""
for line in self:
price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
taxes = line.tax_id.compute_all(price, line.order_id.currency_id, line.product_uom_qty, product=line.product_id, partner=line.order_id.partner_id)
line.update({
'price_tax': taxes['total_included'] - taxes['total_excluded'],
'price_total': taxes['total_included'],
'price_subtotal': taxes['total_excluded'],
})
@api.depends('product_id.invoice_policy', 'order_id.state')
def _compute_qty_delivered_updateable(self):
for line in self:
line.qty_delivered_updateable = line.product_id.invoice_policy in ('order', 'delivery') and line.order_id.state == 'sale' and line.product_id.track_service == 'manual'
@api.depends('qty_invoiced', 'qty_delivered', 'product_uom_qty', 'order_id.state')
def _get_to_invoice_qty(self):
"""
Compute the quantity to invoice. If the invoice policy is order, the quantity to invoice is
calculated from the ordered quantity. Otherwise, the quantity delivered is used.
"""
for line in self:
if line.order_id.state in ['sale', 'done']:
if line.product_id.invoice_policy == 'order':
line.qty_to_invoice = line.product_uom_qty - line.qty_invoiced
else:
line.qty_to_invoice = line.qty_delivered - line.qty_invoiced
else:
line.qty_to_invoice = 0
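    # Example (hypothetical quantities): with invoice_policy='order', product_uom_qty=5 and
    # qty_invoiced=2 give qty_to_invoice=3; with invoice_policy='delivery', qty_delivered=1 and
    # qty_invoiced=0 give qty_to_invoice=1. The quantity stays 0 while the order is a quotation.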
@api.depends('invoice_lines.invoice_id.state', 'invoice_lines.quantity')
def _get_invoice_qty(self):
"""
        Compute the quantity invoiced. In case of a refund, the quantity invoiced is decreased. Note
        that this is the case only if the refund is generated from the SO, and that is intentional: if
        any refund automatically decreased the invoiced quantity, there would be a risk of reinvoicing
        it automatically, which may not be wanted at all. That's why the refund has to be created from the SO.
"""
for line in self:
qty_invoiced = 0.0
for invoice_line in line.invoice_lines:
if invoice_line.invoice_id.state != 'cancel':
if invoice_line.invoice_id.type == 'out_invoice':
qty_invoiced += invoice_line.quantity
elif invoice_line.invoice_id.type == 'out_refund':
qty_invoiced -= invoice_line.quantity
line.qty_invoiced = qty_invoiced
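    # Example (hypothetical): an open customer invoice for qty 3 plus a refund for qty 1, both
    # linked through invoice_lines, yield qty_invoiced = 3 - 1 = 2; cancelled invoices are skipped.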
@api.depends('price_subtotal', 'product_uom_qty')
def _get_price_reduce(self):
for line in self:
line.price_reduce = line.price_subtotal / line.product_uom_qty if line.product_uom_qty else 0.0
@api.multi
def _compute_tax_id(self):
for line in self:
fpos = line.order_id.fiscal_position_id or line.order_id.partner_id.property_account_position_id
if fpos:
# The superuser is used by website_sale in order to create a sale order. We need to make
# sure we only select the taxes related to the company of the partner. This should only
# apply if the partner is linked to a company.
if self.env.uid == SUPERUSER_ID and line.order_id.company_id:
taxes = fpos.map_tax(line.product_id.taxes_id).filtered(lambda r: r.company_id == line.order_id.company_id)
else:
taxes = fpos.map_tax(line.product_id.taxes_id)
line.tax_id = taxes
else:
line.tax_id = line.product_id.taxes_id if line.product_id.taxes_id else False
@api.multi
def _prepare_order_line_procurement(self, group_id=False):
self.ensure_one()
return {
'name': self.name,
'origin': self.order_id.name,
'date_planned': datetime.strptime(self.order_id.date_order, DEFAULT_SERVER_DATETIME_FORMAT) + timedelta(days=self.customer_lead),
'product_id': self.product_id.id,
'product_qty': self.product_uom_qty,
'product_uom': self.product_uom.id,
'company_id': self.order_id.company_id.id,
'group_id': group_id,
'sale_line_id': self.id
}
@api.multi
def _action_procurement_create(self):
"""
Create procurements based on quantity ordered. If the quantity is increased, new
procurements are created. If the quantity is decreased, no automated action is taken.
"""
precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
new_procs = self.env['procurement.order'] #Empty recordset
for line in self:
if line.state != 'sale' or not line.product_id._need_procurement():
continue
qty = 0.0
for proc in line.procurement_ids:
qty += proc.product_qty
if float_compare(qty, line.product_uom_qty, precision_digits=precision) >= 0:
continue
if not line.order_id.procurement_group_id:
vals = line.order_id._prepare_procurement_group()
line.order_id.procurement_group_id = self.env["procurement.group"].create(vals)
vals = line._prepare_order_line_procurement(group_id=line.order_id.procurement_group_id.id)
vals['product_qty'] = line.product_uom_qty - qty
new_proc = self.env["procurement.order"].create(vals)
new_procs += new_proc
new_procs.run()
return new_procs
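    # Example (hypothetical): a confirmed line ordered for 10 units whose existing procurements
    # already cover 6 units gets a single extra procurement created for the remaining 4 units.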
@api.model
def _get_analytic_invoice_policy(self):
return ['cost']
@api.model
def _get_analytic_track_service(self):
return []
@api.model
def create(self, values):
onchange_fields = ['name', 'price_unit', 'product_uom', 'tax_id']
if values.get('order_id') and values.get('product_id') and any(f not in values for f in onchange_fields):
line = self.new(values)
line.product_id_change()
for field in onchange_fields:
if field not in values:
values[field] = line._fields[field].convert_to_write(line[field])
line = super(SaleOrderLine, self).create(values)
if line.state == 'sale':
if (not line.order_id.project_id and
(line.product_id.track_service in self._get_analytic_track_service() or
line.product_id.invoice_policy in self._get_analytic_invoice_policy())):
line.order_id._create_analytic_account()
line._action_procurement_create()
return line
@api.multi
def write(self, values):
lines = False
if 'product_uom_qty' in values:
precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
lines = self.filtered(
lambda r: r.state == 'sale' and float_compare(r.product_uom_qty, values['product_uom_qty'], precision_digits=precision) == -1)
result = super(SaleOrderLine, self).write(values)
if lines:
lines._action_procurement_create()
return result
order_id = fields.Many2one('sale.order', string='Order Reference', required=True, ondelete='cascade', index=True, copy=False)
name = fields.Text(string='Description', required=True)
sequence = fields.Integer(string='Sequence', default=10)
invoice_lines = fields.Many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_line_id', string='Invoice Lines', copy=False)
invoice_status = fields.Selection([
('upselling', 'Upselling Opportunity'),
('invoiced', 'Fully Invoiced'),
('to invoice', 'To Invoice'),
('no', 'Nothing to Invoice')
], string='Invoice Status', compute='_compute_invoice_status', store=True, readonly=True, default='no')
price_unit = fields.Float('Unit Price', required=True, digits=dp.get_precision('Product Price'), default=0.0)
price_subtotal = fields.Monetary(compute='_compute_amount', string='Subtotal', readonly=True, store=True)
price_tax = fields.Monetary(compute='_compute_amount', string='Taxes', readonly=True, store=True)
price_total = fields.Monetary(compute='_compute_amount', string='Total', readonly=True, store=True)
price_reduce = fields.Monetary(compute='_get_price_reduce', string='Price Reduce', readonly=True, store=True)
tax_id = fields.Many2many('account.tax', string='Taxes')
discount = fields.Float(string='Discount (%)', digits=dp.get_precision('Discount'), default=0.0)
product_id = fields.Many2one('product.product', string='Product', domain=[('sale_ok', '=', True)], change_default=True, ondelete='restrict', required=True)
product_uom_qty = fields.Float(string='Quantity', digits=dp.get_precision('Product Unit of Measure'), required=True, default=1.0)
product_uom = fields.Many2one('product.uom', string='Unit of Measure', required=True)
qty_delivered_updateable = fields.Boolean(compute='_compute_qty_delivered_updateable', string='Can Edit Delivered', readonly=True, default=True)
qty_delivered = fields.Float(string='Delivered', copy=False, digits=dp.get_precision('Product Unit of Measure'), default=0.0)
qty_to_invoice = fields.Float(
compute='_get_to_invoice_qty', string='To Invoice', store=True, readonly=True,
digits=dp.get_precision('Product Unit of Measure'), default=0.0)
qty_invoiced = fields.Float(
compute='_get_invoice_qty', string='Invoiced', store=True, readonly=True,
digits=dp.get_precision('Product Unit of Measure'), default=0.0)
salesman_id = fields.Many2one(related='order_id.user_id', store=True, string='Salesperson', readonly=True)
currency_id = fields.Many2one(related='order_id.currency_id', store=True, string='Currency', readonly=True)
company_id = fields.Many2one(related='order_id.company_id', string='Company', store=True, readonly=True)
order_partner_id = fields.Many2one(related='order_id.partner_id', store=True, string='Customer')
state = fields.Selection([
('draft', 'Quotation'),
('sent', 'Quotation Sent'),
('sale', 'Sale Order'),
('done', 'Done'),
('cancel', 'Cancelled'),
], related='order_id.state', string='Order Status', readonly=True, copy=False, store=True, default='draft')
customer_lead = fields.Float(
'Delivery Lead Time', required=True, default=0.0,
help="Number of days between the order confirmation and the shipping of the products to the customer", oldname="delay")
procurement_ids = fields.One2many('procurement.order', 'sale_line_id', string='Procurements')
@api.multi
def _prepare_invoice_line(self, qty):
"""
Prepare the dict of values to create the new invoice line for a sales order line.
:param qty: float quantity to invoice
"""
self.ensure_one()
res = {}
account = self.product_id.property_account_income_id or self.product_id.categ_id.property_account_income_categ_id
if not account:
raise UserError(_('Please define income account for this product: "%s" (id:%d) - or for its category: "%s".') % \
(self.product_id.name, self.product_id.id, self.product_id.categ_id.name))
fpos = self.order_id.fiscal_position_id or self.order_id.partner_id.property_account_position_id
if fpos:
account = fpos.map_account(account)
res = {
'name': self.name,
'sequence': self.sequence,
'origin': self.order_id.name,
'account_id': account.id,
'price_unit': self.price_unit,
'quantity': qty,
'discount': self.discount,
'uom_id': self.product_uom.id,
'product_id': self.product_id.id or False,
'invoice_line_tax_ids': [(6, 0, self.tax_id.ids)],
'account_analytic_id': self.order_id.project_id.id,
}
return res
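    # Example (hypothetical): a product without its own income account falls back to the income
    # account of its category; a fiscal position set on the order remaps that account through
    # fpos.map_account() before it is written on the invoice line.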
@api.multi
def invoice_line_create(self, invoice_id, qty):
"""
Create an invoice line. The quantity to invoice can be positive (invoice) or negative
(refund).
:param invoice_id: integer
:param qty: float quantity to invoice
"""
precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
for line in self:
if not float_is_zero(qty, precision_digits=precision):
vals = line._prepare_invoice_line(qty=qty)
vals.update({'invoice_id': invoice_id, 'sale_line_ids': [(6, 0, [line.id])]})
self.env['account.invoice.line'].create(vals)
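    # Example (hypothetical): passing a negative qty reuses the same preparation logic to create a
    # refund (credit note) line instead of a regular invoice line.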
@api.multi
@api.onchange('product_id')
def product_id_change(self):
if not self.product_id:
return {'domain': {'product_uom': []}}
vals = {}
domain = {'product_uom': [('category_id', '=', self.product_id.uom_id.category_id.id)]}
if not self.product_uom or (self.product_id.uom_id.category_id.id != self.product_uom.category_id.id):
vals['product_uom'] = self.product_id.uom_id
product = self.product_id.with_context(
lang=self.order_id.partner_id.lang,
partner=self.order_id.partner_id.id,
quantity=self.product_uom_qty,
date=self.order_id.date_order,
pricelist=self.order_id.pricelist_id.id,
uom=self.product_uom.id
)
name = product.name_get()[0][1]
if product.description_sale:
name += '\n' + product.description_sale
vals['name'] = name
self._compute_tax_id()
if self.order_id.pricelist_id and self.order_id.partner_id:
vals['price_unit'] = self.env['account.tax']._fix_tax_included_price(product.price, product.taxes_id, self.tax_id)
self.update(vals)
return {'domain': domain}
@api.onchange('product_uom', 'product_uom_qty')
def product_uom_change(self):
if not self.product_uom:
self.price_unit = 0.0
return
if self.order_id.pricelist_id and self.order_id.partner_id:
product = self.product_id.with_context(
lang=self.order_id.partner_id.lang,
partner=self.order_id.partner_id.id,
quantity=self.product_uom_qty,
date_order=self.order_id.date_order,
pricelist=self.order_id.pricelist_id.id,
uom=self.product_uom.id,
fiscal_position=self.env.context.get('fiscal_position')
)
self.price_unit = self.env['account.tax']._fix_tax_included_price(product.price, product.taxes_id, self.tax_id)
@api.multi
def unlink(self):
if self.filtered(lambda x: x.state in ('sale', 'done')):
raise UserError(_('You can not remove a sale order line.\nDiscard changes and try setting the quantity to 0.'))
return super(SaleOrderLine, self).unlink()
@api.multi
def _get_delivered_qty(self):
'''
Intended to be overridden in sale_stock and sale_mrp
:return: the quantity delivered
:rtype: float
'''
return 0.0
class MailComposeMessage(models.TransientModel):
_inherit = 'mail.compose.message'
@api.multi
def send_mail(self, auto_commit=False):
if self._context.get('default_model') == 'sale.order' and self._context.get('default_res_id') and self._context.get('mark_so_as_sent'):
order = self.env['sale.order'].browse([self._context['default_res_id']])
if order.state == 'draft':
order.state = 'sent'
return super(MailComposeMessage, self.with_context(mail_post_autofollow=True)).send_mail(auto_commit=auto_commit)
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.model
def _get_default_team(self):
default_team_id = self.env['crm.team']._get_default_team_id()
return self.env['crm.team'].browse(default_team_id)
team_id = fields.Many2one('crm.team', string='Sales Team', default=_get_default_team, oldname='section_id')
@api.multi
def confirm_paid(self):
res = super(AccountInvoice, self).confirm_paid()
todo = set()
for invoice in self:
for line in invoice.invoice_line_ids:
for sale_line in line.sale_line_ids:
todo.add((sale_line.order_id, invoice.number))
for (order, name) in todo:
order.message_post(body=_("Invoice %s paid") % (name))
return res
class AccountInvoiceLine(models.Model):
_inherit = 'account.invoice.line'
sale_line_ids = fields.Many2many('sale.order.line', 'sale_order_line_invoice_rel', 'invoice_line_id', 'order_line_id', string='Sale Order Lines', readonly=True, copy=False)
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
sale_line_id = fields.Many2one('sale.order.line', string='Sale Order Line')
class ProductProduct(models.Model):
_inherit = 'product.product'
@api.multi
def _sales_count(self):
r = {}
domain = [
('state', 'in', ['sale', 'done']),
('product_id', 'in', self.ids),
]
for group in self.env['sale.report'].read_group(domain, ['product_id', 'product_uom_qty'], ['product_id']):
r[group['product_id'][0]] = group['product_uom_qty']
for product in self:
product.sales_count = r.get(product.id, 0)
return r
sales_count = fields.Integer(compute='_sales_count', string='# Sales')
class ProductTemplate(models.Model):
_inherit = 'product.template'
track_service = fields.Selection([('manual', 'Manually set quantities on order')], string='Track Service', default='manual')
@api.multi
@api.depends('product_variant_ids.sales_count')
def _sales_count(self):
for product in self:
product.sales_count = sum([p.sales_count for p in product.product_variant_ids])
@api.multi
def action_view_sales(self):
self.ensure_one()
action = self.env.ref('sale.action_product_sale_list')
product_ids = self.product_variant_ids.ids
return {
'name': action.name,
'help': action.help,
'type': action.type,
'view_type': action.view_type,
'view_mode': action.view_mode,
'target': action.target,
'context': "{'default_product_id': " + str(product_ids[0]) + "}",
'res_model': action.res_model,
'domain': [('state', 'in', ['sale', 'done']), ('product_id.product_tmpl_id', '=', self.id)],
}
sales_count = fields.Integer(compute='_sales_count', string='# Sales')
invoice_policy = fields.Selection(
[('order', 'Ordered quantities'),
('delivery', 'Delivered quantities'),
('cost', 'Invoice based on time and material')],
string='Invoicing Policy', default='order')
| agpl-3.0 | -3,618,811,220,402,187,000 | 46.314095 | 276 | 0.607718 | false |
sk413025/tilitools | ocsvm.py | 1 | 3767 | from cvxopt import matrix,spmatrix,sparse
from cvxopt.blas import dot,dotu
from cvxopt.solvers import qp
import numpy as np
from kernel import Kernel
class OCSVM:
"""One-class support vector machine
'Estimating the support of a high-dimensional distribution.',
Sch\"{o}lkopf, B and Platt, J C and Shawe-Taylor, J and Smola, a J and Williamson, R C,
Microsoft, 1999
"""
MSG_ERROR = -1 # (scalar) something went wrong
MSG_OK = 0 # (scalar) everything alright
PRECISION = 10**-3 # important: effects the threshold, support vectors and speed!
kernel = [] # (matrix) our training kernel
samples = -1 # (scalar) amount of training data in X
C = 1.0 # (scalar) the regularization constant > 0
isDualTrained = False # (boolean) indicates if the oc-svm was trained in dual space
alphas = [] # (vector) dual solution vector
svs = [] # (vector) support vector indices
threshold = 0.0 # (scalar) the optimized threshold (rho)
def __init__(self, kernel, C=1.0):
self.kernel = kernel
self.C = C
(self.samples,foo) = kernel.size
print('Creating new one-class svm with {0} samples and C={1}.'.format(self.samples,C))
def train_dual(self):
"""Trains an one-class svm in dual with kernel."""
if (self.samples<1):
print('Invalid training data.')
return OCSVM.MSG_ERROR
# number of training examples
N = self.samples
# generate a kernel matrix
P = self.kernel
# there is no linear part of the objective
q = matrix(0.0, (N,1))
# sum_i alpha_i = A alpha = b = 1.0
A = matrix(1.0, (1,N))
b = matrix(1.0, (1,1))
# 0 <= alpha_i <= h = C
G1 = spmatrix(1.0, range(N), range(N))
G = sparse([G1,-G1])
h1 = matrix(self.C, (N,1))
h2 = matrix(0.0, (N,1))
h = matrix([h1,h2])
sol = qp(P,-q,G,h,A,b)
# mark dual as solved
self.isDualTrained = True
# store solution
self.alphas = sol['x']
# find support vectors
self.svs = []
for i in range(N):
if self.alphas[i]>OCSVM.PRECISION:
self.svs.append(i)
# find support vectors with alpha < C for threshold calculation
#self.threshold = 10**8
#flag = False
#for i in self.svs:
# if self.alphas[i]<(C-OCSVM.PRECISION) and flag==False:
# (self.threshold, MSG) = self.apply_dual(self.kernel[i,self.svs])
# flag=True
# break
# no threshold set yet?
#if (flag==False):
# (thres, MSG) = self.apply_dual(self.kernel[self.svs,self.svs])
# self.threshold = matrix(max(thres))
(thres, MSG) = self.apply_dual(self.kernel[self.svs,self.svs])
self.threshold = matrix(max(thres))
T = np.single(self.threshold)
cnt = 0
for i in range(len(self.svs)):
if thres[i,0]<(T-OCSVM.PRECISION):
cnt += 1
#print(self.alphas)
print('Found {0} support vectors. {1} of them are outliers.'.format(len(self.svs),cnt))
print('Threshold is {0}'.format(self.threshold))
return OCSVM.MSG_OK
def get_threshold(self):
return self.threshold
def get_support_dual(self):
return self.svs
def get_alphas(self):
return self.alphas
def get_support_dual_values(self):
return self.alphas[self.svs]
def set_train_kernel(self,kernel):
(dim1,dim2) = kernel.size
if (dim1!=dim2 and dim1!=self.samples):
print('(Kernel) Wrong format.')
return OCSVM.MSG_ERROR
self.kernel = kernel;
return OCSVM.MSG_OK
def apply_dual(self, kernel):
"""Application of a dual trained oc-svm."""
# number of training examples
N = self.samples
# check number and dims of test data
(tN,foo) = kernel.size
if (tN<1):
print('Invalid test data')
return 0, OCSVM.MSG_ERROR
if (self.isDualTrained!=True):
print('First train, then test.')
return 0, OCSVM.MSG_ERROR
# apply trained classifier
res = matrix([dotu(kernel[i,:],self.alphas[self.svs]) for i in range(tN)])
return res, OCSVM.MSG_OK
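# Minimal usage sketch, kept as a comment; the Kernel.get_kernel() helper and all values below are
# illustrative assumptions, not guaranteed by this module:
#
#   import numpy as np
#   from kernel import Kernel
#   Dtrain = np.random.randn(2, 100)                   # 2 features x 100 samples
#   K = Kernel.get_kernel(Dtrain, Dtrain, 'rbf', 1.0)  # assumed kernel helper
#   svm = OCSVM(K, C=1.0 / (100 * 0.05))               # nu = 0.05  ->  C = 1 / (N * nu)
#   svm.train_dual()
#   scores, msg = svm.apply_dual(K[:, svm.get_support_dual()])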
| mit | -5,839,695,781,475,350,000 | 24.452703 | 89 | 0.663127 | false |
jianrongdeng/LAMOST | ana/scripts/filesIO.py | 1 | 9324 | """
============================
script: filesIO.py
============================
date: 20170615 by Jianrong Deng
purpose:
handle input / output files
various data I/O functions
Input: input dir, date, time
"""
import const
import pickle
import os
#==========================
def getDir (path=const.test_path_out, date=const.test_date, datatype=const.test_datatype):
"""
    purpose: name scheme for the output data directory: <path>/<date>/<datatype>
"""
dir = path +'/' + date + '/' + datatype
return dir
#==========================
#==========================
def getFilename (path=const.test_path_out, date=const.test_date, datatype=const.test_datatype, det=const.test_det, time=const.test_time[0], tag = '-sub_overscan-sub_bias', postfix = '.fit'):
"""
    purpose: name scheme for output data files; the default tag marks net data (= raw - overscan - bias)
"""
filename = path +'/' + date +'/' + datatype +'/' + det +'-' + time +'-' + tag + postfix
return filename
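# Example (hypothetical arguments):
#   getFilename(path='/out', date='20171005', datatype='bias', det='16a', time='213051',
#               tag='stat', postfix='.dat')  ->  '/out/20171005/bias/16a-213051-stat.dat'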
#==========================
def setOutFilename(rawfile, d_tag='stat'):
"""
purpose: set output filename using environment variables
input: rawfile: filename of raw data
output: output filename
"""
# info from input filenames
d_path_in = os.environ['env_rawdata_onlypath'] # get environment variable
d_date = get_date(d_path_in) # get date
d_type=get_datatype(d_path_in)
d_det = get_det(rawfile)
d_time = get_time(rawfile)
# setup output file directory and names
d_path_out = os.environ['env_path_out'] # get environment variable
os.system('mkdir -p {}'.format(getDir(path=d_path_out, date=d_date, datatype=d_type))) # create output directory if not already exists
file_out = getFilename(path=d_path_out, date=d_date,det=d_det,time=d_time, tag = d_tag, postfix='.dat')
return file_out
#==========================
#==========================
def setFilename(infile, in_tag='stat.dat', out_tag='clusters.dat'):
"""
    purpose: derive an output filename from an input filename by swapping its trailing tag
input: infile: the input filename
in_tag: tag of the input file
out_tag: tag of the output file
output: output filename
"""
in_len = len(infile)
file_out = infile[0:in_len-len(in_tag)]
file_out = file_out + out_tag
return file_out
#==========================
def getMaskFilename(path=const.test_path_out, date=const.test_date, datatype=const.test_datatype, det=const.test_det, time=const.test_time[0], tag = '-3sigma_mask', postfix = '.fit'):
"""
purpose: name scheme for 3sigma-mask file
"""
filename = path + date + datatype + det + time + tag + postfix
return filename
#==========================
def getClusterFilename(path=const.test_path_out, date=const.test_date, datatype=const.test_datatype, det=const.test_det, time=const.test_time[0], tag = '-cluster', postfix = '.dat'):
"""
    purpose: name scheme for cluster file
"""
filename = path + date + datatype + det + time + tag + postfix
return filename
#============================
#============================
def dumpPixelLists(file_out, pixelLists, DEBUG = const.DEBUG):
#============================
"""
purpose: save pixel Lists to output file
input : filename and pixellist
"""
# save list to output file
try:
with open(file_out, "wb") as data:
pickle.dump(pixelLists, data)
except IOError as err:
print('File error: ', + str(err))
except pickle.pickleError as perr:
print('picklingerror:' + str(perr))
if DEBUG: printPixelLists(pixelLists)
return
#============================
#============================
def loadPixelLists(file_out, DEBUG = const.DEBUG):
#============================
"""
purpose: load pixel List from file
input : filename
output : pixellists
"""
# save list to output file
try:
with open(file_out, "rb") as data:
pixelLists = pickle.load(data)
except IOError as err:
print('File error: ', + str(err))
except pickle.pickleError as perr:
print('picklingerror:' + str(perr))
if DEBUG: printPixelLists(pixelLists)
return pixelLists
#============================
#============================
def printPixelLists(pixelLists, DEBUG = const.DEBUG_L2):
#============================
"""
purpose: print candidate pixel List
input : pixelLists
"""
print('number of images: ', len(pixelLists))
for im in pixelLists: # loop through five images
         print('number of candidate pixels (clusters) in the image: ', len(im))
if DEBUG:
for ip in im:
print (ip[0], ip[1], int(ip[2]))
return
#============================
#============================
def dumpStat(file_stat, stat, DEBUG = const.DEBUG_L2):
#============================
"""
purpose: save stat info (mean and sstd ) to output file
input : file_stat and data stat(mean and sstd)
"""
try:
with open(file_stat, "wb") as data:
pickle.dump(stat, data)
except IOError as err:
print('File error: ', + str(err))
except pickle.pickleError as perr:
print('picklingerror:' + str(perr))
if DEBUG:
printStats (stat)
return
#============================
#============================
def loadStat(file_stat, DEBUG = const.DEBUG):
#============================
"""
    purpose: load stat info (mean and sstd) from the stat file
    input : file_stat;  output : data stat (mean and sstd)
"""
try:
with open(file_stat, "rb") as data:
stat = pickle.load(data)
except IOError as err:
print('File error: ', + str(err))
except pickle.pickleError as perr:
print('picklingerror:' + str(perr))
if DEBUG:
printStats (stat)
return stat
#============================
#============================
def printStats(stats):
#============================
"""
purpose: print stat info (mean and sstd )
input : data stat(mean and sstd)
"""
print ('image stat where [0-4] is real data, [5] is bias medium')
for ist in range(len(stats)):
print ('image :', ist, 'mean =', stats[ist][0], ', sstd =', stats[ist][1])
return
#============================
#============================
def get_onlyrawfilenames(DEBUG=const.DEBUG_L2):
"""
purpose: get rawfilenames from environment variables
"""
if DEBUG: # in debug mode, check if file exists
os.system("${env_rawdata_onlypath:?}") # ${variable:?} check if the variable is set
os.system("ls -l ${env_rawdata_onlypath:?}/${env_rawdata_onlyfilenames_0:?}")
os.system("ls -l ${env_rawdata_onlypath:?}/${env_rawdata_onlyfilenames_1:?}")
rawfiles=[]
rawfiles.append( os.environ['env_rawdata_onlyfilenames_0'])
rawfiles.append( os.environ['env_rawdata_onlyfilenames_1'])
rawfiles.append( os.environ['env_rawdata_onlyfilenames_2'])
rawfiles.append( os.environ['env_rawdata_onlyfilenames_3'])
rawfiles.append( os.environ['env_rawdata_onlyfilenames_4'])
return rawfiles
#============================
#============================
def get_rawfilenames(DEBUG=const.DEBUG_L2):
"""
purpose: get rawfilenames (with pathname) from environment variables
output: rawfilenames with pathname
"""
path= os.environ['env_rawdata_onlypath']
rawfiles= get_onlyrawfilenames()
for ir in range(len(rawfiles)):
rawfiles[ir]=path + '/' + rawfiles[ir]
return rawfiles
#============================
#============================
def get_det(filename):
"""
    purpose: strip the detector name from the filename
"""
temp = filename.strip().split('-')
det=temp[0]+'-' + temp[1]
return det
#============================
#============================
def get_times(filenames):
"""
purpose: strip the time stamps from filenames
"""
times = []
for ifile in filenames:
times.append(get_time(ifile))
return times
#============================
#============================
def get_time(filename):
"""
purpose: strip the time stamp from the filename
"""
temp = filename.strip().split('-')
return temp[2]
#============================
#============================
def get_date(pathname, DEBUG=const.DEBUG_L2):
"""
purpose: strip the date stamps from pathname
"""
temp = pathname.strip().split('/')
date = temp[3]
if DEBUG:
print('pathname = ', pathname, '\t date =', date)
return date
#============================
#============================
def get_datatype(pathname):
"""
purpose: strip the data type info from pathname
"""
temp = pathname.strip().split('/')
return temp[4]
#============================
#============================
class filename_rawdata:
"""
purpose: filename class for rawdata
"""
#============================
def __init__(self, a_det, a_dType, a_date, a_times=[]):
"""
purpose: initialization
"""
self.det = a_det
self.dType = a_dType
self.date = a_date
self.times = a_times
#============================
| gpl-3.0 | -551,796,630,725,550,400 | 28.506329 | 191 | 0.525097 | false |
fbzhong/sublime-closure-linter | gjslint.py | 1 | 4010 | import os
import re
import shutil
import sublime
import sublime_plugin
from const import *
from listener import *
from statusprocess import *
from asyncprocess import *
class ShowClosureLinterResultCommand(sublime_plugin.WindowCommand):
"""show closure linter result"""
def run(self):
self.window.run_command("show_panel", {"panel": "output."+RESULT_VIEW_NAME})
class ClosureLinterCommand(sublime_plugin.WindowCommand):
def run(self):
s = sublime.load_settings(SETTINGS_FILE)
file_path = self.window.active_view().file_name()
file_name = os.path.basename(file_path)
self.debug = s.get('debug', False)
self.buffered_data = ''
self.file_path = file_path
self.file_name = file_name
self.is_running = True
self.tests_panel_showed = False
self.ignored_error_count = 0
self.ignore_errors = s.get('ignore_errors', [])
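    # ignore_errors is expected to hold regular expressions (hypothetical example:
    # ["E:0131", "Line too long"]) that append_data() matches against each gjslint output line.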
self.init_tests_panel()
cmd = '"' + s.get('gjslint_path', 'jslint') + '" ' + s.get('gjslint_flags', '') + ' "' + file_path + '"'
if self.debug:
print "DEBUG: " + str(cmd)
AsyncProcess(cmd, self)
StatusProcess('Starting Closure Linter for file ' + file_name, self)
ClosureLinterEventListener.disabled = True
def init_tests_panel(self):
if not hasattr(self, 'output_view'):
self.output_view = self.window.get_output_panel(RESULT_VIEW_NAME)
self.output_view.set_name(RESULT_VIEW_NAME)
self.clear_test_view()
self.output_view.settings().set("file_path", self.file_path)
def show_tests_panel(self):
if self.tests_panel_showed:
return
self.window.run_command("show_panel", {"panel": "output."+RESULT_VIEW_NAME})
self.tests_panel_showed = True
def clear_test_view(self):
self.output_view.set_read_only(False)
edit = self.output_view.begin_edit()
self.output_view.erase(edit, sublime.Region(0, self.output_view.size()))
self.output_view.end_edit(edit)
self.output_view.set_read_only(True)
def append_data(self, proc, data, end=False):
self.buffered_data = self.buffered_data + data.decode("utf-8")
data = self.buffered_data.replace(self.file_path, self.file_name).replace('\r\n', '\n').replace('\r', '\n')
if end == False:
rsep_pos = data.rfind('\n')
if rsep_pos == -1:
# not found full line.
return
self.buffered_data = data[rsep_pos+1:]
data = data[:rsep_pos+1]
# ignore error.
text = data
if len(self.ignore_errors) > 0:
text = ''
for line in data.split('\n'):
if len(line) == 0:
continue
ignored = False
for rule in self.ignore_errors:
if re.search(rule, line):
ignored = True
self.ignored_error_count += 1
if self.debug:
print "text match line "
print "rule = " + rule
print "line = " + line
print "---------"
break
if ignored == False:
text += line + '\n'
self.show_tests_panel()
selection_was_at_end = (len(self.output_view.sel()) == 1 and self.output_view.sel()[0] == sublime.Region(self.output_view.size()))
self.output_view.set_read_only(False)
edit = self.output_view.begin_edit()
self.output_view.insert(edit, self.output_view.size(), text)
if end:
text = '\nclosure linter: ignored ' + str(self.ignored_error_count) + ' errors.\n'
self.output_view.insert(edit, self.output_view.size(), text)
# if selection_was_at_end:
# self.output_view.show(self.output_view.size())
self.output_view.end_edit(edit)
self.output_view.set_read_only(True)
# if end:
# self.output_view.run_command("goto_line", {"line": 1})
def update_status(self, msg, progress):
sublime.status_message(msg + " " + progress)
def proc_terminated(self, proc):
if proc.returncode == 0:
msg = self.file_name + ' lint free!'
else:
msg = ''
self.append_data(proc, msg, True)
ClosureLinterEventListener.disabled = False
| bsd-3-clause | 6,959,701,586,400,418,000 | 31.33871 | 134 | 0.625436 | false |
qedsoftware/commcare-hq | corehq/apps/userreports/sql/data_source.py | 1 | 5578 | import numbers
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext
from dimagi.utils.decorators.memoized import memoized
from sqlagg.columns import SimpleColumn
from sqlagg.sorting import OrderBy
from corehq.apps.reports.sqlreport import SqlData, DatabaseColumn
from corehq.apps.userreports.decorators import catch_and_raise_exceptions
from corehq.apps.userreports.exceptions import InvalidQueryColumn
from corehq.apps.userreports.mixins import ConfigurableReportDataSourceMixin
from corehq.apps.userreports.reports.sorting import ASCENDING
from corehq.apps.userreports.reports.specs import CalculatedColumn
from corehq.apps.userreports.reports.util import get_expanded_columns
from corehq.apps.userreports.sql.connection import get_engine_id
from corehq.sql_db.connections import connection_manager
class ConfigurableReportSqlDataSource(ConfigurableReportDataSourceMixin, SqlData):
@property
def engine_id(self):
return get_engine_id(self.config)
@property
def filters(self):
return filter(None, [fv.to_sql_filter() for fv in self._filter_values.values()])
@property
def filter_values(self):
return {k: v for fv in self._filter_values.values() for k, v in fv.to_sql_values().items()}
@property
def group_by(self):
# ask each column for its group_by contribution and combine to a single list
return [
group_by for col_id in self.aggregation_columns
for group_by in self._get_db_column_ids(col_id)
]
@property
def order_by(self):
# allow throwing exception if the report explicitly sorts on an unsortable column type
if self._order_by:
return [
OrderBy(order_by, is_ascending=(order == ASCENDING))
for sort_column_id, order in self._order_by
for order_by in self._get_db_column_ids(sort_column_id)
]
elif self.top_level_columns:
try:
return [
OrderBy(order_by, is_ascending=True)
for order_by in self._get_db_column_ids(self.top_level_columns[0].column_id)
]
except InvalidQueryColumn:
pass
return []
@property
def columns(self):
# This explicitly only includes columns that resolve to database queries.
# The name is a bit confusing but is hard to change due to its dependency in SqlData
db_columns = [c for c in self.inner_columns if not isinstance(c, CalculatedColumn)]
fields = {c.slug for c in db_columns}
return db_columns + [
DatabaseColumn('', SimpleColumn(deferred_filter.field))
for deferred_filter in self._deferred_filters.values()
if deferred_filter.field not in fields]
@memoized
@method_decorator(catch_and_raise_exceptions)
def get_data(self, start=None, limit=None):
ret = super(ConfigurableReportSqlDataSource, self).get_data(start=start, limit=limit)
for report_column in self.top_level_db_columns:
report_column.format_data(ret)
for computed_column in self.top_level_computed_columns:
for row in ret:
row[computed_column.column_id] = computed_column.wrapped_expression(row)
return ret
@method_decorator(catch_and_raise_exceptions)
def get_total_records(self):
qc = self.query_context()
for c in self.columns:
# TODO - don't append columns that are not part of filters or group bys
qc.append_column(c.view)
session = connection_manager.get_scoped_session(self.engine_id)
return qc.count(session.connection(), self.filter_values)
@method_decorator(catch_and_raise_exceptions)
def get_total_row(self):
def _clean_total_row(val, col):
if isinstance(val, numbers.Number):
return val
elif col.calculate_total:
return 0
return ''
def _get_relevant_column_ids(col, column_id_to_expanded_column_ids):
return column_id_to_expanded_column_ids.get(col.column_id, [col.column_id])
expanded_columns = get_expanded_columns(self.top_level_columns, self.config)
qc = self.query_context()
for c in self.columns:
qc.append_column(c.view)
session = connection_manager.get_scoped_session(self.engine_id)
totals = qc.totals(
session.connection(),
[
column_id
for col in self.top_level_columns for column_id in _get_relevant_column_ids(col, expanded_columns)
if col.calculate_total
],
self.filter_values
)
total_row = [
_clean_total_row(totals.get(column_id), col)
for col in self.top_level_columns for column_id in _get_relevant_column_ids(
col, expanded_columns
)
]
        if total_row and total_row[0] == '':
total_row[0] = ugettext('Total')
return total_row
def _get_db_column_ids(self, column_id):
# for columns that end up being complex queries (e.g. aggregate dates)
# there could be more than one column ID and they may specify aliases
if column_id in self._column_configs:
return self._column_configs[column_id].get_query_column_ids()
else:
# if the column isn't found just treat it as a normal field
return [column_id]
| bsd-3-clause | 1,145,583,826,430,436,100 | 37.736111 | 114 | 0.6436 | false |
andrejbauer/jurij | jurij/graph.py | 1 | 4649 | # -*- encoding: utf-8 -*-
# A very simple implementation of graphs in python, including graphs
# embedded in the plane.
class Graph():
"""A graph stored as an adjacency dictionary."""
def __init__(self, data=None, vertices=None, edges=None,
vertex_labels=None, edge_labels=None):
"""Construct a graph to from the given data.
The object must define methods vertices() and edges() which
return iterators on vertices and edges, respectively. Vertices
are required to be hashable objects while edges are pairs of
vertices."""
if type(data) == dict:
# the graph is given as an adjancency dictionary
self.adjacency = dict([(x,set(ys)) for (x,ys) in data.items()])
elif type(data) in (list, tuple):
# the graph is given as a list of edges
self.adjacency = {}
for (x,y) in data:
self.adjacency[x] = set()
self.adjacency[y] = set()
for (x,y) in data: self.adjacency[x].add(y)
elif data is None:
self.adjacency = {}
if vertices is not None:
for x in vertices: self.adjacency[x] = set()
if edges is not None:
for (x,y) in edges:
if x not in self.adjacency: self.adjacency[x] = set()
if y not in self.adjacency: self.adjacency[y] = set()
self.adjacency[x].add(y)
else:
# the graph is given by an object which can produce
# a list of vertices and a list of edges
self.adjacency = dict([(x,set()) for x in data.vertices()])
for (x,y) in data.edges(): self.adjacency[x].add(y)
self.vertex_labels = {}
if vertex_labels is not None:
for x in self.adjacency:
if x in vertex_labels:
                    self.vertex_labels[x] = vertex_labels[x]
elif hasattr(data, 'vertex_label'):
for x in self.adjacency:
u = data.vertex_label(x)
if u is not None: self.vertex_labels[x] = u
self.edge_labels = {}
if edge_labels is not None:
for (x,ys) in self.adjacency.items():
for y in ys:
if (x,y) in edge_labels:
self.edge_labels[(x,y)] = edge_labels[(x,y)]
elif hasattr(data, 'edge_label'):
for (x,ys) in self.adjacency.items():
for y in ys:
u = data.edge_label((x,y))
if u is not None: self.edge_labels[(x,y)] = u
def __repr__(self):
return 'Graph({0})'.format(self.adjacency)
def vertices(self):
'''The set vertices of the graph as an iterator.'''
return self.adjacency.keys()
def edges(self):
'''The edges of the graph as an iterator.'''
for (u, vs) in self.adjacency.items():
for v in vs:
yield (u,v)
def opposite(self):
'''The opposite adjacency, i.e., with all edges reversed.'''
if hasattr(self, '_opposite_adjacency'):
return self._opposite_adjacency
else:
self._opposite_adjacency = dict([(x,set()) for x in self.vertices()])
for (x, ys) in self.adjacency.items():
for y in ys:
self._opposite_adjacency[y].add(x)
return self._opposite_adjacency
def vertex_label(self,x):
return self.vertex_labels.get(x)
def edge_label(self,e):
return self.edge_labels.get(e)
def add_vertex(self,x):
if x not in self.adjacency:
            self.adjacency[x] = set()
def remove_vertex(self,x):
del self.adjacency[x]
for xs in self.adjacency.values():
            xs.discard(x)  # discard(): x need not appear in every adjacency set
def add_edge(self,e):
(x,y) = e
self.adjacency[x].add(y)
def remove_edge(self,e):
(x,y) = e
self.adjacency[x].remove(y)
def vertex_size(self):
return len(self.vertices())
def edge_size(self):
        return sum(1 for _ in self.edges())  # edges() is a generator, so len() would fail
def product(g,h):
'''The product of graphs g and h.'''
return Graph(vertices = [(x,y) for x in g.vertices() for y in h.vertices()],
edges = [((x,u), (x,v)) for x in g.vertices() for (u,v) in h.edges()] +
[((u,y), (v,y)) for (u,v) in g.edges() for y in h.vertices()])
def cone(g):
'''The cone over g.'''
k = 0
adj = {}
for x in g.vertices():
adj[x] = g.adjacency[x]
if type(x) == int: k = max(k, x+1)
adj[k] = g.vertices()
return Graph(adj)
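# Minimal usage sketch (hypothetical data):
#   g = Graph({0: [1], 1: [2], 2: [0]})  # a directed 3-cycle from an adjacency dictionary
#   sorted(g.edges())                    # -> [(0, 1), (1, 2), (2, 0)]
#   p = product(g, g)                    # vertices of p are pairs (x, y)
#   c = cone(g)                          # adds a fresh vertex (3 here) with edges to every vertex of g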
| bsd-2-clause | -6,448,450,689,787,050,000 | 35.03876 | 88 | 0.52678 | false |
twitterdev/twitter-python-ads-sdk | examples/audience_summary.py | 1 | 1125 | from twitter_ads.client import Client
from twitter_ads.targeting import AudienceSummary
CONSUMER_KEY = 'your consumer key'
CONSUMER_SECRET = 'your consumer secret'
ACCESS_TOKEN = 'access token'
ACCESS_TOKEN_SECRET = 'access token secret'
ACCOUNT_ID = 'account id'
# initialize the client
client = Client(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
# load the advertiser account instance
account = client.accounts(ACCOUNT_ID)
# targeting criteria params
params = {
"targeting_criteria": [
{
"targeting_type":"LOCATION",
"targeting_value":"96683cc9126741d1"
},
{
"targeting_type":"BROAD_KEYWORD",
"targeting_value":"cats"
},
{
"targeting_type":"SIMILAR_TO_FOLLOWERS_OF_USER",
"targeting_value": "14230524"
},
{
"targeting_type":"SIMILAR_TO_FOLLOWERS_OF_USER",
"targeting_value": "90420314"
}
]
}
audience_summary = AudienceSummary.load(account=account, params=params)
print (audience_summary.audience_size)
| mit | 1,429,144,818,825,178,400 | 27.125 | 81 | 0.625778 | false |
pxzhang94/GAN | GAN/wasserstein_gan/wgan_tensorflow.py | 1 | 3148 | import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import os
mb_size = 32
X_dim = 784
z_dim = 10
h_dim = 128
mnist = input_data.read_data_sets('../../MNIST_data', one_hot=True)
def plot(samples):
fig = plt.figure(figsize=(4, 4))
gs = gridspec.GridSpec(4, 4)
gs.update(wspace=0.05, hspace=0.05)
for i, sample in enumerate(samples):
ax = plt.subplot(gs[i])
plt.axis('off')
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_aspect('equal')
plt.imshow(sample.reshape(28, 28), cmap='Greys_r')
return fig
def xavier_init(size):
in_dim = size[0]
xavier_stddev = 1. / tf.sqrt(in_dim / 2.)
return tf.random_normal(shape=size, stddev=xavier_stddev)
X = tf.placeholder(tf.float32, shape=[None, X_dim])
D_W1 = tf.Variable(xavier_init([X_dim, h_dim]))
D_b1 = tf.Variable(tf.zeros(shape=[h_dim]))
D_W2 = tf.Variable(xavier_init([h_dim, 1]))
D_b2 = tf.Variable(tf.zeros(shape=[1]))
theta_D = [D_W1, D_W2, D_b1, D_b2]
z = tf.placeholder(tf.float32, shape=[None, z_dim])
G_W1 = tf.Variable(xavier_init([z_dim, h_dim]))
G_b1 = tf.Variable(tf.zeros(shape=[h_dim]))
G_W2 = tf.Variable(xavier_init([h_dim, X_dim]))
G_b2 = tf.Variable(tf.zeros(shape=[X_dim]))
theta_G = [G_W1, G_W2, G_b1, G_b2]
def sample_z(m, n):
return np.random.uniform(-1., 1., size=[m, n])
def generator(z):
G_h1 = tf.nn.relu(tf.matmul(z, G_W1) + G_b1)
G_log_prob = tf.matmul(G_h1, G_W2) + G_b2
G_prob = tf.nn.sigmoid(G_log_prob)
return G_prob
def discriminator(x):
D_h1 = tf.nn.relu(tf.matmul(x, D_W1) + D_b1)
out = tf.matmul(D_h1, D_W2) + D_b2
return out
G_sample = generator(z)
D_real = discriminator(X)
D_fake = discriminator(G_sample)
D_loss = tf.reduce_mean(D_real) - tf.reduce_mean(D_fake)
G_loss = -tf.reduce_mean(D_fake)
D_solver = (tf.train.RMSPropOptimizer(learning_rate=1e-4)
.minimize(-D_loss, var_list=theta_D))
G_solver = (tf.train.RMSPropOptimizer(learning_rate=1e-4)
.minimize(G_loss, var_list=theta_G))
clip_D = [p.assign(tf.clip_by_value(p, -0.01, 0.01)) for p in theta_D]
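# WGAN critic objective (Kantorovich-Rubinstein duality): maximize E[D(real)] - E[D(fake)], which is
# why the critic optimizer minimizes -D_loss above. Clipping the critic weights to [-0.01, 0.01] is
# the original WGAN heuristic for keeping the critic approximately 1-Lipschitz.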
sess = tf.Session()
sess.run(tf.global_variables_initializer())
if not os.path.exists('out/'):
os.makedirs('out/')
i = 0
for it in range(1000000):
for _ in range(5):
X_mb, _ = mnist.train.next_batch(mb_size)
_, D_loss_curr, _ = sess.run(
[D_solver, D_loss, clip_D],
feed_dict={X: X_mb, z: sample_z(mb_size, z_dim)}
)
_, G_loss_curr = sess.run(
[G_solver, G_loss],
feed_dict={z: sample_z(mb_size, z_dim)}
)
if it % 100 == 0:
print('Iter: {}; D loss: {:.4}; G_loss: {:.4}'
.format(it, D_loss_curr, G_loss_curr))
if it % 1000 == 0:
samples = sess.run(G_sample, feed_dict={z: sample_z(16, z_dim)})
fig = plot(samples)
plt.savefig('out/{}.png'
.format(str(i).zfill(3)), bbox_inches='tight')
i += 1
plt.close(fig)
| apache-2.0 | -6,329,197,380,667,734,000 | 24.184 | 76 | 0.589263 | false |
retorquere/zotero-better-bibtex | setup/item.py | 1 | 24607 | #!/usr/bin/env python3
from networkx.readwrite import json_graph
from collections import OrderedDict
import hashlib
import operator
import shlex
from functools import reduce
from http.client import RemoteDisconnected
from lxml import etree
from mako import exceptions
from mako.template import Template
from munch import Munch
from pytablewriter import MarkdownTableWriter
from urllib.error import HTTPError
from urllib.request import urlopen, urlretrieve, Request
import glob
import itertools
import json, jsonpatch, jsonpath_ng
import mako
import networkx as nx
import os
import sys
import re
import sys
import tarfile
import tempfile
import zipfile
import fnmatch
root = os.path.join(os.path.dirname(__file__), '..')
print('parsing Zotero/Juris-M schemas')
SCHEMA = Munch(root = os.path.join(root, 'schema'))
ITEMS = os.path.join(root, 'gen/items')
TYPINGS = os.path.join(root, 'gen/typings')
os.makedirs(SCHEMA.root, exist_ok=True)
os.makedirs(ITEMS, exist_ok=True)
os.makedirs(TYPINGS, exist_ok=True)
def readurl(url):
req = Request(url)
if ('api.github.com' in url) and (token := os.environ.get('GITHUB_TOKEN', None)): req.add_header('Authorization', f'token {token}')
return urlopen(req).read().decode('utf-8')
class fetch(object):
def __init__(self, client):
self.schema = os.path.join(SCHEMA.root, f'{client}.json')
if client == 'zotero':
releases = [
ref['ref'].split('/')[-1]
for ref in
json.loads(readurl('https://api.github.com/repos/zotero/zotero/git/refs/tags'))
]
releases += [
rel['version']
for rel in
json.loads(urlopen("https://www.zotero.org/download/client/manifests/release/updates-linux-x86_64.json").read().decode("utf-8"))
if not rel['version'] in releases
]
releases = [rel for rel in releases if rel.startswith('5.')]
releases = sorted(releases, key=lambda r: [int(n) for n in r.replace('m', '.').split('.')])
self.update(
client=client,
releases=releases,
download='https://www.zotero.org/download/client/dl?channel=release&platform=linux-x86_64&version={version}',
jarpath='Zotero_linux-x86_64/zotero.jar',
schema='resource/schema/global/schema.json'
)
elif client == 'jurism':
releases = [
ref['ref'].split('/')[-1].replace('v', '')
for ref in
json.loads(readurl('https://api.github.com/repos/juris-m/zotero/git/refs/tags'))
]
releases += [
rel
for rel in
readurl('https://github.com/Juris-M/assets/releases/download/client%2Freleases%2Fincrementals-linux/incrementals-release-linux').strip().split("\n")
if rel != '' and rel not in releases
]
releases = [rel for rel in releases if rel.startswith('5.') and 'm' in rel and not 'beta' in rel]
releases = sorted(releases, key=lambda r: [int(n) for n in r.replace('m', '.').split('.')])
self.update(
client=client,
releases=releases,
download='https://github.com/Juris-M/assets/releases/download/client%2Frelease%2F{version}/Jurism-{version}_linux-x86_64.tar.bz2',
jarpath='Jurism_linux-x86_64/jurism.jar',
schema='resource/schema/global/schema-jurism.json'
)
else:
raise ValueError(f'Unknown client {client}')
def hash(self, schema):
#print(schema.keys())
#'version', 'itemTypes', 'meta', 'csl', 'locales', 'release', 'hash'
return hashlib.sha512(json.dumps({ k: v for k, v in schema.items() if k in ('itemTypes', 'meta', 'csl')}, sort_keys=True).encode('utf-8')).hexdigest()
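  # Note: only 'itemTypes', 'meta' and 'csl' enter the hash, so releases that differ solely in
  # locales, version or release metadata are treated as carrying the same schema.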
def update(self, client, releases, download, jarpath, schema):
hashes_cache = os.path.join(SCHEMA.root, 'hashes.json')
itemtypes = os.path.join(SCHEMA.root, f'{client}-type-ids.json')
if os.path.exists(hashes_cache):
with open(hashes_cache) as f:
hashes = json.load(f, object_hook=OrderedDict)
else:
hashes = OrderedDict()
if not client in hashes:
hashes[client] = OrderedDict()
current = releases[-1]
if current in hashes[client] and os.path.exists(self.schema) and os.path.exists(itemtypes):
return
elif 'CI' in os.environ:
raise ValueError(f'{self.schema} out of date')
print(' updating', os.path.basename(self.schema))
for release in releases:
if release != current and release in hashes[client]: continue
with tempfile.NamedTemporaryFile() as tarball:
print(' downloading', download.format(version=release))
try:
urlretrieve(download.format(version=release), tarball.name)
tar = tarfile.open(tarball.name, 'r:bz2')
jar = tar.getmember(jarpath)
print(' extracting', jar.name)
jar.name = os.path.basename(jar.name)
tar.extract(jar, path=os.path.dirname(tarball.name))
jar = zipfile.ZipFile(os.path.join(os.path.dirname(tarball.name), jar.name))
itt = fnmatch.filter(jar.namelist(), f'**/system-*-{client}.sql')
assert len(itt) <= 1, itt
if len(itt) == 1:
itt = itt[0]
else:
itt = fnmatch.filter(jar.namelist(), '**/system-*.sql')
assert len(itt) == 1, itt
itt = itt[0]
with jar.open(itt) as f, open(itemtypes, 'wb') as i:
i.write(f.read())
try:
with jar.open(schema) as f:
client_schema = json.load(f)
with open(self.schema, 'w') as f:
json.dump(client_schema, f, indent=' ')
hashes[client][release] = self.hash(client_schema)
print(' release', release, 'schema', client_schema['version'], 'hash', hashes[client][release])
except KeyError:
hashes[client][release] = None
print(' release', release, 'does not have a bundled schema')
except HTTPError as e:
if e.code in [ 403, 404 ]:
print(' release', release, 'not available')
hashes[client][release] = None
else:
raise e
with open(hashes_cache, 'w') as f:
json.dump(hashes, f, indent=' ')
def __enter__(self):
self.f = open(self.schema)
return self.f
def __exit__(self, type, value, traceback):
self.f.close()
class jsonpath:
finders = {}
@classmethod
def parse(cls, path):
if not path in cls.finders: cls.finders[path] = jsonpath_ng.parse(path)
return cls.finders[path]
def patch(s, *ps):
# field/type order doesn't matter for BBT
for it in s['itemTypes']:
assert 'creatorTypes' in it
# assures primary is first
assert len(it['creatorTypes'])== 0 or [ct['creatorType'] for ct in it['creatorTypes'] if ct.get('primary', False)] == [it['creatorTypes'][0]['creatorType']]
s['itemTypes'] = {
itemType['itemType']: {
'itemType': itemType['itemType'],
'fields': { field['field']: field.get('baseField', field['field']) for field in itemType['fields'] },
'creatorTypes': [ct['creatorType'] for ct in itemType['creatorTypes'] ]
}
for itemType in s['itemTypes']
}
del s['locales']
for p in ps:
print('applying', p)
with open(os.path.join(SCHEMA.root, p)) as f:
s = jsonpatch.apply_patch(s, json.load(f))
return s
class ExtraFields:
def __init__(self):
self.changeid = 0
self.dg = nx.DiGraph()
self.color = Munch(
zotero='#33cccc',
csl='#99CC00',
label='#C0C0C0',
removed='#666666',
added='#0000FF'
)
def make_label(self, field):
label = field.replace('_', ' ').replace('-', ' ')
label = re.sub(r'([a-z])([A-Z])', r'\1 \2', label)
label = label.lower()
return label
def add_label(self, domain, name, label):
assert domain in ['csl', 'zotero'], (domain, name, label)
assert type(name) == str
assert type(label) == str
for label in [label, self.make_label(label)]:
attrs = {
'domain': 'label',
'name': label,
'graphics': {'h': 30.0, 'w': 7 * len(label), 'hasFill': 0, 'outline': self.color.label},
}
if re.search(r'[-_A-Z]', label): attrs['LabelGraphics'] = { 'color': self.color.label }
self.dg.add_node(f'label:{label}', **attrs)
self.dg.add_edge(f'label:{label}', f'{domain}:{name}', graphics={ 'targetArrow': 'standard' })
def add_mapping(self, from_, to, reverse=True):
mappings = [(from_, to)]
if reverse: mappings.append((to, from_))
for from_, to in mappings:
self.dg.add_edge(':'.join(from_), ':'.join(to), graphics={ 'targetArrow': 'standard' })
def add_var(self, domain, name, type_, client):
assert domain in ['csl', 'zotero']
assert type(name) == str
assert type_ in ['name', 'date', 'text']
node_id = f'{domain}:{name}'
if node_id in self.dg.nodes:
assert self.dg.nodes[node_id]['type'] == type_, (domain, name, self.dg.nodes[node_id]['type'], type_)
else:
self.dg.add_node(f'{domain}:{name}', domain=domain, name=name, type=type_, graphics={'h': 30.0, 'w': 7 * len(name), 'fill': self.color[domain]})
self.dg.nodes[node_id][client] = True
def load(self, schema, client):
typeof = {}
for field, meta in schema.meta.fields.items():
typeof[field] = meta.type
# add nodes & edges
for field, baseField in {str(f.path): f.value for f in jsonpath.parse('$.itemTypes.*.fields.*').find(schema)}.items():
self.add_var(domain='zotero', name=baseField, type_=typeof.get(baseField, 'text'), client=client)
for field in jsonpath.parse('$.itemTypes.*.creatorTypes[*]').find(schema):
self.add_var(domain='zotero', name=field.value, type_='name', client=client)
for fields in jsonpath.parse('$.csl.fields.text').find(schema):
for csl, zotero in fields.value.items():
self.add_var(domain='csl', name=csl, type_='text', client=client)
for field in zotero:
self.add_var(domain='zotero', name=field, type_='text', client=client)
self.add_mapping(from_=('csl', csl), to=('zotero', field))
for fields in jsonpath.parse('$.csl.fields.date').find(schema):
for csl, zotero in fields.value.items():
self.add_var(domain='csl', name=csl, type_='date', client=client)
if type(zotero) == str: zotero = [zotero] # juris-m has a list here, zotero strings
for field in zotero:
self.add_var(domain='zotero', name=field, type_='date', client=client)
self.add_mapping(from_=('csl', csl), to=('zotero', field))
for zotero, csl in schema.csl.names.items():
self.add_var(domain='csl', name=csl, type_='name', client=client)
self.add_var(domain='zotero', name=zotero, type_='name', client=client)
self.add_mapping(from_=('csl', csl), to=('zotero', zotero))
for field, type_ in schema.csl.unmapped.items():
if type_ != 'type': self.add_var(domain='csl', name=field, type_=type_, client=client)
# add labels
for node, data in list(self.dg.nodes(data=True)):
if data['domain'] == 'label': continue # how is this possible?
self.add_label(data['domain'], data['name'], data['name'])
for field, baseField in {str(f.path): f.value for f in jsonpath.parse('$.itemTypes.*.fields.*').find(schema)}.items():
if field == baseField: continue
self.add_label('zotero', baseField, field)
for alias, field in schema.csl.alias.items():
self.add_label('csl', field, alias)
def add_change(self, label, change):
if not label or label == '':
return str(change)
else:
return ','.join(label.split(',') + [ str(change) ])
def save(self):
stringizer = lambda x: self.dg.nodes[x]['name'] if x in self.dg.nodes else x
# remove multi-line text fields
for node, data in list(self.dg.nodes(data=True)):
if data['domain'] + '.' + data['name'] in [ 'zotero.abstractNote', 'zotero.extra', 'csl.abstract', 'csl.note' ]:
self.dg.remove_node(node)
# remove two or more incoming var edges, as that would incur overwrites (= data loss)
removed = set()
for node, data in self.dg.nodes(data=True):
incoming = reduce(lambda acc, edge: acc[self.dg.nodes[edge[0]]['domain']].append(edge) or acc, self.dg.in_edges(node), Munch(zotero=[], csl=[], label=[]))
for domain, edges in incoming.items():
if domain == 'label' or len(edges) < 2: continue
self.changeid += 1
for edge in edges:
removed.add(edge)
self.dg.edges[edge].update({
'removed': True,
'label': self.add_change(self.dg.edges[edge].get('label'), self.changeid),
'graphics': { 'style': 'dashed', 'fill': self.color.removed, 'targetArrow': 'standard' },
'LabelGraphics': { 'color': self.color.label },
})
# hop-through labels. Memorize here which labels had a direct connection *before any expansion*
labels = {
label: set([self.dg.nodes[edge[1]]['domain'] for edge in self.dg.out_edges(label)])
for label, data in self.dg.nodes(data=True)
if data['domain'] == 'label' and not re.search(r'[-_A-Z]', data['name']) # a label but not a shadow label
}
for u, vs in dict(nx.all_pairs_dijkstra_path(self.dg, weight=lambda u, v, d: None if d.get('removed', False) else 1)).items():
# only interested in shortest paths that originate in a label
if not u in labels: continue
for v, path in vs.items():
if u == v: continue # no loops obviously
if self.dg.has_edge(u, v): continue # already in place
if len(path) != 3: continue # only consider one-step hop-through
        # TODO: label already has direct edge to the hop-through domain -- this entails fanning out the data unnecessarily
if self.dg.nodes[v]['domain'] in labels[u]: continue
self.changeid += 1
for edge in zip(path, path[1:]):
self.dg.edges[edge].update({
'label': self.add_change(self.dg.edges[edge].get('label'), self.changeid),
})
self.dg.add_edge(u, v, label=str(self.changeid), added=True, graphics={ 'style': 'dashed', 'fill': self.color.added, 'targetArrow': 'standard' })
for u, vs in dict(nx.all_pairs_shortest_path(self.dg)).items():
if self.dg.nodes[u]['domain'] != 'label': continue
for v, path in vs.items():
# length of 3 means potential hop-through node
if u != v and len(path) == 3 and len(set(zip(path, path[1:])).intersection(removed)) > 0:
#print('removed', path)
pass
#for i, sg in enumerate(nx.weakly_connected_components(self.dg)):
# nx.draw(self.dg.subgraph(sg), with_labels=True)
# plt.savefig(f'{i}.png')
mapping = {}
for label, data in list(self.dg.nodes(data=True)):
if data['domain'] != 'label': continue
name = data['name']
var_nodes = [var for _, var in self.dg.out_edges(label)]
if len(var_nodes) == 0:
self.dg.remove_node(label)
else:
for var in var_nodes:
var = self.dg.nodes[var]
if not name in mapping: mapping[name] = {}
assert 'type' not in mapping[name] or mapping[name]['type'] == var['type']
mapping[name]['type'] = var['type']
domain = var['domain']
if not domain in mapping[name]: mapping[name][domain] = []
mapping[name][domain].append(var['name'])
# ensure names don't get mapped to multiple fields
for var, mapped in mapping.items():
if mapped['type'] != 'name': continue
assert len(mapped.get('zotero', [])) <= 1, (var, mapped)
assert len(mapped.get('csl', [])) <= 1, (var, mapped)
# docs
with open(os.path.join(root, 'site/layouts/shortcodes/extra-fields.md'), 'w') as f:
writer = MarkdownTableWriter()
writer.headers = ['label', 'type', 'zotero/jurism', 'csl']
writer.value_matrix = []
doc = {}
for label, data in self.dg.nodes(data=True):
if not ' ' in label or data['domain'] != 'label': continue
name = data['name']
doc[name] = {'zotero': [], 'csl': []}
for _, to in self.dg.out_edges(label):
data = self.dg.nodes[to]
if not 'type' in doc[name]:
doc[name]['type'] = data['type']
else:
assert doc[name]['type'] == data['type']
if data.get('zotero', False) == data.get('jurism', False):
postfix = ''
elif data.get('zotero'):
postfix = '\u00B2'
else:
postfix = '\u00B9'
doc[name][data['domain']].append(data['name'].replace('_', '\\_') + postfix)
for label, data in sorted(doc.items(), key=lambda x: x[0]):
writer.value_matrix.append((f'**{label}**', data['type'], ' / '.join(sorted(data['zotero'])), ' / '.join(sorted(data['csl']))))
writer.stream = f
writer.write_table()
with open(os.path.join(ITEMS, 'extra-fields.json'), 'w') as f:
json.dump(mapping, f, sort_keys=True, indent=' ')
# remove phantom labels for clarity
for label in [node for node, data in self.dg.nodes(data=True) if data['domain'] == 'label' and 'LabelGraphics' in data]:
self.dg.remove_node(label)
nx.write_gml(self.dg, 'mapping.gml', stringizer)
#with open('extra-fields-graph.json', 'w') as f:
# json.dump(json_graph.node_link_data(self.dg, {"link": "edges", "source": "from", "target": "to"}), f)
# # https://github.com/vasturiano/3d-force-graph
# https://neo4j.com/developer-blog/visualizing-graphs-in-3d-with-webgl/
#with open('mapping.json', 'w') as f:
# data = nx.readwrite.json_graph.node_link_data(self.dg)
# for node in data['nodes']:
# node.pop('graphics', None)
# node.pop('type', None)
# node['label'] = node.pop('name')
# for link in data['links']:
# link.pop('graphics', None)
# link.pop('LabelGraphics', None)
# json.dump(data, f, indent=' ')
with fetch('zotero') as z, fetch('jurism') as j:
print(' writing extra-fields')
ef = ExtraFields()
SCHEMA.zotero = Munch.fromDict(patch(json.load(z), 'schema.patch', 'zotero.patch'))
SCHEMA.jurism = Munch.fromDict(patch(json.load(j), 'schema.patch', 'jurism.patch'))
#with open('schema.json', 'w') as f:
# json.dump(SCHEMA.jurism, f, indent=' ')
# test for inconsistent basefield mapping
for schema in ['jurism', 'zotero']:
fieldmap = {}
for field_path, field, baseField in [(str(f.full_path), str(f.path), f.value) for f in jsonpath.parse(f'$.itemTypes.*.fields.*').find(SCHEMA[schema])]:
if not field in fieldmap:
fieldmap[field] = baseField
else:
assert baseField == fieldmap[field], (schema, field_path, baseField, fieldmap[field])
ef.load(SCHEMA.jurism, 'jurism')
ef.load(SCHEMA.zotero, 'zotero')
ef.save()
with open(os.path.join(SCHEMA.root, 'hashes.json')) as f:
min_version = {}
hashes = json.load(f, object_hook=OrderedDict)
for client in hashes.keys():
releases = [rel for rel, h in hashes[client].items() if h is not None]
current = releases[-1]
min_version[client] = current
for rel in reversed(releases):
if hashes[client][rel] != hashes[client][current]:
break
else:
min_version[client] = rel
with open(os.path.join(root, 'schema', 'supported.json'), 'w') as f:
json.dump(min_version, f)
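# Illustrative outcome (release numbers are hypothetical): if releases 6.0.20
# through 6.0.23 all share the current schema hash but 6.0.19 does not,
# min_version records 6.0.20 -- the oldest release still on the current schema.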
print(' writing creators')
creators = {'zotero': {}, 'jurism': {}}
for creatorTypes in jsonpath.parse('*.itemTypes.*.creatorTypes').find(SCHEMA):
if len(creatorTypes.value) == 0: continue
client, itemType = operator.itemgetter(0, 2)(str(creatorTypes.full_path).split('.'))
if not itemType in creators[client]: creators[client][itemType] = []
for creatorType in creatorTypes.value:
creators[client][itemType].append(creatorType)
with open(os.path.join(ITEMS, 'creators.json'), 'w') as f:
json.dump(creators, f, indent=' ', default=lambda x: list(x))
def template(tmpl):
return Template(filename=os.path.join(root, 'setup/templates', tmpl))
print(' writing typing for serialized item')
with open(os.path.join(TYPINGS, 'serialized-item.d.ts'), 'w') as f:
fields = sorted(list(set(field.value for field in jsonpath.parse('*.itemTypes.*.fields.*').find(SCHEMA))))
itemTypes = sorted(list(set(field.value for field in jsonpath.parse('*.itemTypes.*.itemType').find(SCHEMA))))
print(template('items/serialized-item.d.ts.mako').render(fields=fields, itemTypes=itemTypes).strip(), file=f)
print(' writing field simplifier')
with open(os.path.join(ITEMS, 'items.ts'), 'w') as f:
valid = Munch(type={}, field={})
for itemType in jsonpath.parse('*.itemTypes.*.itemType').find(SCHEMA):
client = str(itemType.full_path).split('.')[0]
itemType = itemType.value
if not itemType in valid.type:
valid.type[itemType] = client
if itemType == 'note':
valid.field[itemType] = {field: 'true' for field in 'itemType tags note id itemID dateAdded dateModified'.split(' ')}
elif itemType == 'attachment':
valid.field[itemType] = {field: 'true' for field in 'itemType tags id itemID dateAdded dateModified'.split(' ')}
else:
valid.field[itemType] = {field: 'true' for field in 'itemType creators tags attachments notes seeAlso id itemID dateAdded dateModified multi'.split(' ')}
elif valid.type[itemType] != client:
valid.type[itemType] = 'true'
for field in jsonpath.parse('*.itemTypes.*.fields.*').find(SCHEMA):
client, itemType = operator.itemgetter(0, 2)(str(field.full_path).split('.'))
for field in [str(field.path), field.value]:
if not field in valid.field[itemType]:
valid.field[itemType][field] = client
elif valid.field[itemType][field] != client:
valid.field[itemType][field] = 'true'
# map aliases to base names
DG = nx.DiGraph()
for field in jsonpath.parse('*.itemTypes.*.fields.*').find(SCHEMA):
client = str(field.full_path).split('.')[0]
baseField = field.value
field = str(field.path)
if field == baseField: continue
if not (data := DG.get_edge_data(field, baseField, default=None)):
DG.add_edge(field, baseField, client=client)
elif data['client'] != client:
DG.edges[field, baseField]['client'] = 'both'
aliases = {}
for field, baseField, client in DG.edges.data('client'):
if not client in aliases: aliases[client] = {}
if not baseField in aliases[client]: aliases[client][baseField] = []
aliases[client][baseField].append(field)
# map names to basenames
names = Munch(field={}, type={})
names.field['dateadded'] = Munch(jurism='dateAdded', zotero='dateAdded')
names.field['datemodified'] = Munch(jurism='dateModified', zotero='dateModified')
labels = {}
for field in jsonpath.parse('*.itemTypes.*.fields.*').find(SCHEMA):
client, itemType = operator.itemgetter(0, 2)(str(field.full_path).split('.'))
baseField = field.value
field = str(field.path)
for section, field, name in [('field', field.lower(), baseField), ('field', baseField.lower(), baseField), ('type', itemType.lower(), itemType)]:
if not field in names[section]:
names[section][field] = Munch.fromDict({ client: name })
elif not client in names[section][field]:
names[section][field][client] = name
else:
assert names[section][field][client] == name, (client, section, field, names[section][field][client], name)
if name == 'numPages':
label = 'Number of pages'
else:
label = name[0].upper() + re.sub('([a-z])([A-Z])', lambda m: m.group(1) + ' ' + m.group(2).lower(), re.sub('[-_]', ' ', name[1:]))
if not field in labels:
labels[field] = Munch.fromDict({ client: label })
elif not client in labels[field]:
labels[field][client] = label
else:
assert labels[field][client] == label, (client, field, labels[field][client], label)
try:
print(template('items/items.ts.mako').render(names=names, labels=labels, valid=valid, aliases=aliases).strip(), file=f)
except:
print(exceptions.text_error_template().render())
#stringizer = lambda x: DG.nodes[x]['name'] if x in DG.nodes else x
#nx.write_gml(DG, 'fields.gml') # , stringizer)
print(' writing csl-types')
with open(os.path.join(ITEMS, 'csl-types.json'), 'w') as f:
types = set()
for type_ in jsonpath.parse('*.csl.types.*').find(SCHEMA):
types.add(str(type_.full_path).split('.')[-1])
for type_ in jsonpath.parse('*.csl.unmapped.*').find(SCHEMA):
if type_.value == 'type': types.add(str(type_.full_path).split('.')[-1])
json.dump(list(types), f)
| mit | -3,831,744,764,635,071,500 | 40.011667 | 161 | 0.61897 | false |
hasadna/OpenTrain | webserver/opentrain/timetable/utils.py | 1 | 3883 | from models import TtStop,TtStopTime,TtTrip
import gtfs.models
from timetable.models import TtShape
import json
from common import ot_utils
import datetime
def build_from_gtfs(start_date,days=30):
build_stops()
end_date = start_date + datetime.timedelta(days=days-1)
print '=' * 50
print 'Start day = %s' % (start_date)
print 'End day = %s' % (end_date)
clean_trips(start_date, end_date)
build_trips(start_date, end_date)
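# Minimal usage sketch (the date below is hypothetical): rebuild a week of
# timetable data from the already imported GTFS tables, e.g.
# build_from_gtfs(datetime.date(2014, 1, 1), days=7)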
def build_stops():
stops = gtfs.models.Stop.objects.all()
for stop in stops:
if not TtStop.objects.filter(gtfs_stop_id=stop.stop_id).exists():
new_stop = TtStop(gtfs_stop_id = stop.stop_id,
stop_name = stop.stop_name,
stop_lat = stop.stop_lat,
stop_lon = stop.stop_lon,
stop_url = stop.stop_url)
new_stop.save()
print 'Added stop %s' % (new_stop)
def clean_trips(from_date,to_date):
qs = TtTrip.objects.filter(date__gte=from_date).filter(date__lte=to_date)
print 'Going to delete %s trips of dates %s to %s (incl)' % (qs.count(),from_date,to_date)
qs.delete()
def build_trips(from_date=None,to_date=None):
trips = gtfs.models.Trip.objects.all()
date_str = ot_utils.get_date_underscored()
print 'Total number of trips: %s' % (trips.count())
if from_date:
trips = trips.filter(service__start_date__gte=from_date)
if to_date:
trips = trips.filter(service__end_date__lte=to_date)
print 'number of trips in date range %s' % (trips.count())
trips_count = trips.count()
for idx,trip in enumerate(trips):
print 'Building trip %s/%s' % (idx,trips_count)
trip_date = trip.service.start_date
new_trip = TtTrip()
new_trip.gtfs_trip_id = trip.trip_id
new_trip.date = trip_date
assert trip.service.start_date == trip.service.end_date
new_trip.shape = _get_or_build_shape(trip.shape_id, date_str)
new_trip.save()
_build_stoptimes(new_trip,trip)
stops = list(new_trip.get_stop_times())
new_trip.from_stoptime = stops[0]
new_trip.to_stoptime = stops[-1]
new_trip.save()
def _get_or_build_shape(gtfs_shape_id,date_str):
try:
ttshape = TtShape.objects.get(gtfs_shape_id=gtfs_shape_id,gtfs_date_str=date_str)
return ttshape
except TtShape.DoesNotExist:
return _build_shape(gtfs_shape_id,date_str)
def _build_shape(gtfs_shape_id,date_str):
print 'Building shape for gtfs shape id = %s date_str = %s' % (gtfs_shape_id,date_str)
points = gtfs.models.Shape.objects.filter(shape_id=gtfs_shape_id).order_by('shape_pt_sequence')
point_list = []
for point in points:
point_list.append([point.shape_pt_lat,point.shape_pt_lon])
ttshape = TtShape(gtfs_shape_id=gtfs_shape_id,
gtfs_date_str=date_str,
points=json.dumps(point_list))
ttshape.save()
return ttshape
def _build_stoptimes(new_trip,trip):
stoptimes = trip.stoptime_set.all().order_by('stop_sequence')
new_stoptimes = []
for stoptime in stoptimes:
new_stop = TtStop.objects.get(gtfs_stop_id=stoptime.stop.stop_id)
exp_arrival = ot_utils.db_time_to_datetime(stoptime.arrival_time,new_trip.date)
exp_departure = ot_utils.db_time_to_datetime(stoptime.departure_time,new_trip.date)
new_stoptime = TtStopTime(stop=new_stop,
stop_sequence=stoptime.stop_sequence,
trip=new_trip,
exp_arrival=exp_arrival,
exp_departure=exp_departure)
new_stoptimes.append(new_stoptime)
TtStopTime.objects.bulk_create(new_stoptimes)
| bsd-3-clause | -7,730,519,627,983,113,000 | 40.308511 | 99 | 0.60443 | false |
aaronj1335/cs388-final-project | bin/plot.py | 1 | 6512 | #!/usr/bin/env python
import os
from os.path import join
from itertools import chain
from math import log
from pylab import plot, show, legend, close, figure, title, xlabel, ylabel, barh, savefig
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.ticker import FuncFormatter
N = 0 # problem size
P1 = 1 # num of level 1 threads
P2 = 2 # num of level 2 threads
T = 3 # running time
# all generated figures are saved to this directory
out_dir = "report/figures/"
# make the output directory if necessary
if not os.path.exists(out_dir):
os.makedirs(out_dir)
BLOCK = False
if 'BLOCK' not in globals():
BLOCK = True
def prettify(func):
def inner(*args, **kwargs):
# update a few matplotlib rcParams (use a serif font) before plotting
mpl.rcParams['font.family'] = 'serif'
return func(*args, **kwargs)
return inner
def is_truthy(x):
return bool(x)
def partition_by(data, key_fn):
items = {}
for i in data:
key = key_fn(i)
if key not in items:
items[key] = []
items[key].append(i)
return items.values()
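# Illustrative example (not part of the original script):
# partition_by([1, 2, 3, 4], lambda x: x % 2) -> [[2, 4], [1, 3]]
# (one group per key value; the order of the groups is not guaranteed).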
def read_data(d):
params = lambda f: tuple(map(int, f.split('.')[0].split('_')))
time = lambda f: (float(open(join(d, f)).read()),)
return sorted(params(f) + time(f) for f in os.listdir(d))
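# Assumed file layout for read_data (filenames here are examples): each result
# file is named like '2048_4_6.txt' (problem size, level-1 threads, level-2
# threads) and contains a single float runtime, so read_data yields sorted
# tuples such as (2048, 4, 6, 12.5) matching the N/P1/P2/T indices above.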
def weak_scaling_data(data):
wsdata = lambda d: [(i[P1] * i[P2], i[T], i) for i in d]
key_fn = lambda i: i[N] / (i[P1] * i[P2])
partitioned = partition_by(data, key_fn)
# select the fastest numbers
sets = []
ds = filter(is_truthy, [wsdata(d) for d in partitioned])
for d in ds:
seen = {}
for i in d:
if i[0] not in seen or seen[i[0]][1] > i[1]:
seen[i[0]] = i
sets.append(sorted(seen.values()))
max_len = max(map(len, sets))
sets = filter(lambda s: len(s) == max_len, sets)
return sets
def strong_scaling_data(data):
ssdata = lambda d: [(i[P1] * i[P2], i[T], i) for i in d]
key_fn = lambda i: i[N]
partitioned = partition_by(data, key_fn)
# select the fastest numbers
sets = []
ds = filter(is_truthy, [ssdata(d) for d in partitioned])
for d in ds:
seen = {}
for i in d:
if i[0] not in seen or seen[i[0]][1] > i[1]:
seen[i[0]] = i
sets.append(sorted(seen.values()))
return sets
@prettify
def plot_scaling(data, the_title, munger, labeler):
figure()
for d in munger(data):
zippd = zip(*d)
# special case
if 'Intel Weak' in the_title:
if str(2048*24) not in labeler(d):
continue
plot(zippd[0], zippd[1], 'o-', label=labeler(d))
legend()
xlabel('Threads')
ylabel('Time (seconds)')
ax = plt.gca()
current = map(int, ax.get_xticks())
# just to wash out dups
padding = sorted(set(current))
# put one tick at the end
padding += [max(padding) + padding[1] - padding[0]]
# ensure we start from zero
if padding[0] != 0:
padding = [0] + padding
# finalize xticks
ax.set_xticks(padding)
if 'Intel Weak' in the_title:
# force y axis to be int
yticks = ax.get_yticks()
# ensure these are ints
bounds = map(int, (min(yticks), max(yticks)))
ax.set_yticks(range(bounds[0]-2, bounds[1]+3))
t = "_".join(the_title.lower().split()) + ".pdf"
savefig(out_dir + t, dpi=100, format='pdf')
print t
def plot_weak_scaling(data, dataset=''):
labeler = lambda d: 'Ratio: ' + str(d[0][2][0] / (d[0][2][1] * d[0][2][2]) * 24)
plot_scaling(data, the_title=(dataset + ' Weak Scaling'),
munger=weak_scaling_data, labeler=labeler)
def plot_strong_scaling(data, dataset=''):
# need to multiply by 24 to find true problem size
labeler = lambda d: 'Problem size: ' + str(d[0][2][N] * 24)
plot_scaling(data, the_title=(dataset + ' Strong Scaling'),
munger=strong_scaling_data, labeler=labeler)
@prettify
def plot_parallelization_levels(data, n, p, dataset=''):
figure()
t = 'Coarse versus fine-grained parallelism'
if dataset:
t += ' (' + dataset + ')'
d = [(i[T], '%d X %d' % (i[P1], i[P2]))
for idx, i in enumerate(data)
if i[N] == n and i[P1] * i[P2] == p]
zippd = zip(*d)
xs = range(len(zippd[0]))
plot(xs, zippd[0], 'o-', label='Problem size: ' + str(n))
plt.xticks(xs, zippd[1])
legend()
xlabel('Coarse grained threads X fine grained threads')
ylabel('Time (seconds)')
t = "_".join(t.lower().split()) + ".pdf"
savefig(out_dir + t, dpi=100, format='pdf')
print t
@prettify
def plot_compiler_difference(gcc, intel):
n = max(i[N] for i in gcc)
gcc = [i for i in gcc if i[N] == n and i[P2] == 1]
intel = [i for i in intel if i[N] == n and i[P2] == 1]
d = [(i[P1] - 0.5, (i[T] - g[T]) / min(g[T], i[T]) * 100.)
for i, g in zip(intel, gcc)]
zippd = zip(*d)
figure()
plt.gca().xaxis.set_major_formatter(
FuncFormatter(lambda v, p: str(v) + ' %'))
t = 'Comparison of Intel and GNU compiler performance'
barh(zippd[0], zippd[1])
ylabel('Threads')
xlabel('Speedup')
t = "_".join(t.lower().split()) + ".pdf"
savefig(out_dir + t, dpi=100, format='pdf')
print t
data = wdata = sdata = intel_total_time = gcc_total_time = gcc_data = intel_data = None
if __name__ == '__main__':
close(); close(); close(); close(); close(); close(); close(); close(); close(); # lololol
data = gcc_data = read_data('goodtokno/tacc_gcc47_O3_2048')
wdata = weak_scaling_data(data)
sdata = strong_scaling_data(data)
gcc_total_time = sum(map(lambda i: i[T], data))
plot_strong_scaling(data, dataset='GCC')
plot_weak_scaling(data, dataset='GCC')
data = intel_data = read_data('goodtokno/tacc_intel_O3_8192')
wdata = weak_scaling_data(data)
sdata = strong_scaling_data(data)
intel_total_time = sum(map(lambda i: i[T], data))
plot_strong_scaling(data, dataset='Intel')
plot_weak_scaling(data, dataset='Intel')
plot_parallelization_levels(intel_data, 8192, 8, dataset='Intel')
plot_parallelization_levels(gcc_data, 2048, 8, dataset='GCC')
plot_compiler_difference(gcc_data, intel_data)
data = read_data('goodtokno/tacc_gcc47_O3_coarsevsfineperf')
plot_strong_scaling(data, dataset='GCC Without Nested Parallelism -')
if BLOCK:
raw_input()
| bsd-2-clause | -7,641,680,713,887,290,000 | 27.814159 | 94 | 0.578163 | false |
CommunityHoneyNetwork/CHN-Server | mhn/auth/views.py | 2 | 5341 | import hashlib
import random
from flask import Blueprint, request, jsonify
from flask_mail import Message
from sqlalchemy.exc import IntegrityError
from flask_security.utils import (
login_user as login, verify_and_update_password,
encrypt_password, logout_user as logout)
from mhn import db, mail
from mhn import user_datastore
from mhn.common.utils import error_response
from mhn.auth.models import User, PasswdReset, ApiKey
from mhn.auth import errors
from mhn.auth import (
get_datastore, login_required, roles_accepted, current_user)
from mhn.api import errors as apierrors
import uuid
auth = Blueprint('auth', __name__, url_prefix='/auth')
@auth.route('/login/', methods=['POST'])
def login_user():
if 'email' not in request.json:
return error_response(errors.AUTH_EMAIL_MISSING, 400)
if 'password' not in request.json:
return error_response(errors.AUTH_PSSWD_MISSING, 400)
# email and password are in the posted data.
user = User.query.filter_by(
email=request.json.get('email')).first()
psswd_check = False
if user:
psswd_check = verify_and_update_password(
request.json.get('password'), user)
if user and psswd_check:
login(user, remember=True)
return jsonify(user.to_dict())
else:
return error_response(errors.AUTH_INCORRECT_CREDENTIALS, 401)
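# Illustrative request (host and credentials are hypothetical):
# curl -X POST http://localhost/auth/login/ \
#      -H 'Content-Type: application/json' \
#      -d '{"email": "admin@example.com", "password": "secret"}'
# A successful login returns the serialized user; bad credentials return 401.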
@auth.route('/logout/', methods=['GET'])
def logout_user():
logout()
return jsonify({})
@auth.route('/user/', methods=['POST'])
@auth.route('/register/', methods=['POST'])
@roles_accepted('admin')
def create_user():
missing = User.check_required(request.json)
if missing:
return error_response(
apierrors.API_FIELDS_MISSING.format(missing), 400)
else:
user = get_datastore().create_user(
email=request.json.get('email'),
password=encrypt_password(request.json.get('password')))
userrole = user_datastore.find_role('admin')
user_datastore.add_role_to_user(user, userrole)
try:
db.session.add(user)
db.session.flush()
apikey = ApiKey(user_id=user.id, api_key=str(uuid.uuid4()).replace("-", ""))
db.session.add(apikey)
db.session.commit()
except IntegrityError:
return error_response(errors.AUTH_USERNAME_EXISTS, 400)
else:
return jsonify(user.to_dict())
@auth.route('/user/<user_id>/', methods=['DELETE'])
@roles_accepted('admin')
def delete_user(user_id):
user = User.query.get(user_id)
if not user:
return error_response(errors.AUTH_NOT_FOUND.format(user_id), 404)
user.active = False
db.session.add(user)
db.session.commit()
return jsonify({})
@auth.route('/resetrequest/', methods=['POST'])
def reset_passwd_request():
if 'email' not in request.json:
return error_response(errors.AUTH_EMAIL_MISSING, 400)
email = request.json['email']
user = User.query.filter_by(email=email).first()
if not user:
return error_response(errors.AUTH_NOT_FOUND.format(email), 404)
hashstr = hashlib.sha1(str(random.getrandbits(128)) + user.email).hexdigest()
# Deactivate all other password resets for this user.
PasswdReset.query.filter_by(user=user).update({'active': False})
reset = PasswdReset(hashstr=hashstr, active=True, user=user)
db.session.add(reset)
db.session.commit()
# Send password reset email to user.
from mhn import mhn
msg = Message(
html=reset.email_body, subject='MHN Password reset',
recipients=[user.email], sender=mhn.config['DEFAULT_MAIL_SENDER'])
try:
mail.send(msg)
except:
return error_response(errors.AUTH_SMTP_ERROR, 500)
else:
return jsonify({})
@auth.route('/changepass/', methods=['POST'])
def change_passwd():
password = request.json.get('password')
password_repeat = request.json.get('password_repeat')
if not password or not password_repeat:
# Request body is not complete.
return error_response(errors.AUTH_RESET_MISSING, 400)
if password != password_repeat:
# Password do not match.
return error_response(errors.AUTH_PASSWD_MATCH, 400)
if current_user.is_authenticated:
# No need to check password hash object or email.
user = current_user
else:
email = request.json.get('email')
hashstr = request.json.get('hashstr')
if not email or not hashstr:
# Request body is not complete for not authenticated
# request, ie, uses password reset hash.
return error_response(errors.AUTH_RESET_MISSING, 400)
reset = db.session.query(PasswdReset).join(User).\
filter(User.email == email, PasswdReset.active == True).\
filter(PasswdReset.hashstr == hashstr).\
first()
if not reset:
return error_response(errors.AUTH_RESET_HASH, 404)
db.session.add(reset)
reset.active = False
user = reset.user
user.password = encrypt_password(password)
db.session.add(user)
db.session.commit()
return jsonify({})
@auth.route('/me/', methods=['GET'])
@login_required
def get_user():
return jsonify(current_user.to_dict())
| lgpl-2.1 | 2,775,682,520,948,707,000 | 33.458065 | 88 | 0.644823 | false |
tlksio/tlksio | env/lib/python3.4/site-packages/pylint/checkers/base.py | 1 | 37578 | # Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
# Copyright (c) 2009-2010 Arista Networks, Inc.
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""basic checker for Python code"""
from logilab import astng
from logilab.common.ureports import Table
from logilab.astng import are_exclusive
from pylint.interfaces import IASTNGChecker
from pylint.reporters import diff_string
from pylint.checkers import BaseChecker, EmptyReport
from pylint.checkers.utils import (
check_messages,
clobber_in_except,
is_inside_except,
safe_infer,
)
import re
# regex for class/function/variable/constant name
CLASS_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$')
MOD_NAME_RGX = re.compile('(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$')
CONST_NAME_RGX = re.compile('(([A-Z_][A-Z0-9_]*)|(__.*__))$')
COMP_VAR_RGX = re.compile('[A-Za-z_][A-Za-z0-9_]*$')
DEFAULT_NAME_RGX = re.compile('[a-z_][a-z0-9_]{2,30}$')
# do not require a doc string on system methods
NO_REQUIRED_DOC_RGX = re.compile('__.*__')
del re
def in_loop(node):
"""return True if the node is inside a kind of for loop"""
parent = node.parent
while parent is not None:
if isinstance(parent, (astng.For, astng.ListComp, astng.SetComp,
astng.DictComp, astng.GenExpr)):
return True
parent = parent.parent
return False
def in_nested_list(nested_list, obj):
"""return true if the object is an element of <nested_list> or of a nested
list
"""
for elmt in nested_list:
if isinstance(elmt, (list, tuple)):
if in_nested_list(elmt, obj):
return True
elif elmt == obj:
return True
return False
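# Illustrative example (not in the original module):
# in_nested_list([1, [2, (3, 4)]], 4) -> True, because the search recurses
# into nested lists and tuples; in_nested_list([1, [2]], 3) -> False.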
def _loop_exits_early(loop):
"""Returns true if a loop has a break statement in its body."""
loop_nodes = (astng.For, astng.While)
# Loop over body explicitly to avoid matching break statements
# in orelse.
for child in loop.body:
if isinstance(child, loop_nodes):
continue
for _ in child.nodes_of_class(astng.Break, skip_klass=loop_nodes):
return True
return False
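# Illustrative case (not in the original module): only a break that belongs to
# this loop's own body counts -- a break inside a nested loop is skipped, so
# 'while cond: / for i in seq: break' still yields False here.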
def report_by_type_stats(sect, stats, old_stats):
"""make a report of
* percentage of different types documented
* percentage of different types with a bad name
"""
# percentage of different types documented and/or with a bad name
nice_stats = {}
for node_type in ('module', 'class', 'method', 'function'):
try:
total = stats[node_type]
except KeyError:
raise EmptyReport()
nice_stats[node_type] = {}
if total != 0:
try:
documented = total - stats['undocumented_'+node_type]
percent = (documented * 100.) / total
nice_stats[node_type]['percent_documented'] = '%.2f' % percent
except KeyError:
nice_stats[node_type]['percent_documented'] = 'NC'
try:
percent = (stats['badname_'+node_type] * 100.) / total
nice_stats[node_type]['percent_badname'] = '%.2f' % percent
except KeyError:
nice_stats[node_type]['percent_badname'] = 'NC'
lines = ('type', 'number', 'old number', 'difference',
'%documented', '%badname')
for node_type in ('module', 'class', 'method', 'function'):
new = stats[node_type]
old = old_stats.get(node_type, None)
if old is not None:
diff_str = diff_string(old, new)
else:
old, diff_str = 'NC', 'NC'
lines += (node_type, str(new), str(old), diff_str,
nice_stats[node_type].get('percent_documented', '0'),
nice_stats[node_type].get('percent_badname', '0'))
sect.append(Table(children=lines, cols=6, rheaders=1))
def redefined_by_decorator(node):
"""return True if the object is a method redefined via decorator.
For example:
@property
def x(self): return self._x
@x.setter
def x(self, value): self._x = value
"""
if node.decorators:
for decorator in node.decorators.nodes:
if (isinstance(decorator, astng.Getattr) and
getattr(decorator.expr, 'name', None) == node.name):
return True
return False
class _BasicChecker(BaseChecker):
__implements__ = IASTNGChecker
name = 'basic'
class BasicErrorChecker(_BasicChecker):
msgs = {
'E0100': ('__init__ method is a generator',
'init-is-generator',
'Used when the special class method __init__ is turned into a '
'generator by a yield in its body.'),
'E0101': ('Explicit return in __init__',
'return-in-init',
'Used when the special class method __init__ has an explicit \
return value.'),
'E0102': ('%s already defined line %s',
'function-redefined',
'Used when a function / class / method is redefined.'),
'E0103': ('%r not properly in loop',
'not-in-loop',
'Used when break or continue keywords are used outside a loop.'),
'E0104': ('Return outside function',
'return-outside-function',
'Used when a "return" statement is found outside a function or '
'method.'),
'E0105': ('Yield outside function',
'yield-outside-function',
'Used when a "yield" statement is found outside a function or '
'method.'),
'E0106': ('Return with argument inside generator',
'return-arg-in-generator',
'Used when a "return" statement with an argument is found '
'outside in a generator function or method (e.g. with some '
'"yield" statements).'),
'E0107': ("Use of the non-existent %s operator",
'nonexistent-operator',
"Used when you attempt to use the C-style pre-increment or"
"pre-decrement operator -- and ++, which doesn't exist in Python."),
'E0108': ('Duplicate argument name %s in function definition',
'duplicate-argument-name',
'Duplicate argument names in function definitions are syntax'
' errors.'),
'W0120': ('Else clause on loop without a break statement',
'useless-else-on-loop',
'Loops should only have an else clause if they can exit early '
'with a break statement, otherwise the statements under else '
'should be on the same scope as the loop itself.'),
}
def __init__(self, linter):
_BasicChecker.__init__(self, linter)
@check_messages('E0102')
def visit_class(self, node):
self._check_redefinition('class', node)
@check_messages('E0100', 'E0101', 'E0102', 'E0106', 'E0108')
def visit_function(self, node):
if not redefined_by_decorator(node):
self._check_redefinition(node.is_method() and 'method' or 'function', node)
# checks for max returns, branch, return in __init__
returns = node.nodes_of_class(astng.Return,
skip_klass=(astng.Function, astng.Class))
if node.is_method() and node.name == '__init__':
if node.is_generator():
self.add_message('E0100', node=node)
else:
values = [r.value for r in returns]
if [v for v in values if not (v is None or
(isinstance(v, astng.Const) and v.value is None)
or (isinstance(v, astng.Name) and v.name == 'None'))]:
self.add_message('E0101', node=node)
elif node.is_generator():
# make sure we don't mix non-None returns and yields
for retnode in returns:
if isinstance(retnode.value, astng.Const) and \
retnode.value.value is not None:
self.add_message('E0106', node=node,
line=retnode.fromlineno)
args = set()
for name in node.argnames():
if name in args:
self.add_message('E0108', node=node, args=(name,))
else:
args.add(name)
@check_messages('E0104')
def visit_return(self, node):
if not isinstance(node.frame(), astng.Function):
self.add_message('E0104', node=node)
@check_messages('E0105')
def visit_yield(self, node):
if not isinstance(node.frame(), (astng.Function, astng.Lambda)):
self.add_message('E0105', node=node)
@check_messages('E0103')
def visit_continue(self, node):
self._check_in_loop(node, 'continue')
@check_messages('E0103')
def visit_break(self, node):
self._check_in_loop(node, 'break')
@check_messages('W0120')
def visit_for(self, node):
self._check_else_on_loop(node)
@check_messages('W0120')
def visit_while(self, node):
self._check_else_on_loop(node)
@check_messages('E0107')
def visit_unaryop(self, node):
"""check use of the non-existent ++ adn -- operator operator"""
if ((node.op in '+-') and
isinstance(node.operand, astng.UnaryOp) and
(node.operand.op == node.op)):
self.add_message('E0107', node=node, args=node.op*2)
def _check_else_on_loop(self, node):
"""Check that any loop with an else clause has a break statement."""
if node.orelse and not _loop_exits_early(node):
self.add_message('W0120', node=node,
# This is not optimal, but the line previous
# to the first statement in the else clause
# will usually be the one that contains the else:.
line=node.orelse[0].lineno - 1)
def _check_in_loop(self, node, node_name):
"""check that a node is inside a for or while loop"""
_node = node.parent
while _node:
if isinstance(_node, (astng.For, astng.While)):
break
_node = _node.parent
else:
self.add_message('E0103', node=node, args=node_name)
def _check_redefinition(self, redeftype, node):
"""check for redefinition of a function / method / class name"""
defined_self = node.parent.frame()[node.name]
if defined_self is not node and not are_exclusive(node, defined_self):
self.add_message('E0102', node=node,
args=(redeftype, defined_self.fromlineno))
class BasicChecker(_BasicChecker):
"""checks for :
* doc strings
* modules / classes / functions / methods / arguments / variables name
* number of arguments, local variables, branches, returns and statements in
functions, methods
* required module attributes
* dangerous default values as arguments
* redefinition of function / method / class
* uses of the global statement
"""
__implements__ = IASTNGChecker
name = 'basic'
msgs = {
'W0101': ('Unreachable code',
'unreachable',
'Used when there is some code behind a "return" or "raise" \
statement, which will never be accessed.'),
'W0102': ('Dangerous default value %s as argument',
'dangerous-default-value',
'Used when a mutable value as list or dictionary is detected in \
a default value for an argument.'),
'W0104': ('Statement seems to have no effect',
'pointless-statement',
'Used when a statement doesn\'t have (or at least seems to) \
any effect.'),
'W0105': ('String statement has no effect',
'pointless-string-statement',
'Used when a string is used as a statement (which of course \
has no effect). This is a particular case of W0104 with its \
own message so you can easily disable it if you\'re using \
those strings as documentation, instead of comments.'),
'W0106': ('Expression "%s" is assigned to nothing',
'expression-not-assigned',
'Used when an expression that is not a function call is assigned\
to nothing. Probably something else was intended.'),
'W0108': ('Lambda may not be necessary',
'unnecessary-lambda',
'Used when the body of a lambda expression is a function call \
on the same argument list as the lambda itself; such lambda \
expressions are in all but a few cases replaceable with the \
function being called in the body of the lambda.'),
'W0109': ("Duplicate key %r in dictionary",
'duplicate-key',
"Used when a dictionary expression binds the same key multiple \
times."),
'W0122': ('Use of the exec statement',
'exec-statement',
'Used when you use the "exec" statement, to discourage its \
usage. That doesn\'t mean you can not use it !'),
'W0141': ('Used builtin function %r',
'bad-builtin',
'Used when a black listed builtin function is used (see the '
'bad-function option). Usual black listed functions are the ones '
'like map, or filter , where Python offers now some cleaner '
'alternative like list comprehension.'),
'W0142': ('Used * or ** magic',
'star-args',
'Used when a function or method is called using `*args` or '
'`**kwargs` to dispatch arguments. This doesn\'t improve '
'readability and should be used with care.'),
'W0150': ("%s statement in finally block may swallow exception",
'lost-exception',
"Used when a break or a return statement is found inside the \
finally clause of a try...finally block: the exceptions raised \
in the try clause will be silently swallowed instead of being \
re-raised."),
'W0199': ('Assert called on a 2-tuple. Did you mean \'assert x,y\'?',
'assert-on-tuple',
'A call of assert on a tuple will always evaluate to true if '
'the tuple is not empty, and will always evaluate to false if '
'it is.'),
'C0121': ('Missing required attribute "%s"', # W0103
'missing-module-attribute',
'Used when an attribute required for modules is missing.'),
}
options = (('required-attributes',
{'default' : (), 'type' : 'csv',
'metavar' : '<attributes>',
'help' : 'Required attributes for module, separated by a '
'comma'}
),
('bad-functions',
{'default' : ('map', 'filter', 'apply', 'input'),
'type' :'csv', 'metavar' : '<builtin function names>',
'help' : 'List of builtins function names that should not be '
'used, separated by a comma'}
),
)
reports = ( ('RP0101', 'Statistics by type', report_by_type_stats), )
def __init__(self, linter):
_BasicChecker.__init__(self, linter)
self.stats = None
self._tryfinallys = None
def open(self):
"""initialize visit variables and statistics
"""
self._tryfinallys = []
self.stats = self.linter.add_stats(module=0, function=0,
method=0, class_=0)
def visit_module(self, node):
"""check module name, docstring and required arguments
"""
self.stats['module'] += 1
for attr in self.config.required_attributes:
if attr not in node:
self.add_message('C0121', node=node, args=attr)
def visit_class(self, node):
"""check module name, docstring and redefinition
increment branch counter
"""
self.stats['class'] += 1
@check_messages('W0104', 'W0105')
def visit_discard(self, node):
"""check for various kind of statements without effect"""
expr = node.value
if isinstance(expr, astng.Const) and isinstance(expr.value,
str):
# treat string statement in a separated message
self.add_message('W0105', node=node)
return
# ignore if this is :
# * a direct function call
# * the unique child of a try/except body
# * a yield (which are wrapped by a discard node in _ast XXX)
# warn W0106 if we have any underlying function call (we can't predict
# side effects), else W0104
if (isinstance(expr, (astng.Yield, astng.CallFunc)) or
(isinstance(node.parent, astng.TryExcept) and
node.parent.body == [node])):
return
if any(expr.nodes_of_class(astng.CallFunc)):
self.add_message('W0106', node=node, args=expr.as_string())
else:
self.add_message('W0104', node=node)
@check_messages('W0108')
def visit_lambda(self, node):
"""check whether or not the lambda is suspicious
"""
# if the body of the lambda is a call expression with the same
# argument list as the lambda itself, then the lambda is
# possibly unnecessary and at least suspicious.
if node.args.defaults:
# If the arguments of the lambda include defaults, then a
# judgment cannot be made because there is no way to check
# that the defaults defined by the lambda are the same as
# the defaults defined by the function called in the body
# of the lambda.
return
call = node.body
if not isinstance(call, astng.CallFunc):
# The body of the lambda must be a function call expression
# for the lambda to be unnecessary.
return
# XXX are lambda still different with astng >= 0.18 ?
# *args and **kwargs need to be treated specially, since they
# are structured differently between the lambda and the function
# call (in the lambda they appear in the args.args list and are
# indicated as * and ** by two bits in the lambda's flags, but
# in the function call they are omitted from the args list and
# are indicated by separate attributes on the function call node).
ordinary_args = list(node.args.args)
if node.args.kwarg:
if (not call.kwargs
or not isinstance(call.kwargs, astng.Name)
or node.args.kwarg != call.kwargs.name):
return
elif call.kwargs:
return
if node.args.vararg:
if (not call.starargs
or not isinstance(call.starargs, astng.Name)
or node.args.vararg != call.starargs.name):
return
elif call.starargs:
return
# The "ordinary" arguments must be in a correspondence such that:
# ordinary_args[i].name == call.args[i].name.
if len(ordinary_args) != len(call.args):
return
for i in range(len(ordinary_args)):
if not isinstance(call.args[i], astng.Name):
return
if node.args.args[i].name != call.args[i].name:
return
self.add_message('W0108', line=node.fromlineno, node=node)
def visit_function(self, node):
"""check function name, docstring, arguments, redefinition,
variable names, max locals
"""
self.stats[node.is_method() and 'method' or 'function'] += 1
# check for dangerous default values as arguments
for default in node.args.defaults:
try:
value = next(default.infer())
except astng.InferenceError:
continue
if (isinstance(value, astng.Instance) and
value.qname() in ('__builtin__.set', '__builtin__.dict', '__builtin__.list')):
if value is default:
msg = default.as_string()
elif type(value) is astng.Instance:
msg = '%s (%s)' % (default.as_string(), value.qname())
else:
msg = '%s (%s)' % (default.as_string(), value.as_string())
self.add_message('W0102', node=node, args=(msg,))
@check_messages('W0101', 'W0150')
def visit_return(self, node):
"""1 - check is the node has a right sibling (if so, that's some
unreachable code)
2 - check is the node is inside the finally clause of a try...finally
block
"""
self._check_unreachable(node)
# Is it inside final body of a try...finally bloc ?
self._check_not_in_finally(node, 'return', (astng.Function,))
@check_messages('W0101')
def visit_continue(self, node):
"""check is the node has a right sibling (if so, that's some unreachable
code)
"""
self._check_unreachable(node)
@check_messages('W0101', 'W0150')
def visit_break(self, node):
"""1 - check is the node has a right sibling (if so, that's some
unreachable code)
2 - check is the node is inside the finally clause of a try...finally
block
"""
# 1 - Is it right sibling ?
self._check_unreachable(node)
# 2 - Is it inside final body of a try...finally bloc ?
self._check_not_in_finally(node, 'break', (astng.For, astng.While,))
@check_messages('W0101')
def visit_raise(self, node):
"""check is the node has a right sibling (if so, that's some unreachable
code)
"""
self._check_unreachable(node)
@check_messages('W0122')
def visit_exec(self, node):
"""just print a warning on exec statements"""
self.add_message('W0122', node=node)
@check_messages('W0141', 'W0142')
def visit_callfunc(self, node):
"""visit a CallFunc node -> check if this is not a blacklisted builtin
call and check for * or ** use
"""
if isinstance(node.func, astng.Name):
name = node.func.name
# ignore the name if it's not a builtin (i.e. not defined in the
# locals nor globals scope)
if not (name in node.frame() or
name in node.root()):
if name in self.config.bad_functions:
self.add_message('W0141', node=node, args=name)
if node.starargs or node.kwargs:
scope = node.scope()
if isinstance(scope, astng.Function):
toprocess = [(n, vn) for (n, vn) in ((node.starargs, scope.args.vararg),
(node.kwargs, scope.args.kwarg)) if n]
if toprocess:
for cfnode, fargname in toprocess[:]:
if getattr(cfnode, 'name', None) == fargname:
toprocess.remove((cfnode, fargname))
if not toprocess:
return # W0142 can be skipped
self.add_message('W0142', node=node.func)
@check_messages('W0199')
def visit_assert(self, node):
"""check the use of an assert statement on a tuple."""
if node.fail is None and isinstance(node.test, astng.Tuple) and \
len(node.test.elts) == 2:
self.add_message('W0199', node=node)
@check_messages('W0109')
def visit_dict(self, node):
"""check duplicate key in dictionary"""
keys = set()
for k, _ in node.items:
if isinstance(k, astng.Const):
key = k.value
if key in keys:
self.add_message('W0109', node=node, args=key)
keys.add(key)
def visit_tryfinally(self, node):
"""update try...finally flag"""
self._tryfinallys.append(node)
def leave_tryfinally(self, node):
"""update try...finally flag"""
self._tryfinallys.pop()
def _check_unreachable(self, node):
"""check unreachable code"""
unreach_stmt = node.next_sibling()
if unreach_stmt is not None:
self.add_message('W0101', node=unreach_stmt)
def _check_not_in_finally(self, node, node_name, breaker_classes=()):
"""check that a node is not inside a finally clause of a
try...finally statement.
If we found before a try...finally bloc a parent which its type is
in breaker_classes, we skip the whole check."""
# if self._tryfinallys is empty, we're not a in try...finally bloc
if not self._tryfinallys:
return
# the node could be a grand-grand...-children of the try...finally
_parent = node.parent
_node = node
while _parent and not isinstance(_parent, breaker_classes):
if hasattr(_parent, 'finalbody') and _node in _parent.finalbody:
self.add_message('W0150', node=node, args=node_name)
return
_node = _parent
_parent = _node.parent
class NameChecker(_BasicChecker):
msgs = {
'C0102': ('Black listed name "%s"',
'blacklisted-name',
'Used when the name is listed in the black list (unauthorized \
names).'),
'C0103': ('Invalid name "%s" for type %s (should match %s)',
'invalid-name',
'Used when the name doesn\'t match the regular expression \
associated to its type (constant, variable, class...).'),
}
options = (('module-rgx',
{'default' : MOD_NAME_RGX,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match correct '
'module names'}
),
('const-rgx',
{'default' : CONST_NAME_RGX,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match correct '
'module level names'}
),
('class-rgx',
{'default' : CLASS_NAME_RGX,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match correct '
'class names'}
),
('function-rgx',
{'default' : DEFAULT_NAME_RGX,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match correct '
'function names'}
),
('method-rgx',
{'default' : DEFAULT_NAME_RGX,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match correct '
'method names'}
),
('attr-rgx',
{'default' : DEFAULT_NAME_RGX,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match correct '
'instance attribute names'}
),
('argument-rgx',
{'default' : DEFAULT_NAME_RGX,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match correct '
'argument names'}),
('variable-rgx',
{'default' : DEFAULT_NAME_RGX,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match correct '
'variable names'}
),
('inlinevar-rgx',
{'default' : COMP_VAR_RGX,
'type' :'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match correct '
'list comprehension / generator expression variable \
names'}
),
# XXX use set
('good-names',
{'default' : ('i', 'j', 'k', 'ex', 'Run', '_'),
'type' :'csv', 'metavar' : '<names>',
'help' : 'Good variable names which should always be accepted,'
' separated by a comma'}
),
('bad-names',
{'default' : ('foo', 'bar', 'baz', 'toto', 'tutu', 'tata'),
'type' :'csv', 'metavar' : '<names>',
'help' : 'Bad variable names which should always be refused, '
'separated by a comma'}
),
)
def open(self):
self.stats = self.linter.add_stats(badname_module=0,
badname_class=0, badname_function=0,
badname_method=0, badname_attr=0,
badname_const=0,
badname_variable=0,
badname_inlinevar=0,
badname_argument=0)
@check_messages('C0102', 'C0103')
def visit_module(self, node):
self._check_name('module', node.name.split('.')[-1], node)
@check_messages('C0102', 'C0103')
def visit_class(self, node):
self._check_name('class', node.name, node)
for attr, anodes in node.instance_attrs.items():
self._check_name('attr', attr, anodes[0])
@check_messages('C0102', 'C0103')
def visit_function(self, node):
self._check_name(node.is_method() and 'method' or 'function',
node.name, node)
# check arguments name
args = node.args.args
if args is not None:
self._recursive_check_names(args, node)
@check_messages('C0102', 'C0103')
def visit_assname(self, node):
"""check module level assigned names"""
frame = node.frame()
ass_type = node.ass_type()
if isinstance(ass_type, astng.Comprehension):
self._check_name('inlinevar', node.name, node)
elif isinstance(frame, astng.Module):
if isinstance(ass_type, astng.Assign) and not in_loop(ass_type):
self._check_name('const', node.name, node)
elif isinstance(ass_type, astng.ExceptHandler):
self._check_name('variable', node.name, node)
elif isinstance(frame, astng.Function):
# global introduced variable aren't in the function locals
if node.name in frame:
self._check_name('variable', node.name, node)
def _recursive_check_names(self, args, node):
"""check names in a possibly recursive list <arg>"""
for arg in args:
if isinstance(arg, astng.AssName):
self._check_name('argument', arg.name, node)
else:
self._recursive_check_names(arg.elts, node)
def _check_name(self, node_type, name, node):
"""check for a name using the type's regexp"""
if is_inside_except(node):
clobbering, _ = clobber_in_except(node)
if clobbering:
return
if name in self.config.good_names:
return
if name in self.config.bad_names:
self.stats['badname_' + node_type] += 1
self.add_message('C0102', node=node, args=name)
return
regexp = getattr(self.config, node_type + '_rgx')
if regexp.match(name) is None:
type_label = {'inlinevar': 'inlined variable',
'const': 'constant',
'attr': 'attribute',
}.get(node_type, node_type)
self.add_message('C0103', node=node, args=(name, type_label, regexp.pattern))
self.stats['badname_' + node_type] += 1
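# Illustrative outcomes under the default options (examples only): a variable
# named 'foo' is reported as C0102 because it is in the bad-names list, a
# one-letter name such as 'x' fails the default regexp and is reported as
# C0103, while 'i' is accepted because it is in the good-names list.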
class DocStringChecker(_BasicChecker):
msgs = {
'C0111': ('Missing docstring', # W0131
'missing-docstring',
'Used when a module, function, class or method has no docstring.\
Some special methods like __init__ don\'t necessarily require a \
docstring.'),
'C0112': ('Empty docstring', # W0132
'empty-docstring',
'Used when a module, function, class or method has an empty \
docstring (it would be too easy ;).'),
}
options = (('no-docstring-rgx',
{'default' : NO_REQUIRED_DOC_RGX,
'type' : 'regexp', 'metavar' : '<regexp>',
'help' : 'Regular expression which should only match '
'functions or classes name which do not require a '
'docstring'}
),
)
def open(self):
self.stats = self.linter.add_stats(undocumented_module=0,
undocumented_function=0,
undocumented_method=0,
undocumented_class=0)
def visit_module(self, node):
self._check_docstring('module', node)
def visit_class(self, node):
if self.config.no_docstring_rgx.match(node.name) is None:
self._check_docstring('class', node)
def visit_function(self, node):
if self.config.no_docstring_rgx.match(node.name) is None:
ftype = node.is_method() and 'method' or 'function'
if isinstance(node.parent.frame(), astng.Class):
overridden = False
# check if node is from a method overridden by its ancestor
for ancestor in node.parent.frame().ancestors():
if node.name in ancestor and \
isinstance(ancestor[node.name], astng.Function):
overridden = True
break
if not overridden:
self._check_docstring(ftype, node)
else:
self._check_docstring(ftype, node)
def _check_docstring(self, node_type, node):
"""check the node has a non empty docstring"""
docstring = node.doc
if docstring is None:
self.stats['undocumented_'+node_type] += 1
self.add_message('C0111', node=node)
elif not docstring.strip():
self.stats['undocumented_'+node_type] += 1
self.add_message('C0112', node=node)
class PassChecker(_BasicChecker):
"""check is the pass statement is really necessary"""
msgs = {'W0107': ('Unnecessary pass statement',
'unnecessary-pass',
'Used when a "pass" statement that can be avoided is '
'encountered.'),
}
def visit_pass(self, node):
if len(node.parent.child_sequence(node)) > 1:
self.add_message('W0107', node=node)
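# Illustrative case (example only): a pass statement that shares its block
# with other statements is flagged, e.g.
# def f():
#     pass  # W0107: unnecessary-pass
#     return 1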
class LambdaForComprehensionChecker(_BasicChecker):
"""check for using a lambda where a comprehension would do.
See <http://www.artima.com/weblogs/viewpost.jsp?thread=98196>
where GvR says comprehensions would be clearer.
"""
msgs = {'W0110': ('map/filter on lambda could be replaced by comprehension',
'deprecated-lambda',
'Used when a lambda is the first argument to "map" or '
'"filter". It could be clearer as a list '
'comprehension or generator expression.'),
}
@check_messages('W0110')
def visit_callfunc(self, node):
"""visit a CallFunc node, check if map or filter are called with a
lambda
"""
if not node.args:
return
if not isinstance(node.args[0], astng.Lambda):
return
infered = safe_infer(node.func)
if (infered
and infered.parent.name == '__builtin__'
and infered.name in ['map', 'filter']):
self.add_message('W0110', node=node)
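# Illustrative case (example only): map(lambda x: x * 2, items) triggers W0110;
# the equivalent list comprehension [x * 2 for x in items] is preferred.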
def register(linter):
"""required method to auto register this checker"""
linter.register_checker(BasicErrorChecker(linter))
linter.register_checker(BasicChecker(linter))
linter.register_checker(NameChecker(linter))
linter.register_checker(DocStringChecker(linter))
linter.register_checker(PassChecker(linter))
linter.register_checker(LambdaForComprehensionChecker(linter))
| mit | 1,372,308,262,561,143,800 | 41.033557 | 94 | 0.551599 | false |
ideamonk/apt-offline | apt_offline_gui/Ui_AptOfflineQtAbout.py | 1 | 8800 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'AptOfflineQtAbout.ui'
#
# Created: Sun Nov 7 20:54:52 2010
# by: PyQt4 UI code generator 4.7.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_AboutAptOffline(object):
def setupUi(self, AboutAptOffline):
AboutAptOffline.setObjectName("AboutAptOffline")
AboutAptOffline.setWindowModality(QtCore.Qt.ApplicationModal)
AboutAptOffline.resize(526, 378)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(AboutAptOffline.sizePolicy().hasHeightForWidth())
AboutAptOffline.setSizePolicy(sizePolicy)
AboutAptOffline.setMinimumSize(QtCore.QSize(526, 378))
AboutAptOffline.setMaximumSize(QtCore.QSize(526, 378))
self.label = QtGui.QLabel(AboutAptOffline)
self.label.setGeometry(QtCore.QRect(12, 30, 511, 21))
font = QtGui.QFont()
font.setPointSize(16)
self.label.setFont(font)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setObjectName("label")
self.tabWidget = QtGui.QTabWidget(AboutAptOffline)
self.tabWidget.setGeometry(QtCore.QRect(7, 90, 510, 241))
self.tabWidget.setObjectName("tabWidget")
self.aboutTab = QtGui.QWidget()
self.aboutTab.setObjectName("aboutTab")
self.label_3 = QtGui.QLabel(self.aboutTab)
self.label_3.setGeometry(QtCore.QRect(10, 20, 491, 31))
self.label_3.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_3.setWordWrap(True)
self.label_3.setObjectName("label_3")
self.label_14 = QtGui.QLabel(self.aboutTab)
self.label_14.setGeometry(QtCore.QRect(10, 46, 481, 61))
self.label_14.setWordWrap(True)
self.label_14.setObjectName("label_14")
self.tabWidget.addTab(self.aboutTab, "")
self.authorTab = QtGui.QWidget()
self.authorTab.setObjectName("authorTab")
self.label_4 = QtGui.QLabel(self.authorTab)
self.label_4.setGeometry(QtCore.QRect(10, 10, 111, 16))
self.label_4.setObjectName("label_4")
self.label_5 = QtGui.QLabel(self.authorTab)
self.label_5.setGeometry(QtCore.QRect(30, 30, 271, 16))
self.label_5.setObjectName("label_5")
self.label_6 = QtGui.QLabel(self.authorTab)
self.label_6.setGeometry(QtCore.QRect(10, 60, 131, 16))
self.label_6.setObjectName("label_6")
self.label_7 = QtGui.QLabel(self.authorTab)
self.label_7.setGeometry(QtCore.QRect(30, 80, 261, 16))
self.label_7.setObjectName("label_7")
self.label_8 = QtGui.QLabel(self.authorTab)
self.label_8.setGeometry(QtCore.QRect(30, 100, 271, 16))
self.label_8.setObjectName("label_8")
self.tabWidget.addTab(self.authorTab, "")
self.thanksTab = QtGui.QWidget()
self.thanksTab.setObjectName("thanksTab")
self.label_9 = QtGui.QLabel(self.thanksTab)
self.label_9.setGeometry(QtCore.QRect(10, 10, 221, 16))
self.label_9.setObjectName("label_9")
self.label_10 = QtGui.QLabel(self.thanksTab)
self.label_10.setGeometry(QtCore.QRect(10, 30, 141, 16))
self.label_10.setObjectName("label_10")
self.label_11 = QtGui.QLabel(self.thanksTab)
self.label_11.setGeometry(QtCore.QRect(10, 50, 161, 16))
self.label_11.setObjectName("label_11")
self.label_12 = QtGui.QLabel(self.thanksTab)
self.label_12.setGeometry(QtCore.QRect(10, 70, 161, 16))
self.label_12.setObjectName("label_12")
self.label_13 = QtGui.QLabel(self.thanksTab)
self.label_13.setGeometry(QtCore.QRect(10, 110, 301, 31))
self.label_13.setWordWrap(True)
self.label_13.setObjectName("label_13")
self.tabWidget.addTab(self.thanksTab, "")
self.licenseTab = QtGui.QWidget()
self.licenseTab.setObjectName("licenseTab")
self.licenseText = QtGui.QPlainTextEdit(self.licenseTab)
self.licenseText.setGeometry(QtCore.QRect(4, 4, 490, 203))
font = QtGui.QFont()
font.setPointSize(8)
self.licenseText.setFont(font)
self.licenseText.setObjectName("licenseText")
self.tabWidget.addTab(self.licenseTab, "")
self.label_2 = QtGui.QLabel(AboutAptOffline)
self.label_2.setGeometry(QtCore.QRect(10, 60, 511, 16))
self.label_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_2.setObjectName("label_2")
self.pushButton = QtGui.QPushButton(AboutAptOffline)
self.pushButton.setGeometry(QtCore.QRect(416, 340, 101, 31))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/icons/icons/dialog-cancel.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton.setIcon(icon)
self.pushButton.setObjectName("pushButton")
self.retranslateUi(AboutAptOffline)
self.tabWidget.setCurrentIndex(3)
QtCore.QObject.connect(self.pushButton, QtCore.SIGNAL("clicked()"), AboutAptOffline.close)
QtCore.QMetaObject.connectSlotsByName(AboutAptOffline)
def retranslateUi(self, AboutAptOffline):
AboutAptOffline.setWindowTitle(QtGui.QApplication.translate("AboutAptOffline", "About Apt-Offline", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("AboutAptOffline", "Apt-Offline", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("AboutAptOffline", "apt-offline is an Offline APT Package Manager for Debian and derivatives. ", None, QtGui.QApplication.UnicodeUTF8))
self.label_14.setText(QtGui.QApplication.translate("AboutAptOffline", "apt-offline can fully update/upgrade your disconnected Debian box without the need of connecting it to the network. \n"
"\n"
"This is a Graphical User Interface which exposes the functionality of apt-offline.", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.aboutTab), QtGui.QApplication.translate("AboutAptOffline", "About", None, QtGui.QApplication.UnicodeUTF8))
self.label_4.setText(QtGui.QApplication.translate("AboutAptOffline", "Written by:", None, QtGui.QApplication.UnicodeUTF8))
self.label_5.setText(QtGui.QApplication.translate("AboutAptOffline", "Ritesh Raj Sarraf <rrs@researchut.com>", None, QtGui.QApplication.UnicodeUTF8))
self.label_6.setText(QtGui.QApplication.translate("AboutAptOffline", "GUI written by:", None, QtGui.QApplication.UnicodeUTF8))
self.label_7.setText(QtGui.QApplication.translate("AboutAptOffline", "Manish Sinha <mail@manishsinha.net>", None, QtGui.QApplication.UnicodeUTF8))
self.label_8.setText(QtGui.QApplication.translate("AboutAptOffline", "Abhishek Mishra <ideamonk@gmail.com>", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.authorTab), QtGui.QApplication.translate("AboutAptOffline", "Author", None, QtGui.QApplication.UnicodeUTF8))
self.label_9.setText(QtGui.QApplication.translate("AboutAptOffline", "Peter Otten", None, QtGui.QApplication.UnicodeUTF8))
self.label_10.setText(QtGui.QApplication.translate("AboutAptOffline", "Duncan Booth", None, QtGui.QApplication.UnicodeUTF8))
self.label_11.setText(QtGui.QApplication.translate("AboutAptOffline", "Simon Forman", None, QtGui.QApplication.UnicodeUTF8))
self.label_12.setText(QtGui.QApplication.translate("AboutAptOffline", "Dennis Lee Bieber", None, QtGui.QApplication.UnicodeUTF8))
self.label_13.setText(QtGui.QApplication.translate("AboutAptOffline", "The awesome Directi people for their office space required for the mini hackfests", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.thanksTab), QtGui.QApplication.translate("AboutAptOffline", "Thanks To", None, QtGui.QApplication.UnicodeUTF8))
self.licenseText.setPlainText(QtGui.QApplication.translate("AboutAptOffline", "LICENSE not found. Please contact the developers immediately.", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.licenseTab), QtGui.QApplication.translate("AboutAptOffline", "License", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("AboutAptOffline", "A GUI for apt-offline - an offline APT Package Manager", None, QtGui.QApplication.UnicodeUTF8))
self.pushButton.setText(QtGui.QApplication.translate("AboutAptOffline", "Close", None, QtGui.QApplication.UnicodeUTF8))
import resources_rc
| gpl-3.0 | 6,399,191,816,789,832,000 | 65.165414 | 201 | 0.716364 | false |
zhoulan/google-python-exercises | basic/string1.py | 1 | 4177 | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number
# passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
# +++your code here+++
if count >= 0:
if count < 10:
return 'Number of donuts: ' + str(count)
else:
return 'Number of donuts: many'
else:
return 'Error in count'
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
# +++your code here+++
if len(s) >=2:
return s[:2] + s[-2:]
else:
return ''
# C. fix_start
# Given a string s, return a string
# where all occurences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
# +++your code here+++
resultstr = s[0]
for i in range(1,len(s)):
if s[i] == s[0]:
resultstr += '*'
else:
resultstr += s[i]
return resultstr
def fix_start_solution(s):
front = s[0]
back = s[1:]
fixed_back = back.replace(front, "*")
return front + fixed_back
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
# +++your code here+++
return b[:2] + a[2:] + ' ' + a[:2] + b[2:]
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
print 'donuts'
# Each line calls donuts, compares its result to the expected for that call.
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
test(donuts(-2), 'Error in count')
print
print 'both_ends'
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print
print 'fix_start'
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('google'), 'goo*le')
test(fix_start('donut'), 'donut')
print
print 'fix_start_solution'
test(fix_start_solution('babble'), 'ba**le')
test(fix_start_solution('aardvark'), 'a*rdv*rk')
test(fix_start_solution('google'), 'goo*le')
test(fix_start_solution('donut'), 'donut')
print
print 'mix_up'
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()
| apache-2.0 | 343,722,256,654,909,760 | 29.268116 | 78 | 0.651903 | false |
gangadharkadam/tailorerp | erpnext/support/doctype/support_ticket/support_ticket.py | 1 | 8287 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.transaction_base import TransactionBase
from frappe.utils import now, extract_email_id, get_url
import json
import requests
STANDARD_USERS = ("Guest", "Administrator")
class SupportTicket(TransactionBase):
def get_sender(self, comm):
return frappe.db.get_value('Support Email Settings',None,'support_email')
def get_subject(self, comm):
return '[' + self.name + '] ' + (comm.subject or 'No Subject Specified')
def get_content(self, comm):
signature = frappe.db.get_value('Support Email Settings',None,'support_signature')
content = comm.content
if signature:
content += '<p>' + signature + '</p>'
return content
def get_portal_page(self):
return "ticket"
def on_update1(self):
from frappe.utils import get_url, cstr
frappe.errprint(get_url())
if get_url()=='http://tailorpad.com':
pass
else:
pr2 = frappe.db.sql("""select name from `tabSupport Ticket`""")
frappe.errprint(pr2)
frappe.errprint("is feed back saved")
if pr2:
# self.login()
frappe.errprint("in if for creation support ticket")
test = {}
support_ticket = self.get_ticket_details()
self.call_del_keys(support_ticket)
#test['communications'] = []
#self.call_del_keys(support_ticket.get('communications'), test)
self.login()
frappe.errprint("support_ticket")
frappe.errprint(support_ticket)
self.tenent_based_ticket_creation(support_ticket)
# def on_update(self):
# self.send_email()
def send_email(self):
frappe.errprint("in the sendmail")
from frappe.utils.user import get_user_fullname
from frappe.utils import get_url
if self.get("__islocal") and get_url()=='http://tailorpad.com':
# mail_titles = frappe.get_hooks().get("login_mail_title", [])
# title = frappe.db.get_default('company') or (mail_titles and mail_titles[0]) or ""
full_name = get_user_fullname(frappe.session['user'])
if full_name == "Guest":
full_name = "Administrator"
first_name = frappe.db.sql_list("""select first_name from `tabUser` where name='%s'"""%(self.raised_by))
frappe.errprint(first_name[0])
msg="Dear "+first_name[0]+"!<br><br>Support Ticket is created successfully for '"+self.subject+"'<br><br>Your Support Ticket Number is '"+self.name+"' <br><br>Please note for further information. <br><br>Regards, <br>Team TailorPad."
sender = frappe.session.user not in STANDARD_USERS and frappe.session.user or None
frappe.sendmail(recipients=self.raised_by, sender=sender, subject=self.subject,
message=msg)
def login(self):
login_details = {'usr': 'Administrator', 'pwd': 'admin'}
url = 'http://tailorpad.com/api/method/login'
headers = {'content-type': 'application/x-www-form-urlencoded'}
frappe.errprint([url, 'data='+json.dumps(login_details)])
response = requests.post(url, data='data='+json.dumps(login_details), headers=headers)
def get_ticket_details(self):
# return frappe.get_doc('Support Ticket', self.name)
      response = requests.get("""%(url)s/api/resource/Support Ticket/%(name)s""" % {'url': get_url(), 'name': self.name})
# frappe.errprint(["""%(url)s/api/resource/Support Ticket/%(name)s"""%{'url':get_url(), 'name':self.name}])
frappe.errprint(response.text)
      return json.loads(response.text).get('data')
def call_del_keys(self, support_ticket):
if support_ticket:
if isinstance(support_ticket, dict):
self.del_keys(support_ticket)
if isinstance(support_ticket, list):
for comm in support_ticket:
self.del_keys(comm)
def del_keys(self, support_ticket):
frappe.errprint(type(support_ticket))
del support_ticket['name']
del support_ticket['creation']
del support_ticket['modified']
del support_ticket['company']
def tenent_based_ticket_creation(self, support_ticket):
frappe.errprint(support_ticket)
url = 'http://tailorpad.com/api/resource/Support Ticket'
#url = 'http://192.168.5.12:7676/api/method/login'
headers = {'content-type': 'application/x-www-form-urlencoded'}
frappe.errprint('data='+json.dumps(support_ticket))
response = requests.post(url, data='data='+json.dumps(support_ticket), headers=headers)
frappe.errprint(response)
frappe.errprint(response.text)
def validate(self):
self.update_status()
self.set_lead_contact(self.raised_by)
if self.status == "Closed":
from frappe.widgets.form.assign_to import clear
clear(self.doctype, self.name)
#self.on_update1()
self.send_email()
def set_lead_contact(self, email_id):
import email.utils
email_id = email.utils.parseaddr(email_id)
if email_id:
if not self.lead:
self.lead = frappe.db.get_value("Lead", {"email_id": email_id})
if not self.contact:
self.contact = frappe.db.get_value("Contact", {"email_id": email_id})
if not self.company:
self.company = frappe.db.get_value("Lead", self.lead, "company") or \
frappe.db.get_default("company")
def update_status(self):
status = frappe.db.get_value("Support Ticket", self.name, "status")
if self.status!="Open" and status =="Open" and not self.first_responded_on:
self.first_responded_on = now()
if self.status=="Closed" and status !="Closed":
self.resolution_date = now()
if self.status=="Open" and status !="Open":
# if no date, it should be set as None and not a blank string "", as per mysql strict config
self.resolution_date = None
@frappe.whitelist()
def set_status(name, status):
st = frappe.get_doc("Support Ticket", name)
st.status = status
st.save()
@frappe.whitelist()
def get_admin(name):
admin = frappe.db.sql("select email_id_admin from tabUser where name='administrator'")
frappe.errprint(admin)
frappe.errprint(frappe.session.get('user'))
if admin:
frappe.errprint("if")
return admin[0][0]
else:
frappe.errprint("else")
return frappe.session.get('user')
@frappe.whitelist()
def assing_future(name, assign_in_future,raised_by,assign_to):
frappe.errprint("in assign future")
from frappe.utils import get_url, cstr
if get_url()=='http://tailorpad.com':
check_entry = frappe.db.sql("""select assign_to from `tabAssing Master` where name = %s """, raised_by)
frappe.errprint("in assign")
if check_entry :
frappe.errprint("chk")
if assign_in_future=='No':
frappe.errprint("no")
frappe.db.sql("""delete from `tabAssing Master` where name = %s """, raised_by)
else :
frappe.errprint("Yes")
frappe.db.sql("""update `tabAssing Master` set assign_to=%s where name = %s """,(assign_to,raised_by))
else :
frappe.errprint("not chk")
if assign_in_future=='Yes':
frappe.errprint("Yes")
am = frappe.new_doc("Assing Master")
am.update({
"name": raised_by,
"assign_to": assign_to,
"raised_by":raised_by
})
am.insert()
def auto_close_tickets():
frappe.db.sql("""update `tabSupport Ticket` set status = 'Closed'
where status = 'Replied'
and date_sub(curdate(),interval 15 Day) > modified""")
@frappe.whitelist()
def reenable(name):
frappe.errprint("calling superadmin")
from frappe.utils import get_url, cstr
frappe.errprint(get_url())
if get_url()!='http://tailorpad.com':
frappe.errprint("in reenable")
from frappe.utils import get_url, cstr,add_months
from frappe import msgprint, throw, _
res = frappe.db.sql("select validity from `tabUser` where name='Administrator' and no_of_users >0")
if res:
         res1 = frappe.db.sql("select validity_end_date from `tabUser` where name = '"+cstr(name)+"' and validity_end_date < CURDATE()")
if res1:
            bc="update `tabUser` set validity_end_date=DATE_ADD(CURDATE(), INTERVAL "+cstr(res[0][0])+" MONTH) where name = '"+cstr(name)+"'"
frappe.db.sql(bc)
frappe.db.sql("update `tabUser`set no_of_users=no_of_users-1 where name='Administrator'")
else:
ab="update `tabUser` set validity_end_date=DATE_ADD(validity_end_date,INTERVAL "+cstr(res[0][0])+" MONTH) where name = '"+cstr(name)+"' "
frappe.errprint(ab)
frappe.db.sql(ab)
frappe.db.sql("update `tabUser`set no_of_users=no_of_users-1 where name='Administrator'")
else:
      frappe.throw(_("Your subscription plan has expired. Please purchase a subscription plan and enable the user."))
| agpl-3.0 | -8,289,523,798,107,514,000 | 34.26383 | 237 | 0.68879 | false |
afriestad/WikiLinks | kokekunster/settings_dev.py | 1 | 1482 | import os
from kokekunster.settings import BASE_DIR
# Settings for development environment
DEBUG = True
ALLOWED_HOSTS = ['*']
# "Secret" cryptographic key, only used during local development
SECRET_KEY = 'fc4_hb-wi32l^c&qpx6!m)o*xd(4ga$13(ese#pfj#pjxnmt0p'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# The development database requires postgresql to be installed on the machine.
# The following settings correspond to the default settings used by
# Postgres.app
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['USER'],
'USER': os.environ['USER'],
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '',
}
}
# For testing email
EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
EMAIL_FILE_PATH = os.path.join(BASE_DIR, 'tmp', 'email')
ADMINS = (
('Test Testesen', 'admin_email@domain.tld'),
('Testinne Testesen', 'admin_email2@domain.tld'),
)
# User uploaded files (MEDIA)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Settings for 'dbbackup' app such that it is easy to import production data
# to the dev environment
DBBACKUP_STORAGE = 'django.core.files.storage.FileSystemStorage'
DBBACKUP_STORAGE_OPTIONS = {
'location': os.path.join(BASE_DIR, 'tmp'),
}
# Use the PK for the localhost Site model here
SITE_ID = 1
# IPs which have access to the Django debug toolbar
INTERNAL_IPS = ('127.0.0.1',)
| mit | 8,427,073,580,051,762,000 | 22.15625 | 78 | 0.687584 | false |
pFernbach/hpp-rbprm-corba | script/tools/parse_bench_contactGen.py | 1 | 1861 | import sys
totalSuccess = 0.
totalTime = 0.
totalMuscodConverg = 0.
totalMuscodWarmStartConverg = 0.
totalCrocConverg = 0.
totalConf = 0.
totalIt = 0.
f = open("/local/fernbac/bench_iros18/bench_contactGeneration/walk_noCroc.log","r")
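# Expected log layout (inferred from the parsing loop below): each run starts with a
# line beginning with "new", followed by
#   success <True|False>
#   muscodNoWarmStart <True|False>
#   crocConverged <True|False>        (or muscodWarmStart <True|False>)
#   time <seconds>
#   configs <count>
# Parsing stops at the first line that does not start with "new".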
line = f.readline()
while line.startswith("new"):
totalIt += 1.
line = f.readline()
t = line.rstrip("\n").split(" ")
assert t[0].startswith("success")
success = t[1].startswith("True")
if success:
totalSuccess += 1.
line = f.readline()
t = line.rstrip("\n").split(" ")
assert t[0].startswith("muscodNoWarmStart")
if t[1].startswith("True"):
totalMuscodConverg += 1.
line = f.readline()
t = line.rstrip("\n").split(" ")
if t[0].startswith("crocConverged"):
if t[1].startswith("True"):
totalCrocConverg +=1.
elif t[0].startswith("muscodWarmStart"):
if t[1].startswith("True"):
totalMuscodWarmStartConverg += 1.
else :
print "badly formated log"
line = f.readline()
t = line.rstrip("\n").split(" ")
assert t[0].startswith("time")
if success :
totalTime += float(t[1])
line = f.readline()
t = line.rstrip("\n").split(" ")
assert t[0].startswith("configs")
if success :
totalConf += int(t[1])
line = f.readline()
print "For : "+str(totalIt)+" runs : "
print "success contact generation : "+str((totalSuccess/totalIt)*100.)+ " %"
print "success muscod : "+str((totalMuscodConverg/totalSuccess)*100.)+ " %"
print "success muscod Warm Start : "+str((totalMuscodWarmStartConverg/totalSuccess)*100.)+ " %"
print "success croc converged : : "+str((totalCrocConverg/totalSuccess)*100.)+ " %"
print "avg time : "+str(totalTime/totalSuccess)+ "s"
print "avg configs : "+str(totalConf/totalSuccess)
| lgpl-3.0 | 5,070,445,143,479,115,000 | 32.232143 | 97 | 0.602902 | false |
LordDarkula/chess_py | chess_py/core/board.py | 1 | 13656 | # -*- coding: utf-8 -*-
"""
Constructs board object which stores the get_location of all the pieces.
Default Array
| [[0th row 0th item, 0th row 1st item, 0th row 2nd item],
| [1st row 0th item, 1st row 1st item, 1st row 2nd item],
| [2nd row 0th item, 2nd row 1st item, 2nd row 2nd item]]
| Default board
| 8 ║♜ ♞ ♝ ♛ ♚ ♝ ♞ ♜ Black pieces
| 7 ║♟ ♟ ♟ ♟ ♟ ♟ ♟ ♟ Black pawns
| 6 ║a6… … … … … …h6
| 5 ║… … … … … … … …
| 4 ║… … … … … … … …
| 3 ║a3… … … … … …h3 Algebraic
| 2 ║♙ ♙ ♙ ♙ ♙ ♙ ♙ ♙ White pawns
| 1 ║♖ ♘ ♗ ♕ ♔ ♗ ♘ ♖ White pieces
| -—╚═══════════════
| ——-a b c d e f g h
Pieces on the board are flipped in position array so white home row is at index 0
and black home row is at index 7
| Copyright © 2016 Aubhro Sengupta. All rights reserved.
"""
from __future__ import print_function
import inspect
from multiprocessing import Process
from copy import copy as cp
from math import fabs
from .color import white, black
from .algebraic import notation_const
from .algebraic.location import Location
from .algebraic.move import Move
from ..pieces.piece import Piece
from ..pieces.bishop import Bishop
from ..pieces.king import King
from ..pieces.pawn import Pawn
from ..pieces.queen import Queen
from ..pieces.rook import Rook
from ..pieces.knight import Knight
class Board:
"""
Standard starting position in a chess game.
Initialized upon startup and is used when init_default constructor is used
"""
def __init__(self, position):
"""
Creates a ``Board`` given an array of ``Piece`` and ``None``
objects to represent the given position of the board.
:type: position: list
"""
self.position = position
self.possible_moves = dict()
try:
self.king_loc_dict = {white: self.find_king(white),
black: self.find_king(black)}
except ValueError:
self.king_loc_dict = None
@classmethod
def init_default(cls):
"""
Creates a ``Board`` with the standard chess starting position.
:rtype: Board
"""
return cls([
# First rank
[Rook(white, Location(0, 0)), Knight(white, Location(0, 1)), Bishop(white, Location(0, 2)),
Queen(white, Location(0, 3)), King(white, Location(0, 4)), Bishop(white, Location(0, 5)),
Knight(white, Location(0, 6)), Rook(white, Location(0, 7))],
# Second rank
[Pawn(white, Location(1, file)) for file in range(8)],
# Third rank
[None for _ in range(8)],
# Fourth rank
[None for _ in range(8)],
# Fifth rank
[None for _ in range(8)],
# Sixth rank
[None for _ in range(8)],
# Seventh rank
[Pawn(black, Location(6, file)) for file in range(8)],
# Eighth rank
[Rook(black, Location(7, 0)), Knight(black, Location(7, 1)), Bishop(black, Location(7, 2)),
Queen(black, Location(7, 3)), King(black, Location(7, 4)), Bishop(black, Location(7, 5)),
Knight(black, Location(7, 6)), Rook(black, Location(7, 7))]
])
@property
    def position_tuple(self):
        # Nested tuples (rather than generators) so the result is hashable and can
        # serve as the cache key used by all_possible_moves below.
        return tuple(tuple(str(piece) for piece in row) for row in self.position)
def __key(self):
return self.position
def __hash__(self):
return hash(tuple([hash(piece) for piece in self]))
def __eq__(self, other):
if not isinstance(other, self.__class__):
raise TypeError("Cannot compare other type to Board")
for i, row in enumerate(self.position):
for j, piece in enumerate(row):
if piece != other.position[i][j]:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
board_string = ""
for i, row in enumerate(self.position):
board_string += str(8 - i) + " "
for j, square in enumerate(row):
piece = self.piece_at_square(Location(7 - i, j))
if isinstance(piece, Piece):
board_string += piece.symbol + " "
else:
board_string += "_ "
board_string += "\n"
board_string += " a b c d e f g h"
return board_string
def __iter__(self):
for row in self.position:
for square in row:
yield square
def __copy__(self):
"""
Copies the board faster than deepcopy
:rtype: Board
"""
return Board([[cp(piece) or None
for piece in self.position[index]]
for index, row in enumerate(self.position)])
def piece_at_square(self, location):
"""
Finds the chess piece at a square of the position.
:type: location: Location
:rtype: Piece
"""
return self.position[location.rank][location.file]
def is_square_empty(self, location):
"""
Finds whether a chess piece occupies a square of the position.
:type: location: Location
:rtype: bool
"""
return self.position[location.rank][location.file] is None
def material_advantage(self, input_color, val_scheme):
"""
Finds the advantage a particular side possesses given a value scheme.
:type: input_color: Color
:type: val_scheme: PieceValues
:rtype: double
"""
if self.get_king(input_color).in_check(self) and self.no_moves(input_color):
return -100
if self.get_king(-input_color).in_check(self) and self.no_moves(-input_color):
return 100
return sum([val_scheme.val(piece, input_color) for piece in self])
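    # Rough usage sketch (the value scheme is assumed to expose a val(piece, color)
    # method, as used above; its concrete class is defined outside this module):
    #   advantage = board.material_advantage(white, piece_values)
    # gives the material balance from White's point of view, with +/-100 standing in
    # for a delivered/received checkmate.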
def advantage_as_result(self, move, val_scheme):
"""
Calculates advantage after move is played
:type: move: Move
:type: val_scheme: PieceValues
:rtype: double
"""
test_board = cp(self)
test_board.update(move)
return test_board.material_advantage(move.color, val_scheme)
def all_possible_moves(self, input_color):
"""
Checks if all the possible moves has already been calculated
and is stored in `possible_moves` dictionary. If not, it is calculated
with `_calc_all_possible_moves`.
:type: input_color: Color
:rtype: list
"""
position_tuple = self.position_tuple
if position_tuple not in self.possible_moves:
self.possible_moves[position_tuple] = tuple(self._calc_all_possible_moves(input_color))
return self.possible_moves[position_tuple]
def _calc_all_possible_moves(self, input_color):
"""
Returns list of all possible moves
:type: input_color: Color
:rtype: list
"""
for piece in self:
# Tests if square on the board is not empty
if piece is not None and piece.color == input_color:
for move in piece.possible_moves(self):
test = cp(self)
test_move = Move(end_loc=move.end_loc,
piece=test.piece_at_square(move.start_loc),
status=move.status,
start_loc=move.start_loc,
promoted_to_piece=move.promoted_to_piece)
test.update(test_move)
if self.king_loc_dict is None:
yield move
continue
my_king = test.piece_at_square(self.king_loc_dict[input_color])
if my_king is None or \
not isinstance(my_king, King) or \
my_king.color != input_color:
self.king_loc_dict[input_color] = test.find_king(input_color)
my_king = test.piece_at_square(self.king_loc_dict[input_color])
if not my_king.in_check(test):
yield move
    @staticmethod
    def runInParallel(*fns):
"""
Runs multiple processes in parallel.
:type: fns: def
"""
proc = []
for fn in fns:
p = Process(target=fn)
p.start()
proc.append(p)
for p in proc:
p.join()
def no_moves(self, input_color):
# Loops through columns
for piece in self:
# Tests if square on the board is not empty
if piece is not None and piece.color == input_color:
for move in piece.possible_moves(self):
test = cp(self)
test.update(move)
if not test.get_king(input_color).in_check(test):
return False
return True
def find_piece(self, piece):
"""
Finds Location of the first piece that matches piece.
If none is found, Exception is raised.
:type: piece: Piece
:rtype: Location
"""
for i, _ in enumerate(self.position):
for j, _ in enumerate(self.position):
loc = Location(i, j)
if not self.is_square_empty(loc) and \
self.piece_at_square(loc) == piece:
return loc
raise ValueError("{} \nPiece not found: {}".format(self, piece))
def get_piece(self, piece_type, input_color):
"""
Gets location of a piece on the board given the type and color.
:type: piece_type: Piece
:type: input_color: Color
:rtype: Location
"""
        # Note: __iter__ yields the pieces themselves (or None), not Locations, so
        # walk the squares directly and return the matching piece's location.
        for piece in self:
            if piece is not None and \
                    isinstance(piece, piece_type) and \
                    piece.color == input_color:
                return piece.location
raise Exception("{} \nPiece not found: {}".format(self, piece_type))
def find_king(self, input_color):
"""
Finds the Location of the King of input_color
:type: input_color: Color
:rtype: Location
"""
return self.find_piece(King(input_color, Location(0, 0)))
def get_king(self, input_color):
"""
Returns King of input_color
:type: input_color: Color
:rtype: King
"""
return self.piece_at_square(self.find_king(input_color))
def remove_piece_at_square(self, location):
"""
Removes piece at square
:type: location: Location
"""
self.position[location.rank][location.file] = None
def place_piece_at_square(self, piece, location):
"""
Places piece at given get_location
:type: piece: Piece
:type: location: Location
"""
self.position[location.rank][location.file] = piece
piece.location = location
def move_piece(self, initial, final):
"""
Moves piece from one location to another
:type: initial: Location
:type: final: Location
"""
self.place_piece_at_square(self.piece_at_square(initial), final)
self.remove_piece_at_square(initial)
def update(self, move):
"""
Updates position by applying selected move
:type: move: Move
"""
if move is None:
raise TypeError("Move cannot be type None")
if self.king_loc_dict is not None and isinstance(move.piece, King):
self.king_loc_dict[move.color] = move.end_loc
# Invalidates en-passant
for square in self:
pawn = square
if isinstance(pawn, Pawn):
pawn.just_moved_two_steps = False
# Sets King and Rook has_moved property to True is piece has moved
if type(move.piece) is King or type(move.piece) is Rook:
move.piece.has_moved = True
elif move.status == notation_const.MOVEMENT and \
isinstance(move.piece, Pawn) and \
fabs(move.end_loc.rank - move.start_loc.rank) == 2:
move.piece.just_moved_two_steps = True
if move.status == notation_const.KING_SIDE_CASTLE:
self.move_piece(Location(move.end_loc.rank, 7), Location(move.end_loc.rank, 5))
self.piece_at_square(Location(move.end_loc.rank, 5)).has_moved = True
elif move.status == notation_const.QUEEN_SIDE_CASTLE:
self.move_piece(Location(move.end_loc.rank, 0), Location(move.end_loc.rank, 3))
self.piece_at_square(Location(move.end_loc.rank, 3)).has_moved = True
elif move.status == notation_const.EN_PASSANT:
self.remove_piece_at_square(Location(move.start_loc.rank, move.end_loc.file))
elif move.status == notation_const.PROMOTE or \
move.status == notation_const.CAPTURE_AND_PROMOTE:
try:
self.remove_piece_at_square(move.start_loc)
self.place_piece_at_square(move.promoted_to_piece(move.color, move.end_loc), move.end_loc)
except TypeError as e:
raise ValueError("Promoted to piece cannot be None in Move {}\n{}".format(repr(move), e))
return
self.move_piece(move.piece.location, move.end_loc)
| mit | -1,198,334,598,218,724,400 | 30.571429 | 106 | 0.550108 | false |
nke001/attention-lvcsr | lvsr/extensions.py | 1 | 3223 | """Nice small extensions that maybe will it make to Blocks at some point."""
from __future__ import print_function
import subprocess
import pkgutil
import math
from theano.scan_module.scan_op import Scan
from blocks.extensions import TrainingExtension, SimpleExtension
class CGStatistics(SimpleExtension):
def __init__(self, **kwargs):
kwargs.setdefault('before_first_epoch', True)
kwargs.setdefault('on_resumption', True)
super(CGStatistics, self).__init__(**kwargs)
def do(self, *args, **kwargs):
print("Computation graph statistics:")
scan_nodes = [
node for node in self.main_loop.algorithm._function.maker.fgraph.apply_nodes
if isinstance(node.op, Scan)]
print("\tnumber of scan nodes:", len(scan_nodes))
class CodeVersion(SimpleExtension):
def __init__(self, packages, **kwargs):
self.packages = packages
kwargs.setdefault('before_training', True)
super(CodeVersion, self).__init__(**kwargs)
def do(self, *args, **kwargs):
package_paths = {name: loader.path
for loader, name, _ in pkgutil.iter_modules()}
for package in self.packages:
path = package_paths[package]
last_commit_record = "_{}_last_commit".format(package)
git_diff_record = "_{}_git_diff".format(package)
self.main_loop.log.status[last_commit_record] = (
subprocess.check_output("git --no-pager log -1",
cwd=path, shell=True))
self.main_loop.log.status[git_diff_record] = (
subprocess.check_output("git diff",
cwd=path, shell=True))
class IPDB(SimpleExtension):
def do(self, *args, **kwargs):
import ipdb; ipdb.set_trace()
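# Note on AdaptiveClipping below (a summary of after_batch, not extra behaviour): it
# keeps exponentially decayed running estimates of the mean and variance of
# log(gradient norm), read from `log_record` each batch, and resets the clipping
# rule's threshold to exp(mean + std), blended with `initial_threshold` during the
# first `burnin_period` iterations.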
class AdaptiveClipping(TrainingExtension):
def __init__(self, log_record, clipping_rule,
initial_threshold, burnin_period=100, decay_rate=0.99):
self.log_record = log_record
self.clipping_rule = clipping_rule
self.initial_threshold = initial_threshold
self.burnin_period = burnin_period
self.decay_rate = decay_rate
self.mean_gradient_norm = self.mean_gradient_norm2 = .0
def after_batch(self, batch):
gradient_norm = math.log(self.main_loop.log.current_row[self.log_record])
self.mean_gradient_norm = (self.decay_rate * self.mean_gradient_norm
+ (1 - self.decay_rate) * gradient_norm)
self.mean_gradient_norm2 = (self.decay_rate * self.mean_gradient_norm2
+ (1 - self.decay_rate) * gradient_norm ** 2)
self.std_gradient_norm = (
(self.mean_gradient_norm2 - self.mean_gradient_norm ** 2) ** .5)
threshold = math.exp(self.mean_gradient_norm + 1 * self.std_gradient_norm)
confidence = (min(
self.burnin_period, self.main_loop.status['iterations_done']) /
float(self.burnin_period))
threshold = (confidence * threshold +
(1 - confidence) * self.initial_threshold)
self.clipping_rule.threshold.set_value(threshold)
| mit | -8,607,135,214,068,613,000 | 39.2875 | 88 | 0.603785 | false |