repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---
jdugge/QGIS | tests/src/python/test_qgsserver_wfs.py | 7 | 34015 |
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer WFS.
From build dir, run: ctest -R PyQgsServerWFS -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'René-Luc Dhont'
__date__ = '19/09/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
import os
# Needed on Qt 5 so that the serialization of XML is consistent among all executions
os.environ['QT_HASH_SEED'] = '1'
import re
import urllib.request
import urllib.parse
import urllib.error
from qgis.server import QgsServerRequest
from qgis.testing import unittest
from qgis.PyQt.QtCore import QSize
from qgis.core import (
QgsVectorLayer,
QgsFeatureRequest,
QgsExpression,
QgsCoordinateReferenceSystem,
QgsCoordinateTransform,
QgsCoordinateTransformContext,
QgsGeometry,
)
import osgeo.gdal # NOQA
from test_qgsserver import QgsServerTestBase
# Strip path and content length because path may vary
RE_STRIP_UNCHECKABLE = br'MAP=[^"]+|Content-Length: \d+|timeStamp="[^"]+"'
RE_ATTRIBUTES = br'[^>\s]+=[^>\s]+'
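
# Illustrative sketch (not part of the original suite): how the volatile
# parts of a response are masked before comparison against a stored
# reference. The sample bytes below are hypothetical.
def _demo_strip_uncheckable():
    sample = (b'Content-Length: 512\r\n'
              b'<WFS_Capabilities timeStamp="2017-09-19T10:00:00">')
    # Both the Content-Length header and the timeStamp attribute are
    # blanked out, so they cannot cause spurious reference mismatches.
    return re.sub(RE_STRIP_UNCHECKABLE, b'', sample)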
class TestQgsServerWFS(QgsServerTestBase):
"""QGIS Server WFS Tests"""
# Set to True in child classes to re-generate reference files for this class
regenerate_reference = False
def wfs_request_compare(self,
request, version='',
extra_query_string='',
reference_base_name=None,
project_file="test_project_wfs.qgs",
requestMethod=QgsServerRequest.GetMethod,
data=None):
project = self.testdata_path + project_file
assert os.path.exists(project), "Project file not found: " + project
query_string = '?MAP=%s&SERVICE=WFS&REQUEST=%s' % (
urllib.parse.quote(project), request)
if version:
query_string += '&VERSION=%s' % version
if extra_query_string:
query_string += '&%s' % extra_query_string
header, body = self._execute_request(
query_string, requestMethod=requestMethod, data=data)
self.assert_headers(header, body)
response = header + body
if reference_base_name is not None:
reference_name = reference_base_name
else:
reference_name = 'wfs_' + request.lower()
if version == '1.0.0':
reference_name += '_1_0_0'
reference_name += '.txt'
reference_path = self.testdata_path + reference_name
self.store_reference(reference_path, response)
f = open(reference_path, 'rb')
expected = f.read()
f.close()
response = re.sub(RE_STRIP_UNCHECKABLE, b'', response)
expected = re.sub(RE_STRIP_UNCHECKABLE, b'', expected)
self.assertXMLEqual(response, expected, msg="request %s failed.\n Query: %s" % (
query_string, request))
return header, body
def test_operation_not_supported(self):
qs = '?MAP=%s&SERVICE=WFS&VERSION=1.1.0&REQUEST=NotAValidRequest' % urllib.parse.quote(self.projectPath)
self._assert_status_code(501, qs)
def test_project_wfs(self):
"""Test some WFS request"""
for request in ('GetCapabilities', 'DescribeFeatureType'):
self.wfs_request_compare(request)
self.wfs_request_compare(request, '1.0.0')
def wfs_getfeature_compare(self, requestid, request):
project = self.testdata_path + "test_project_wfs.qgs"
assert os.path.exists(project), "Project file not found: " + project
query_string = '?MAP=%s&SERVICE=WFS&VERSION=1.0.0&REQUEST=%s' % (
urllib.parse.quote(project), request)
header, body = self._execute_request(query_string)
if requestid == 'hits':
body = re.sub(br'timeStamp="\d+-\d+-\d+T\d+:\d+:\d+"',
b'timeStamp="****-**-**T**:**:**"', body)
self.result_compare(
'wfs_getfeature_' + requestid + '.txt',
"request %s failed.\n Query: %s" % (
query_string,
request,
),
header, body
)
def test_getfeature(self):
tests = []
tests.append(('nobbox', 'GetFeature&TYPENAME=testlayer'))
tests.append(
('startindex2', 'GetFeature&TYPENAME=testlayer&STARTINDEX=2'))
tests.append(('limit2', 'GetFeature&TYPENAME=testlayer&MAXFEATURES=2'))
tests.append(
('start1_limit1', 'GetFeature&TYPENAME=testlayer&MAXFEATURES=1&STARTINDEX=1'))
tests.append(
('srsname', 'GetFeature&TYPENAME=testlayer&SRSNAME=EPSG:3857'))
tests.append(('sortby', 'GetFeature&TYPENAME=testlayer&SORTBY=id D'))
tests.append(('hits', 'GetFeature&TYPENAME=testlayer&RESULTTYPE=hits'))
for id, req in tests:
self.wfs_getfeature_compare(id, req)
def test_wfs_getcapabilities_100_url(self):
"""Check that URL in GetCapabilities response is complete"""
# empty url in project
project = os.path.join(
self.testdata_path, "test_project_without_urls.qgs")
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(project),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetCapabilities"
}.items())])
r, h = self._result(self._execute_request(qs))
for item in str(r).split("\\n"):
if "onlineResource" in item:
self.assertEqual("onlineResource=\"?" in item, True)
# url well defined in query string
project = os.path.join(
self.testdata_path, "test_project_without_urls.qgs")
qs = "https://www.qgis-server.org?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(project),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetCapabilities"
}.items())])
r, h = self._result(self._execute_request(qs))
for item in str(r).split("\\n"):
if "onlineResource" in item:
                self.assertTrue(
                    "onlineResource=\"https://www.qgis-server.org?" in item)
# url well defined in project
project = os.path.join(
self.testdata_path, "test_project_with_urls.qgs")
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(project),
"SERVICE": "WFS",
"VERSION": "1.0.0",
"REQUEST": "GetCapabilities"
}.items())])
r, h = self._result(self._execute_request(qs))
for item in str(r).split("\\n"):
if "onlineResource" in item:
                self.assertTrue(
                    "onlineResource=\"my_wfs_advertised_url\"" in item)
def result_compare(self, file_name, error_msg_header, header, body):
self.assert_headers(header, body)
response = header + body
reference_path = self.testdata_path + file_name
self.store_reference(reference_path, response)
f = open(reference_path, 'rb')
expected = f.read()
f.close()
response = re.sub(RE_STRIP_UNCHECKABLE, b'', response)
expected = re.sub(RE_STRIP_UNCHECKABLE, b'', expected)
self.assertXMLEqual(response, expected, msg="%s\n" %
(error_msg_header))
def wfs_getfeature_post_compare(self, requestid, request):
project = self.testdata_path + "test_project_wfs.qgs"
assert os.path.exists(project), "Project file not found: " + project
query_string = '?MAP={}'.format(urllib.parse.quote(project))
header, body = self._execute_request(
query_string, requestMethod=QgsServerRequest.PostMethod, data=request.encode('utf-8'))
self.result_compare(
'wfs_getfeature_{}.txt'.format(requestid),
"GetFeature in POST for '{}' failed.".format(requestid),
header, body,
)
def test_getfeature_post(self):
tests = []
template = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>8 44</gml:lowerCorner>
<gml:upperCorner>9 45</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('nobbox_post', template.format("")))
tests.append(('startindex2_post', template.format('startIndex="2"')))
tests.append(('limit2_post', template.format('maxFeatures="2"')))
tests.append(('start1_limit1_post', template.format(
'startIndex="1" maxFeatures="1"')))
srsTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>890555.92634619 5465442.18332275</gml:lowerCorner>
<gml:upperCorner>1001875.41713946 5621521.48619207</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('srsname_post', srsTemplate.format("")))
# Issue https://github.com/qgis/QGIS/issues/36398
# Check get feature within polygon having srsName=EPSG:4326 (same as the project/layer)
within4326FilterTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:4326" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<Within>
<PropertyName>geometry</PropertyName>
<Polygon xmlns="http://www.opengis.net/gml" srsName="EPSG:4326">
<exterior>
<LinearRing>
<posList srsDimension="2">
8.20344131 44.90137909
8.20347748 44.90137909
8.20347748 44.90141005
8.20344131 44.90141005
8.20344131 44.90137909
</posList>
</LinearRing>
</exterior>
</Polygon>
</Within>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('within4326FilterTemplate_post', within4326FilterTemplate.format("")))
# Check get feature within polygon having srsName=EPSG:3857 (different from the project/layer)
# The coordinates are converted from the one in 4326
within3857FilterTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<Within>
<PropertyName>geometry</PropertyName>
<Polygon xmlns="http://www.opengis.net/gml" srsName="EPSG:3857">
<exterior>
<LinearRing>
<posList srsDimension="2">
913202.90938171 5606008.98136456
913206.93580769 5606008.98136456
913206.93580769 5606013.84701639
913202.90938171 5606013.84701639
913202.90938171 5606008.98136456
</posList>
</LinearRing>
</exterior>
</Polygon>
</Within>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('within3857FilterTemplate_post', within3857FilterTemplate.format("")))
srsTwoLayersTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>890555.92634619 5465442.18332275</gml:lowerCorner>
<gml:upperCorner>1001875.41713946 5621521.48619207</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>890555.92634619 5465442.18332275</gml:lowerCorner>
<gml:upperCorner>1001875.41713946 5621521.48619207</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('srs_two_layers_post', srsTwoLayersTemplate.format("")))
sortTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>8 44</gml:lowerCorner>
<gml:upperCorner>9 45</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
<ogc:SortBy>
<ogc:SortProperty>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:SortOrder>DESC</ogc:SortOrder>
</ogc:SortProperty>
</ogc:SortBy>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('sortby_post', sortTemplate.format("")))
andTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:And>
<ogc:PropertyIsGreaterThan>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsGreaterThan>
<ogc:PropertyIsLessThan>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:Literal>3</ogc:Literal>
</ogc:PropertyIsLessThan>
</ogc:And>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('and_post', andTemplate.format("")))
andBboxTemplate = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:And>
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>890555.92634619 5465442.18332275</gml:lowerCorner>
<gml:upperCorner>1001875.41713946 5621521.48619207</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
<ogc:PropertyIsGreaterThan>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsGreaterThan>
<ogc:PropertyIsLessThan>
<ogc:PropertyName>id</ogc:PropertyName>
<ogc:Literal>3</ogc:Literal>
</ogc:PropertyIsLessThan>
</ogc:And>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('bbox_inside_and_post', andBboxTemplate.format("")))
# With namespace
template = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="feature:testlayer" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>8 44</gml:lowerCorner>
<gml:upperCorner>9 45</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('nobbox_post', template.format("")))
template = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature service="WFS" version="1.0.0" {} xmlns:wfs="http://www.opengis.net/wfs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<wfs:Query typeName="testlayer" xmlns="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:BBOX>
<ogc:PropertyName>geometry</ogc:PropertyName>
<gml:Envelope xmlns:gml="http://www.opengis.net/gml">
<gml:lowerCorner>8 44</gml:lowerCorner>
<gml:upperCorner>9 45</gml:upperCorner>
</gml:Envelope>
</ogc:BBOX>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
tests.append(('nobbox_post', template.format("")))
for id, req in tests:
self.wfs_getfeature_post_compare(id, req)
def test_getFeatureBBOX(self):
"""Test with (1.1.0) and without (1.0.0) CRS"""
# Tests without CRS
self.wfs_request_compare(
"GetFeature", '1.0.0', "TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.20347,44.901471,8.2035354,44.901493", 'wfs_getFeature_1_0_0_bbox_1_feature')
self.wfs_request_compare(
"GetFeature", '1.0.0', "TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.203127,44.9012765,8.204138,44.901632", 'wfs_getFeature_1_0_0_bbox_3_feature')
# Tests with CRS
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.20347,44.901471,8.2035354,44.901493,EPSG:4326", 'wfs_getFeature_1_0_0_epsgbbox_1_feature')
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.203127,44.9012765,8.204138,44.901632,EPSG:4326", 'wfs_getFeature_1_0_0_epsgbbox_3_feature')
self.wfs_request_compare(
"GetFeature", '1.1.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.20347,44.901471,8.2035354,44.901493,EPSG:4326", 'wfs_getFeature_1_1_0_epsgbbox_1_feature')
self.wfs_request_compare(
"GetFeature", '1.1.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=8.203127,44.9012765,8.204138,44.901632,EPSG:4326", 'wfs_getFeature_1_1_0_epsgbbox_3_feature')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913144,5605992,913303,5606048,EPSG:3857",
'wfs_getFeature_1_0_0_epsgbbox_3_feature_3857')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913206,5606024,913213,5606026,EPSG:3857",
'wfs_getFeature_1_0_0_epsgbbox_1_feature_3857')
self.wfs_request_compare("GetFeature", '1.1.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913144,5605992,913303,5606048,EPSG:3857",
'wfs_getFeature_1_1_0_epsgbbox_3_feature_3857')
self.wfs_request_compare("GetFeature", '1.1.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913206,5606024,913213,5606026,EPSG:3857",
'wfs_getFeature_1_1_0_epsgbbox_1_feature_3857')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:3857&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913144,5605992,913303,5606048,EPSG:3857",
'wfs_getFeature_1_0_0_epsgbbox_3_feature_3857')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:3857&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913206,5606024,913213,5606026,EPSG:3857",
'wfs_getFeature_1_0_0_epsgbbox_1_feature_3857')
self.wfs_request_compare("GetFeature", '1.1.0', "SRSNAME=EPSG:3857&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913144,5605992,913303,5606048,EPSG:3857",
'wfs_getFeature_1_1_0_epsgbbox_3_feature_3857')
self.wfs_request_compare("GetFeature", '1.1.0', "SRSNAME=EPSG:3857&TYPENAME=testlayer&RESULTTYPE=hits&BBOX=913206,5606024,913213,5606026,EPSG:3857",
'wfs_getFeature_1_1_0_epsgbbox_1_feature_3857')
def test_getFeatureFeatureId(self):
"""Test GetFeature with featureid"""
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&FEATUREID=testlayer.0", 'wfs_getFeature_1_0_0_featureid_0')
def test_getFeature_EXP_FILTER_regression_20927(self):
"""Test expressions with EXP_FILTER"""
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&FEATUREID=testlayer.0&EXP_FILTER=\"name\"='one'", 'wfs_getFeature_1_0_0_EXP_FILTER_FID_one')
# Note that FEATUREID takes precedence over EXP_FILTER and the filter is completely ignored when FEATUREID is set
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&FEATUREID=testlayer.0&EXP_FILTER=\"name\"='two'", 'wfs_getFeature_1_0_0_EXP_FILTER_FID_one')
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=\"name\"='two'", 'wfs_getFeature_1_0_0_EXP_FILTER_two')
self.wfs_request_compare(
"GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=\"name\"=concat('tw', 'o')", 'wfs_getFeature_1_0_0_EXP_FILTER_two')
# Syntax ok but function does not exist
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=\"name\"=invalid_expression('tw', 'o')",
'wfs_getFeature_1_0_0_EXP_FILTER_invalid_expression')
# Syntax error in exp
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=\"name\"=concat('tw, 'o')",
'wfs_getFeature_1_0_0_EXP_FILTER_syntax_error')
# BBOX gml expressions
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=intersects($geometry, geom_from_gml('<gml:Box> <gml:coordinates cs=\",\" ts=\" \">8.20344750430995617,44.9013881888184514 8.20347909100379269,44.90140004005827024</gml:coordinates></gml:Box>'))", 'wfs_getFeature_1_0_0_EXP_FILTER_gml_bbox_three')
self.wfs_request_compare("GetFeature", '1.0.0', "SRSNAME=EPSG:4326&TYPENAME=testlayer&EXP_FILTER=intersects($geometry, geom_from_gml('<gml:Box> <gml:coordinates cs=\",\" ts=\" \">8.20348458304175665,44.90147459621791626 8.20351616973559317,44.9014864474577351</gml:coordinates></gml:Box>'))", 'wfs_getFeature_1_0_0_EXP_FILTER_gml_bbox_one')
def test_describeFeatureType(self):
"""Test DescribeFeatureType with TYPENAME filters"""
project_file = "test_project_wms_grouped_layers.qgs"
self.wfs_request_compare("DescribeFeatureType", '1.0.0', "TYPENAME=as_areas&",
'wfs_describeFeatureType_1_0_0_typename_as_areas', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.1.0', "TYPENAME=as_areas&",
'wfs_describeFeatureType_1_1_0_typename_as_areas', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.0.0', "",
'wfs_describeFeatureType_1_0_0_typename_empty', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.1.0', "",
'wfs_describeFeatureType_1_1_0_typename_empty', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.0.0', "TYPENAME=does_not_exist&",
'wfs_describeFeatureType_1_0_0_typename_wrong', project_file=project_file)
self.wfs_request_compare("DescribeFeatureType", '1.1.0', "TYPENAME=does_not_exist&",
'wfs_describeFeatureType_1_1_0_typename_wrong', project_file=project_file)
def test_describeFeatureTypeVirtualFields(self):
"""Test DescribeFeatureType with virtual fields: bug GH-29767"""
project_file = "bug_gh29767_double_vfield.qgs"
self.wfs_request_compare("DescribeFeatureType", '1.1.0', "",
'wfs_describeFeatureType_1_1_0_virtual_fields', project_file=project_file)
def test_getFeatureFeature_0_nulls(self):
"""Test that 0 and null in integer columns are reported correctly"""
# Test transactions with 0 and nulls
post_data = """<?xml version="1.0" ?>
<wfs:Transaction service="WFS" version="{version}"
xmlns:ogc="http://www.opengis.net/ogc"
xmlns:wfs="http://www.opengis.net/wfs"
xmlns:gml="http://www.opengis.net/gml">
<wfs:Update typeName="cdb_lines">
<wfs:Property>
<wfs:Name>{field}</wfs:Name>
<wfs:Value>{value}</wfs:Value>
</wfs:Property>
<fes:Filter>
<fes:FeatureId fid="cdb_lines.22"/>
</fes:Filter>
</wfs:Update>
</wfs:Transaction>
"""
def _round_trip(value, field, version='1.1.0'):
"""Set a value on fid 22 and field and check it back"""
encoded_data = post_data.format(field=field, value=value, version=version).encode('utf8')
# Strip the field if NULL
if value is None:
encoded_data = encoded_data.replace(b'<wfs:Value>None</wfs:Value>', b'')
header, body = self._execute_request("?MAP=%s&SERVICE=WFS&VERSION=%s" % (
self.testdata_path + 'test_project_wms_grouped_layers.qgs', version), QgsServerRequest.PostMethod, encoded_data)
if version == '1.0.0':
self.assertTrue(b'<SUCCESS/>' in body, body)
else:
self.assertTrue(b'<totalUpdated>1</totalUpdated>' in body, body)
header, body = self._execute_request("?MAP=%s&SERVICE=WFS&REQUEST=GetFeature&TYPENAME=cdb_lines&FEATUREID=cdb_lines.22" % (
self.testdata_path + 'test_project_wms_grouped_layers.qgs'))
if value is not None:
xml_value = '<qgs:{0}>{1}</qgs:{0}>'.format(field, value).encode('utf8')
self.assertTrue(xml_value in body, "%s not found in body" % xml_value)
else:
xml_value = '<qgs:{0}>'.format(field).encode('utf8')
self.assertFalse(xml_value in body)
# Check the backend
vl = QgsVectorLayer(
self.testdata_path + 'test_project_wms_grouped_layers.gpkg|layername=cdb_lines', 'vl', 'ogr')
self.assertTrue(vl.isValid())
self.assertEqual(
str(vl.getFeature(22)[field]), value if value is not None else 'NULL')
for version in ('1.0.0', '1.1.0'):
_round_trip('0', 'id_long', version)
_round_trip('12345', 'id_long', version)
_round_trip('0', 'id', version)
_round_trip('12345', 'id', version)
_round_trip(None, 'id', version)
_round_trip(None, 'id_long', version)
# "name" is NOT NULL: try to set it to empty string
_round_trip('', 'name', version)
# Then NULL
data = post_data.format(field='name', value='', version=version).encode('utf8')
encoded_data = data.replace(b'<wfs:Value></wfs:Value>', b'')
header, body = self._execute_request("?MAP=%s&SERVICE=WFS" % (
self.testdata_path + 'test_project_wms_grouped_layers.qgs'), QgsServerRequest.PostMethod, encoded_data)
if version == '1.0.0':
self.assertTrue(b'<ERROR/>' in body, body)
else:
self.assertTrue(b'<totalUpdated>0</totalUpdated>' in body)
self.assertTrue(b'<Message>NOT NULL constraint error on layer \'cdb_lines\', field \'name\'</Message>' in body, body)
def test_describeFeatureTypeGeometryless(self):
"""Test DescribeFeatureType with geometryless tables - bug GH-30381"""
project_file = "test_project_geometryless_gh30381.qgs"
self.wfs_request_compare("DescribeFeatureType", '1.1.0',
reference_base_name='wfs_describeFeatureType_1_1_0_geometryless',
project_file=project_file)
def test_getFeatureFeatureIdJson(self):
"""Test GetFeature with featureid JSON format and various content types"""
for ct in ('GeoJSON', 'application/vnd.geo+json', 'application/json', 'application/geo+json'):
self.wfs_request_compare(
"GetFeature",
'1.0.0',
("OUTPUTFORMAT=%s" % ct)
+ "&SRSNAME=EPSG:4326&TYPENAME=testlayer&FEATUREID=testlayer.0",
'wfs_getFeature_1_0_0_featureid_0_json')
def test_insert_srsName(self):
"""Test srsName is respected when insering"""
post_data = """
<Transaction xmlns="http://www.opengis.net/wfs" xsi:schemaLocation="http://www.qgis.org/gml http://localhost:8000/?SERVICE=WFS&REQUEST=DescribeFeatureType&VERSION=1.0.0&TYPENAME=as_symbols" service="WFS" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="{version}" xmlns:gml="http://www.opengis.net/gml">
<Insert xmlns="http://www.opengis.net/wfs">
<as_symbols xmlns="http://www.qgis.org/gml">
<name xmlns="http://www.qgis.org/gml">{name}</name>
<geometry xmlns="http://www.qgis.org/gml">
<gml:Point srsName="{srsName}">
<gml:coordinates cs="," ts=" ">{coordinates}</gml:coordinates>
</gml:Point>
</geometry>
</as_symbols>
</Insert>
</Transaction>
"""
project = self.testdata_path + \
"test_project_wms_grouped_layers.qgs"
assert os.path.exists(project), "Project file not found: " + project
query_string = '?SERVICE=WFS&MAP={}'.format(
urllib.parse.quote(project))
request = post_data.format(
name='4326-test1',
version='1.1.0',
srsName='EPSG:4326',
coordinates='10.67,52.48'
)
header, body = self._execute_request(
query_string, requestMethod=QgsServerRequest.PostMethod, data=request.encode('utf-8'))
# Verify
vl = QgsVectorLayer(self.testdata_path + 'test_project_wms_grouped_layers.gpkg|layername=as_symbols', 'as_symbols')
self.assertTrue(vl.isValid())
feature = next(vl.getFeatures(QgsFeatureRequest(QgsExpression('"name" = \'4326-test1\''))))
geom = feature.geometry()
tr = QgsCoordinateTransform(QgsCoordinateReferenceSystem.fromEpsgId(4326), vl.crs(), QgsCoordinateTransformContext())
geom_4326 = QgsGeometry.fromWkt('point( 10.67 52.48)')
geom_4326.transform(tr)
self.assertEqual(geom.asWkt(0), geom_4326.asWkt(0))
# Now: insert a feature in layer's CRS
request = post_data.format(
name='25832-test1',
version='1.1.0',
srsName='EPSG:25832',
coordinates='613412,5815738'
)
header, body = self._execute_request(
query_string, requestMethod=QgsServerRequest.PostMethod, data=request.encode('utf-8'))
feature = next(vl.getFeatures(QgsFeatureRequest(QgsExpression('"name" = \'25832-test1\''))))
geom = feature.geometry()
self.assertEqual(geom.asWkt(0), geom_4326.asWkt(0))
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -1,979,046,791,227,349,800 | 47.110325 | 350 | 0.616864 | false |
Mauricio3000/fk_ik_sine_rig | tests/test_rig/test_sine_rig.py | 1 | 1187 |
import unittest
import pymel.core as pm
from tool.errors import errors
from tool.rig import sine_rig
class Test_sine_rig(unittest.TestCase):
def test_sine_rig_build_errors(self):
self.assertRaises(errors.InputError, sine_rig.build)
self.assertRaises(errors.InputError, sine_rig.build,
'temp', 'plane', 'reg_node')
def test_sine_rig_build(self):
name = 'temp'
crv = pm.circle()[0]
reg_node = pm.nt.Transform()
cnt = pm.circle()[0]
reg_node.addAttr('temp1_ik_cnt', at='message')
reg_node.addAttr('version', at='message')
reg_node.addAttr('reg_node', at='message')
cnt.message >> reg_node.temp1_ik_cnt
reg_node = sine_rig.build(name, crv, reg_node)
for a in ['wavelength', 'amplitude', 'sineOffOn',
'offset', 'direction']:
self.assertTrue(hasattr(cnt, a))
self.assertTrue(hasattr(reg_node, 'sine_handle'))
self.assertTrue(pm.PyNode('%s_sineDeformer' % name).objExists())
def suite():
return unittest.TestLoader().loadTestsFromTestCase(Test_sine_rig)
if __name__ == "__main__":
    unittest.main()
| gpl-3.0 | 1,519,589,025,920,111,400 | 31.081081 | 72 | 0.608256 | false |
AltSchool/django | tests/gis_tests/layermap/models.py | 235 | 2523 |
from django.utils.encoding import python_2_unicode_compatible
from ..models import models
@python_2_unicode_compatible
class NamedModel(models.Model):
name = models.CharField(max_length=25)
objects = models.GeoManager()
class Meta:
abstract = True
required_db_features = ['gis_enabled']
def __str__(self):
return self.name
class State(NamedModel):
pass
class County(NamedModel):
state = models.ForeignKey(State, models.CASCADE)
mpoly = models.MultiPolygonField(srid=4269) # Multipolygon in NAD83
class CountyFeat(NamedModel):
poly = models.PolygonField(srid=4269)
class City(NamedModel):
name_txt = models.TextField(default='')
name_short = models.CharField(max_length=5)
population = models.IntegerField()
density = models.DecimalField(max_digits=7, decimal_places=1)
dt = models.DateField()
point = models.PointField()
class Meta:
app_label = 'layermap'
required_db_features = ['gis_enabled']
class Interstate(NamedModel):
length = models.DecimalField(max_digits=6, decimal_places=2)
path = models.LineStringField()
class Meta:
app_label = 'layermap'
required_db_features = ['gis_enabled']
# Same as `City` above, but for testing model inheritance.
class CityBase(NamedModel):
population = models.IntegerField()
density = models.DecimalField(max_digits=7, decimal_places=1)
point = models.PointField()
class ICity1(CityBase):
dt = models.DateField()
class Meta(CityBase.Meta):
pass
class ICity2(ICity1):
dt_time = models.DateTimeField(auto_now=True)
class Meta(ICity1.Meta):
pass
class Invalid(models.Model):
point = models.PointField()
class Meta:
required_db_features = ['gis_enabled']
# Mapping dictionaries for the models above.
co_mapping = {
'name': 'Name',
# ForeignKey's use another mapping dictionary for the _related_ Model (State in this case).
'state': {'name': 'State'},
'mpoly': 'MULTIPOLYGON', # Will convert POLYGON features into MULTIPOLYGONS.
}
cofeat_mapping = {'name': 'Name',
'poly': 'POLYGON',
}
city_mapping = {'name': 'Name',
'population': 'Population',
'density': 'Density',
'dt': 'Created',
'point': 'POINT',
}
inter_mapping = {'name': 'Name',
'length': 'Length',
'path': 'LINESTRING',
}
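
# Usage sketch (the shapefile path below is hypothetical): the mapping
# dictionaries above are consumed by LayerMapping to import features
# into the corresponding models.
#
#   from django.contrib.gis.utils import LayerMapping
#   lm = LayerMapping(County, '/path/to/counties.shp', co_mapping)
#   lm.save(strict=True)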
| bsd-3-clause | 6,786,598,340,705,368,000 | 23.028571 | 95 | 0.62505 | false |
collective/cyn.in | products/WebServerAuth/tests/test_extraction.py | 4 | 2764 |
"""Unit tests for extraction plugin"""
from Products.PloneTestCase import PloneTestCase
from Products.CMFCore.utils import getToolByName
from Products.WebServerAuth.utils import firstInstanceOfClass
from Products.WebServerAuth.plugin import usernameKey, defaultUsernameHeader, stripDomainNamesKey, usernameHeaderKey
from Products.WebServerAuth.tests.base import WebServerAuthTestCase
PloneTestCase.installProduct('WebServerAuth')
PloneTestCase.setupPloneSite(products=['WebServerAuth'])
_username = 'someUsername'
_domain = 'example.com'
_userAtDomain = '%s@%s' % (_username, _domain)
class _MockRequest(object):
def __init__(self, environ=None):
self.environ = environ or {}
class TestExtraction(WebServerAuthTestCase):
def afterSetUp(self):
self.plugin = self._plugin()
def testDefaultExtraction(self):
"""Assert default behavior of extraction works."""
request = _MockRequest()
self.failUnless(self.plugin.extractCredentials(request) is None, msg="Found credentials to extract, even though we shouldn't have.")
request.environ[defaultUsernameHeader] = _username
self.failUnlessEqual(self.plugin.extractCredentials(request), {usernameKey: _username})
# Make sure the domain name gets stripped off the end of the username by default:
request.environ[defaultUsernameHeader] = _userAtDomain
self.failUnlessEqual(self.plugin.extractCredentials(request), {usernameKey: _username})
def testUsernameHeaderCustomization(self):
"""Assert the name of the header in which the username is passed can be changed."""
alternateHeader = 'HTTP_REMOTE_USER'
request = _MockRequest(environ={alternateHeader: _username})
saveHeader = self.plugin.config[usernameHeaderKey]
self.plugin.config[usernameHeaderKey] = alternateHeader
try:
self.failUnlessEqual(self.plugin.extractCredentials(request), {usernameKey: _username})
finally:
self.plugin.config[usernameHeaderKey] = saveHeader
def testDomainStripping(self):
"""Assert choosing to not strip the domain off the end of a whatever@here.com username works."""
request = _MockRequest(environ={defaultUsernameHeader: _userAtDomain})
saveStrip = self.plugin.config[stripDomainNamesKey]
self.plugin.config[stripDomainNamesKey] = False
try:
self.failUnlessEqual(self.plugin.extractCredentials(request), {usernameKey: _userAtDomain})
finally:
self.plugin.config[stripDomainNamesKey] = saveStrip
def test_suite():
from unittest import TestSuite, makeSuite
suite = TestSuite()
suite.addTest(makeSuite(TestExtraction))
return suite
| gpl-3.0 | 2,585,586,399,240,321,500 | 42.1875 | 140 | 0.72178 | false |
dhanunjaya/neutron | neutron/agent/l3/dvr.py | 26 | 2827 |
# Copyright (c) 2014 Openstack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import weakref
from oslo_log import log as logging
from neutron.agent.l3 import dvr_fip_ns
from neutron.agent.l3 import dvr_snat_ns
LOG = logging.getLogger(__name__)
# TODO(Carl) Following constants retained to increase SNR during refactoring
SNAT_INT_DEV_PREFIX = dvr_snat_ns.SNAT_INT_DEV_PREFIX
SNAT_NS_PREFIX = dvr_snat_ns.SNAT_NS_PREFIX
class AgentMixin(object):
def __init__(self, host):
# dvr data
self._fip_namespaces = weakref.WeakValueDictionary()
super(AgentMixin, self).__init__(host)
def get_fip_ns(self, ext_net_id):
# TODO(Carl) is this necessary? Code that this replaced was careful to
# convert these to string like this so I preserved that.
ext_net_id = str(ext_net_id)
fip_ns = self._fip_namespaces.get(ext_net_id)
if fip_ns and not fip_ns.destroyed:
return fip_ns
fip_ns = dvr_fip_ns.FipNamespace(ext_net_id,
self.conf,
self.driver,
self.use_ipv6)
self._fip_namespaces[ext_net_id] = fip_ns
return fip_ns
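
    # Usage sketch (not part of the agent code path): repeated lookups
    # for the same external network id reuse a single FipNamespace while
    # it is alive, because _fip_namespaces holds only weak references:
    #
    #   fip_ns_a = agent.get_fip_ns(ext_net_id)
    #   fip_ns_b = agent.get_fip_ns(ext_net_id)
    #   assert fip_ns_a is fip_ns_b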
def get_ports_by_subnet(self, subnet_id):
return self.plugin_rpc.get_ports_by_subnet(self.context, subnet_id)
def add_arp_entry(self, context, payload):
"""Add arp entry into router namespace. Called from RPC."""
router_id = payload['router_id']
ri = self.router_info.get(router_id)
if not ri:
return
arp_table = payload['arp_table']
ip = arp_table['ip_address']
mac = arp_table['mac_address']
subnet_id = arp_table['subnet_id']
ri._update_arp_entry(ip, mac, subnet_id, 'add')
def del_arp_entry(self, context, payload):
"""Delete arp entry from router namespace. Called from RPC."""
router_id = payload['router_id']
ri = self.router_info.get(router_id)
if not ri:
return
arp_table = payload['arp_table']
ip = arp_table['ip_address']
mac = arp_table['mac_address']
subnet_id = arp_table['subnet_id']
ri._update_arp_entry(ip, mac, subnet_id, 'delete')
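
    # Payload sketch for the two RPC handlers above (all values are
    # hypothetical):
    #
    #   payload = {
    #       'router_id': 'a-router-uuid',
    #       'arp_table': {
    #           'ip_address': '10.0.0.5',
    #           'mac_address': 'fa:16:3e:00:00:01',
    #           'subnet_id': 'a-subnet-uuid',
    #       },
    #   }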
| apache-2.0 | 7,444,266,319,862,596,000 | 34.78481 | 79 | 0.619031 | false |
ThiagoGarciaAlves/erpnext | erpnext/accounts/doctype/cost_center/cost_center.py | 16 | 2789 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import msgprint, _
from frappe.utils.nestedset import NestedSet
class CostCenter(NestedSet):
nsm_parent_field = 'parent_cost_center'
def autoname(self):
self.name = self.cost_center_name.strip() + ' - ' + \
frappe.db.get_value("Company", self.company, "abbr")
def validate_mandatory(self):
if self.cost_center_name != self.company and not self.parent_cost_center:
msgprint(_("Please enter parent cost center"), raise_exception=1)
elif self.cost_center_name == self.company and self.parent_cost_center:
msgprint(_("Root cannot have a parent cost center"), raise_exception=1)
def convert_group_to_ledger(self):
if self.check_if_child_exists():
msgprint(_("Cannot convert Cost Center to ledger as it has child nodes"), raise_exception=1)
elif self.check_gle_exists():
msgprint(_("Cost Center with existing transactions can not be converted to ledger"), raise_exception=1)
else:
self.is_group = 0
self.save()
return 1
def convert_ledger_to_group(self):
if self.check_gle_exists():
msgprint(_("Cost Center with existing transactions can not be converted to group"), raise_exception=1)
else:
self.is_group = 1
self.save()
return 1
def check_gle_exists(self):
return frappe.db.get_value("GL Entry", {"cost_center": self.name})
def check_if_child_exists(self):
return frappe.db.sql("select name from `tabCost Center` where \
parent_cost_center = %s and docstatus != 2", self.name)
def validate_budget_details(self):
check_acc_list = []
for d in self.get('budgets'):
if self.is_group==1:
msgprint(_("Budget cannot be set for Group Cost Centers"), raise_exception=1)
if [d.account, d.fiscal_year] in check_acc_list:
msgprint(_("Account {0} has been entered more than once for fiscal year {1}").format(d.account, d.fiscal_year), raise_exception=1)
else:
check_acc_list.append([d.account, d.fiscal_year])
def validate(self):
self.validate_mandatory()
self.validate_budget_details()
def before_rename(self, olddn, newdn, merge=False):
# Add company abbr if not provided
from erpnext.setup.doctype.company.company import get_name_with_abbr
new_cost_center = get_name_with_abbr(newdn, self.company)
# Validate properties before merging
super(CostCenter, self).before_rename(olddn, new_cost_center, merge, "is_group")
return new_cost_center
def after_rename(self, olddn, newdn, merge=False):
if not merge:
frappe.db.set_value("Cost Center", newdn, "cost_center_name",
" - ".join(newdn.split(" - ")[:-1]))
else:
super(CostCenter, self).after_rename(olddn, newdn, merge)
| agpl-3.0 | -780,211,647,400,351,400 | 33.8625 | 134 | 0.71029 | false |
watchdogpolska/feder | feder/institutions/migrations/0008_auto_20161001_2053.py | 1 | 2085 |
# Generated by Django 1.10.1 on 2016-10-01 20:53
import django.utils.timezone
import model_utils.fields
from django.db import migrations, models
def forwards_func(apps, schema_editor):
# We get the model from the versioned app registry;
# if we directly import it, it'll be the wrong version
Institution = apps.get_model("institutions", "institution")
Email = apps.get_model("institutions", "Email")
db_alias = schema_editor.connection.alias
for institution in Institution.objects.using(db_alias).all():
emails = list(
Email.objects.filter(institution=institution.pk).order_by("priority").all()
)
if emails:
institution.email = max(emails, key=lambda x: x.priority).email
institution.save()
class Migration(migrations.Migration):
dependencies = [("institutions", "0007_auto_20160912_2250")]
operations = [
migrations.AlterUniqueTogether(name="email", unique_together=set()),
migrations.AddField(
model_name="institution",
name="email",
field=models.EmailField(
default="default-email@example.com",
max_length=254,
verbose_name="Email of institution",
),
preserve_default=False,
),
migrations.RunPython(forwards_func),
migrations.RemoveField(model_name="email", name="institution"),
migrations.AddField(
model_name="institution",
name="created",
field=model_utils.fields.AutoCreatedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="created",
),
),
migrations.AddField(
model_name="institution",
name="modified",
field=model_utils.fields.AutoLastModifiedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="modified",
),
),
migrations.DeleteModel(name="Email"),
]
| mit | 1,577,158,346,231,072,800 | 34.338983 | 87 | 0.596163 | false |
mandeepdhami/netvirt-ctrl | cli/midw.py | 3 | 18056 |
#
# Copyright (c) 2012,2013 Big Switch Networks, Inc.
#
# Licensed under the Eclipse Public License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.eclipse.org/legal/epl-v10.html
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# The database/model descriptions exist to meet particular
# needs, for example, switch-alias exist to provide an
# alternate name space from dpid's, to allow for a more
# readable and human usable form of the same dpid. Aliases
# would then naturally need a alias->dpid conversion, and
# at the same time, a dpid->alias (at least for the display
# of dpid's).
#
# The functions in this file provide these type of abstractions,
# taking the output from model lookup's in the rest api, and
# supplying some service used by the cli.
#
import rest_to_model
import fmtcnv
import json
import utif
def init_midware(bs, modi):
global sdnsh, mi
sdnsh = bs
mi = modi
#
# --------------------------------------------------------------------------------
def create_obj_type_dict(obj_type, field, key = None, value = None):
"""
Return a dictionary from a table search, where the key is one of the
fields. This doesn't manage multiple field matches.
Typically, the field selected is a foreign key for the obj_type.
For ('host-network-address', 'host'), this creates a dict
indexed by the mac address, returning the row in the table associated
with the mac (since the primary key for 'host' is a mac address).
For ('tag-mapping', 'host'), this creates a dict indexed by
the mac, returning the matching row in the table.
note: This gets the whole table
"""
if not mi.obj_type_has_field(obj_type, field):
return {}
if not mi.obj_type_has_model(obj_type):
data = {}
if key and value:
data[key] = value
rows = rest_to_model.get_model_from_url(obj_type, data)
elif not type(key) is dict:
try:
rows = sdnsh.get_table_from_store(obj_type, key, value)
except Exception, e:
errors = sdnsh.rest_error_to_dict(e)
print sdnsh.rest_error_dict_to_message(errors)
rows = []
else:
try:
rows = sdnsh.rest_query_objects(obj_type, key)
except Exception, e:
errors = sdnsh.rest_error_to_dict(e)
print sdnsh.rest_error_dict_to_message(errors)
rows = []
s_dict = {}
for row in rows:
if row[field] in s_dict:
s_dict[row[field]].append(row)
else:
s_dict[row[field]] = [row]
return s_dict
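
def _example_rows_for_host(mac):
    """Sketch (not called by the cli): index the 'tag-mapping' table by
    its 'host' foreign key and fetch every row for one mac address.
    The mac value passed in is assumed to be a string such as
    '00:00:00:00:00:01'."""
    tag_dict = create_obj_type_dict('tag-mapping', 'host')
    return tag_dict.get(mac, [])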
#
# ALIAS
#
#
# --------------------------------------------------------------------------------
def alias_lookup(alias_obj_type, alias_id):
"""
Return the value for the alias replacement by looking it up in the store.
When there is no alias replacement, return None.
"""
field = mi.alias_obj_type_field(alias_obj_type)
if not field:
print sdnsh.error_msg("Error: no field for alias")
return None
try:
alias_key = mi.pk(alias_obj_type)
# use an exact search instead of a 'get_object...()' since
# a miss for an exact search can lead to a 404 error, which
# gets recorded in the error logs
alias_row = sdnsh.get_table_from_store(alias_obj_type,
alias_key,
alias_id,
"exact")
if len(alias_row) == 1:
return alias_row[0][field]
# only len(alias_row) == 0 at this point
except:
pass
return None
#
# --------------------------------------------------------------------------------
def convert_alias_to_object_key(obj_type, name_or_alias):
"""
    For a specific obj_type (table/model) which may have associated alias
    rows, return the underlying object key when name_or_alias matches an
    alias; otherwise return name_or_alias unchanged.
"""
if obj_type in mi.alias_obj_type_xref:
if name_or_alias in sdnsh.reserved_words:
return name_or_alias
for alias in mi.alias_obj_type_xref[obj_type]:
alias_value = alias_lookup(alias, name_or_alias)
if alias_value:
return alias_value
return name_or_alias
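
# Example (a sketch; 'core-switch-1' is a hypothetical alias): resolve a
# user-supplied switch name to the dpid primary key expected by the
# 'switch-config' table. When no alias row matches, the input string is
# returned unchanged:
#
#   dpid = convert_alias_to_object_key('switch-config', 'core-switch-1')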
#
# --------------------------------------------------------------------------------
def alias_choices_for_alias_obj_type(entries, obj_type, text):
"""
    Return all choices of entries for an alias, removing any original
    items which appear in the entries list passed in, to prevent
    duplication of entries.
Also see cp_alias_choices(), which is similar, but includes
the current mode.
"""
if obj_type in mi.alias_obj_type_xref:
for alias in mi.alias_obj_type_xref[obj_type]:
try:
key = mi.pk(alias)
alias_dict = create_obj_type_dict(alias, key, key, text)
#
# remove the alias name if the dpid is in the
# list of entries... In all cases the alias is added,
# especially since the alias_dict may only contain selected
# entries from the 'text' query, and entries may already
# exclude those items.
alias_field = mi.alias_obj_type_field(alias)
if not alias_field:
continue
for item in alias_dict:
if alias_dict[item][0][alias_field] in entries:
entries.remove(alias_dict[item][0][alias_field])
entries.append(item)
except Exception, e:
pass
return entries
#
# --------------------------------------------------------------------------------
def alias_lookup_with_foreign_key(alias_obj_type, foreign_key):
"""
Find the alias name for some alias based on the foreign key's
    value it is associated with.
"""
foreign_field = mi.alias_obj_type_field(alias_obj_type)
try:
rows = sdnsh.get_table_from_store(alias_obj_type,
foreign_field,
foreign_key,
"exact")
except Exception, e:
errors = sdnsh.rest_error_to_dict(e)
print sdnsh.rest_error_dict_to_message(errors)
rows = []
if len(rows) == 1:
return rows[0][mi.pk(alias_obj_type)]
return None
#
# Interface between the cli and table output requires dictionaries
# which map between low item type values (for example, dpid's) and
# alias names for the items (for example, switch aliases), to be
# updated before display. If cassandra could provide some version
# number (or hash of the complete table), the lookup could be avoided
# by validating that the current result is up-to-date.
#
#
# --------------------------------------------------------------------------------
def update_show_alias(obj_type):
"""
Update alias associations for the pretty printer, used for the
'show' of tables
"""
if obj_type in mi.alias_obj_type_xref:
for alias in mi.alias_obj_type_xref[obj_type]:
field = mi.alias_obj_type_field(alias)
if not field:
print sdnsh.error_msg("update show alias alias_obj_type_field")
return
try:
table = sdnsh.get_table_from_store(alias)
except Exception, e:
table = []
new_dict = {}
key = mi.pk(alias)
# (foreign_obj, foreign_field) = \
# mi.foreign_key_references(alias, field)
for row in table:
new_dict[row[field]] = row[key]
fmtcnv.update_alias_dict(obj_type, new_dict)
return
#
# --------------------------------------------------------------------------------
def update_switch_alias_cache():
"""
Update the cliModeInfo prettyprinting switch table
"""
return update_show_alias('switch-config')
#
# --------------------------------------------------------------------------------
def update_switch_port_name_cache():
"""
Update the cliModeInfo prettyprinting portNames table
"""
# return update_show_alias('port')
errors = None
switch_port_to_name_dict = {}
try:
ports = rest_to_model.get_model_from_url('interfaces', {})
except Exception, e:
errors = sdnsh.rest_error_to_dict(e)
if errors:
print sdnsh.rest_error_dict_to_message(errors)
return
for port in ports:
key_string = port['switch'] + "." + "%d" % port['portNumber']
switch_port_to_name_dict[key_string] = port['portName']
fmtcnv.update_alias_dict("portNames", switch_port_to_name_dict)
#
# --------------------------------------------------------------------------------
def update_host_alias_cache():
"""
Update the cliModeInfo prettyprinting host table
"""
return update_show_alias('host-config')
#
# --------------------------------------------------------------------------------
# update_flow_cookie_hash
def update_flow_cookie_hash():
"""
The formatter keeps a map for static flow entries.
"""
# iterate through all the static flows and get their hashes once
flow_map = {}
prime = 211
for sf in sdnsh.get_table_from_store("flow-entry"):
flow_hash = 2311
for i in range(0, len(sf['name'])):
flow_hash = flow_hash * prime + ord(sf['name'][i])
flow_hash = flow_hash & ( (1 << 20) - 1)
flow_map[flow_hash] = sf['name']
fmtcnv.update_alias_dict("staticflow", flow_map)
fmtcnv.callout_flow_encoders(sdnsh)
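
def _flow_name_hash(name):
    """Sketch of the 20-bit static-flow cookie hash computed above,
    factored out for illustration (not called by the cli)."""
    prime = 211
    flow_hash = 2311
    for ch in name:
        flow_hash = flow_hash * prime + ord(ch)
    # Mask to 20 bits, matching the cookie encoding used by the formatter.
    return flow_hash & ((1 << 20) - 1)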
#
# --------------------------------------------------------------------------------
#
def update_controller_node_alias_cache():
return update_show_alias('controller-node')
#
# --------------------------------------------------------------------------------
#
def obj_type_show_alias_update(obj_type):
"""
When some item is about to be displayed, particular 'alias'
items for the display may require updating. instead of just
updating everything all the time, peek at the different formatting
functions and use those function names to determine what needs to
be updated.
Also see formatter_to_update in climodelinfo, since it may
need to include new formatting functions.
"""
update = {}
sdnsh.pp.format_to_alias_update(obj_type, update)
# select objects from 'update' dict
if 'host' in update:
update_host_alias_cache()
if 'switch' in update:
update_switch_alias_cache()
if 'port' in update:
update_switch_port_name_cache()
if 'flow' in update:
update_flow_cookie_hash()
if 'controller-node' in update:
update_controller_node_alias_cache()
#
# OBJECTs middleware.
#
#
# --------------------------------------------------------------------------------
def objects_starting_with(obj_type, text = "", key = None):
"""
The function returns a list of matching keys from table/model
identified by the 'obj_type' parameter
If the table/model has a 'alias' field, then this field's
values are also examined for matches
The first argument is the name of a table/model in the store,
while the second argument is a prefix to filter the results.
The filter is applied to the key of the table/model, which
was previously populated.
"""
if key:
if not mi.obj_type_has_field(obj_type, key):
sdnsh.warning("objects_starting_with: %s doesn't have field %s" %
(obj_type, key))
else:
key = mi.pk(obj_type)
if key == None:
sdnsh.warning("objects_starting_with: %s doesn't have pk" %
(obj_type))
key_entries = []
# Next, find the object
    # Deal with any changes to the lookup name based on the concatenation
    # of the config mode name to the named identifier.
#
case = mi.get_obj_type_field_case_sensitive(obj_type, key)
id_value = utif.convert_case(case, text)
if mi.obj_type_has_model(obj_type):
# from the database
try:
entries = sdnsh.get_table_from_store(obj_type, key, id_value)
errors = None
except Exception, e:
errors = sdnsh.rest_error_to_dict(e)
if errors:
print sdnsh.rest_error_dict_to_message(errors)
return key_entries
else:
if id_value == '':
entries = rest_to_model.get_model_from_url(obj_type, {})
else:
entries = rest_to_model.get_model_from_url(obj_type, { key + "__startswith" : id_value })
if key and entries:
# Expand any key values which are lists (hosts, for example)
items = [x[key] for x in entries if x.get(key)]
entries = []
for item in items:
if type(item) == list:
entries += item
else:
entries.append(item)
key_entries = [sdnsh.quote_item(obj_type, x)
for x in entries if x.startswith(id_value)]
#
# for some specific tables which have id's concatenated from multiple other
# components, only part of the id is available for completion.
#
if mi.is_compound_key(obj_type, key):
separator_character = mi.compound_key_separator(obj_type, key)
keyDict = {}
for key in key_entries:
# keyDict[key.split(separator_character)[0]] = ''
keyDict[key] = ''
key_entries = keyDict.keys()
alias_obj_type = obj_type
if key != mi.pk(alias_obj_type):
# if this is a forgeign key, use the obj_type of the fk.
if mi.is_foreign_key(alias_obj_type, key):
(alias_obj_type, fk_name) = mi.foreign_key_references(alias_obj_type, key)
else:
# XXX possibly other choices to determine alias_obj_type?
alias_obj_type = None
if alias_obj_type:
obj_type_config = mi.obj_type_related_config_obj_type(alias_obj_type)
# alias_choices_for_alias_obj_type() removes switch dpid's which
# have associated alias names,
key_entries = alias_choices_for_alias_obj_type(key_entries,
obj_type_config,
text)
return key_entries
#
# --------------------------------------------------------------------------------
def local_interfaces_firewall_open(protos, ports, controller_id = None):
"""
    Return a list of interfaces which have the proto and port currently enabled
    @param protos a string, or list of strings, identifying the protocol(s)
    @param ports a string or int, or a list of strings or ints
"""
# first collect all associated rules
if type(protos) != list:
protos = [protos]
if type(ports) != list:
ports = [ports]
rules = []
for proto in protos:
for port in ports:
query_dict = { 'proto' : proto, 'port' : port }
rules += sdnsh.rest_query_objects('firewall-rule', query_dict)
# create a dictionary indexed by the interface, which is part of the pk 'id'
rules_of_interface = dict([[x['interface'], x] for x in rules])
if controller_id == None:
# request 'this' controller
controller_url = "http://%s/rest/v1/system/controller" % sdnsh.controller
result = sdnsh.store.rest_simple_request(controller_url)
sdnsh.check_rest_result(result)
controller_id = json.loads(result)
if controller_id != 'all':
query_dict = { 'controller' : controller_id['id'] }
ifs = sdnsh.rest_query_objects('controller-interface', query_dict)
return [ifn for ifn in ifs if ifn['id'] in rules_of_interface]
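
# Example (sketch): list interfaces on this controller that have the
# standard REST ports open, mirroring how controller_ip_and_port() below
# uses this helper:
#
#   open_web = local_interfaces_firewall_open(['tcp'], [80, 8000])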
#
# --------------------------------------------------------------------------------
def log_url(ip_and_port = None, log = None):
"""
    Returns the url of the logs on the named ip_and_port.
"""
log_path = 'http://%s/rest/v1/system/log' % ip_and_port
if log:
log_path += '/' + log
return log_path
#
# --------------------------------------------------------------------------------
def controller_ip_and_port(controller):
"""
    Return a list of ip:port values for named controllers, used to
    build urls for REST APIs. If a controller of 'all' is passed in,
    then all the controllers are enumerated. If both ports 80 and
    8000 are open, then two ip:port pairs will be returned for a
    controller. This returns ALL values which match, not a single
    ip:port for each controller.
"""
url = 'http://%s/rest/v1/system/controller' % sdnsh.controller
rest_dict = sdnsh.rest_simple_request_to_dict(url)
this_controller = rest_dict['id']
ips_80 = [x for x in local_interfaces_firewall_open('tcp', 80,
controller)
if (x['ip'] != '' or x['discovered-ip'] != '')]
ips_8000 = [x for x in local_interfaces_firewall_open('tcp', 8000,
controller)
if (x['ip'] != '' or x['discovered-ip'] != '')]
    # parenthesized so that every returned entry carries its port,
    # as the docstring promises
    return ['%s:80' % ('127.0.0.1' if x['controller'] == this_controller else
                       x['discovered-ip'] if x['discovered-ip'] != '' else
                       x['ip'])
            for x in ips_80] + \
           ['%s:8000' % ('127.0.0.1' if x['controller'] == this_controller else
                         x['discovered-ip'] if x['discovered-ip'] != '' else
                         x['ip'])
            for x in ips_8000]
| epl-1.0 | -706,108,455,022,081,500 | 33.132325 | 101 | 0.553057 | false |
doug-fish/horizon | openstack_dashboard/test/api_tests/cinder_tests.py | 21 | 8000 | # Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.test.utils import override_settings
import six
import cinderclient as cinder_client
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class CinderApiTests(test.APITestCase):
def test_volume_list(self):
search_opts = {'all_tenants': 1}
detailed = True
volumes = self.cinder_volumes.list()
volume_transfers = self.cinder_volume_transfers.list()
cinderclient = self.stub_cinderclient()
cinderclient.volumes = self.mox.CreateMockAnything()
cinderclient.volumes.list(search_opts=search_opts,).AndReturn(volumes)
cinderclient.transfers = self.mox.CreateMockAnything()
cinderclient.transfers.list(
detailed=detailed,
search_opts=search_opts,).AndReturn(volume_transfers)
self.mox.ReplayAll()
# No assertions are necessary. Verification is handled by mox.
api.cinder.volume_list(self.request, search_opts=search_opts)
def test_volume_snapshot_list(self):
search_opts = {'all_tenants': 1}
volume_snapshots = self.cinder_volume_snapshots.list()
cinderclient = self.stub_cinderclient()
cinderclient.volume_snapshots = self.mox.CreateMockAnything()
cinderclient.volume_snapshots.list(search_opts=search_opts).\
AndReturn(volume_snapshots)
self.mox.ReplayAll()
api.cinder.volume_snapshot_list(self.request, search_opts=search_opts)
def test_volume_snapshot_list_no_volume_configured(self):
# remove volume from service catalog
catalog = self.service_catalog
for service in catalog:
if service["type"] == "volume":
self.service_catalog.remove(service)
search_opts = {'all_tenants': 1}
volume_snapshots = self.cinder_volume_snapshots.list()
cinderclient = self.stub_cinderclient()
cinderclient.volume_snapshots = self.mox.CreateMockAnything()
cinderclient.volume_snapshots.list(search_opts=search_opts).\
AndReturn(volume_snapshots)
self.mox.ReplayAll()
api.cinder.volume_snapshot_list(self.request, search_opts=search_opts)
def test_volume_type_list_with_qos_associations(self):
volume_types = self.cinder_volume_types.list()
# Due to test data limitations, we can only run this test using
# one qos spec, which is associated with one volume type.
# If we use multiple qos specs, the test data will always
# return the same associated volume type, which is invalid
# and prevented by the UI.
qos_specs_full = self.cinder_qos_specs.list()
qos_specs_only_one = [qos_specs_full[0]]
associations = self.cinder_qos_spec_associations.list()
cinderclient = self.stub_cinderclient()
cinderclient.volume_types = self.mox.CreateMockAnything()
cinderclient.volume_types.list().AndReturn(volume_types)
cinderclient.qos_specs = self.mox.CreateMockAnything()
cinderclient.qos_specs.list().AndReturn(qos_specs_only_one)
cinderclient.qos_specs.get_associations = self.mox.CreateMockAnything()
cinderclient.qos_specs.get_associations(qos_specs_only_one[0].id).\
AndReturn(associations)
self.mox.ReplayAll()
assoc_vol_types = \
api.cinder.volume_type_list_with_qos_associations(self.request)
associate_spec = assoc_vol_types[0].associated_qos_spec
        self.assertEqual(associate_spec, qos_specs_only_one[0].name)
def test_absolute_limits_with_negative_values(self):
values = {"maxTotalVolumes": -1, "totalVolumesUsed": -1}
expected_results = {"maxTotalVolumes": float("inf"),
"totalVolumesUsed": 0}
limits = self.mox.CreateMockAnything()
limits.absolute = []
for key, val in six.iteritems(values):
limit = self.mox.CreateMockAnything()
limit.name = key
limit.value = val
limits.absolute.append(limit)
cinderclient = self.stub_cinderclient()
cinderclient.limits = self.mox.CreateMockAnything()
cinderclient.limits.get().AndReturn(limits)
self.mox.ReplayAll()
ret_val = api.cinder.tenant_absolute_limits(self.request)
for key in expected_results.keys():
self.assertEqual(expected_results[key], ret_val[key])
def test_pool_list(self):
pools = self.cinder_pools.list()
cinderclient = self.stub_cinderclient()
cinderclient.pools = self.mox.CreateMockAnything()
cinderclient.pools.list(detailed=True).AndReturn(pools)
self.mox.ReplayAll()
# No assertions are necessary. Verification is handled by mox.
api.cinder.pool_list(self.request, detailed=True)
class CinderApiVersionTests(test.TestCase):
def setUp(self):
super(CinderApiVersionTests, self).setUp()
# The version is set when the module is loaded. Reset the
# active version each time so that we can test with different
# versions.
api.cinder.VERSIONS._active = None
def test_default_client_is_v2(self):
client = api.cinder.cinderclient(self.request)
self.assertIsInstance(client, cinder_client.v2.client.Client)
@override_settings(OPENSTACK_API_VERSIONS={'volume': 2})
def test_v2_setting_returns_v2_client(self):
client = api.cinder.cinderclient(self.request)
self.assertIsInstance(client, cinder_client.v2.client.Client)
def test_get_v2_volume_attributes(self):
# Get a v2 volume
volume = self.cinder_volumes.get(name="v2_volume")
self.assertTrue(hasattr(volume._apiresource, 'name'))
self.assertFalse(hasattr(volume._apiresource, 'display_name'))
name = "A v2 test volume name"
description = "A v2 volume description"
setattr(volume._apiresource, 'name', name)
setattr(volume._apiresource, 'description', description)
self.assertEqual(name, volume.name)
self.assertEqual(description, volume.description)
def test_get_v2_snapshot_attributes(self):
# Get a v2 snapshot
snapshot = self.cinder_volume_snapshots.get(
description="v2 volume snapshot description")
self.assertFalse(hasattr(snapshot._apiresource, 'display_name'))
name = "A v2 test snapshot name"
description = "A v2 snapshot description"
setattr(snapshot._apiresource, 'name', name)
setattr(snapshot._apiresource, 'description', description)
self.assertEqual(name, snapshot.name)
self.assertEqual(description, snapshot.description)
def test_get_id_for_nameless_volume(self):
volume = self.cinder_volumes.first()
setattr(volume._apiresource, 'display_name', "")
self.assertEqual(volume.id, volume.name)
def test_adapt_dictionary_to_v2(self):
volume = self.cinder_volumes.first()
data = {'name': volume.name,
'description': volume.description,
'size': volume.size}
ret_data = api.cinder._replace_v2_parameters(data)
self.assertIn('name', ret_data.keys())
self.assertIn('description', ret_data.keys())
self.assertNotIn('display_name', ret_data.keys())
self.assertNotIn('display_description', ret_data.keys())
| apache-2.0 | 3,727,324,006,380,202,000 | 41.328042 | 79 | 0.668125 | false |
chrishokamp/fuel | fuel/converters/caltech101_silhouettes.py | 12 | 2497 | import os
import h5py
from scipy.io import loadmat
from fuel.converters.base import fill_hdf5_file, MissingInputFiles
def convert_silhouettes(size, directory, output_directory,
output_file=None):
""" Convert the CalTech 101 Silhouettes Datasets.
Parameters
----------
size : {16, 28}
Convert either the 16x16 or 28x28 sized version of the dataset.
directory : str
Directory in which the required input files reside.
output_file : str
Where to save the converted dataset.
"""
if size not in (16, 28):
raise ValueError('size must be 16 or 28')
if output_file is None:
output_file = 'caltech101_silhouettes{}.hdf5'.format(size)
output_file = os.path.join(output_directory, output_file)
input_file = 'caltech101_silhouettes_{}_split1.mat'.format(size)
input_file = os.path.join(directory, input_file)
if not os.path.isfile(input_file):
raise MissingInputFiles('Required files missing', [input_file])
with h5py.File(output_file, mode="w") as h5file:
mat = loadmat(input_file)
train_features = mat['train_data'].reshape([-1, 1, size, size])
train_targets = mat['train_labels']
valid_features = mat['val_data'].reshape([-1, 1, size, size])
valid_targets = mat['val_labels']
test_features = mat['test_data'].reshape([-1, 1, size, size])
test_targets = mat['test_labels']
data = (
('train', 'features', train_features),
('train', 'targets', train_targets),
('valid', 'features', valid_features),
('valid', 'targets', valid_targets),
('test', 'features', test_features),
('test', 'targets', test_targets),
)
fill_hdf5_file(h5file, data)
for i, label in enumerate(('batch', 'channel', 'height', 'width')):
h5file['features'].dims[i].label = label
for i, label in enumerate(('batch', 'index')):
h5file['targets'].dims[i].label = label
return (output_file,)
def fill_subparser(subparser):
"""Sets up a subparser to convert CalTech101 Silhouettes Database files.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `caltech101_silhouettes` command.
"""
subparser.add_argument(
"size", type=int, choices=(16, 28),
help="height/width of the datapoints")
return convert_silhouettes
| mit | 2,978,130,629,956,573,000 | 31.855263 | 76 | 0.611934 | false |
norangmangto/pypi-default | setup.py | 1 | 4103 | """A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
import re
import ast
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('default/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
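# The regex + ast.literal_eval pair above pulls the quoted version string out
# of a line such as __version__ = '0.1.0' (example value) without importing
# the package at build time.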
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='default',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version=version,
description='Merge JSON data with default JSON data',
long_description=long_description,
# The project's main homepage.
url='https://github.com/norangmangto/pypi-default',
# Author details
author='Beomsoo Jang',
author_email='norangmangto@gmail.com',
# Choose your license
license='GPLv3',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
keywords='default development',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
# py_modules=["my_module"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=[],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': [],
'test': [],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
data_files=[],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={},
)
| gpl-3.0 | 4,317,205,714,384,958,500 | 34.37069 | 94 | 0.665367 | false |
jabesq/home-assistant | homeassistant/components/caldav/calendar.py | 1 | 9850 | """Support for WebDav Calendar."""
import copy
from datetime import datetime, timedelta
import logging
import re
import voluptuous as vol
from homeassistant.components.calendar import (
ENTITY_ID_FORMAT, PLATFORM_SCHEMA, CalendarEventDevice, calculate_offset,
get_date, is_offset_reached)
from homeassistant.const import (
CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import generate_entity_id
from homeassistant.util import Throttle, dt
_LOGGER = logging.getLogger(__name__)
CONF_CALENDARS = 'calendars'
CONF_CUSTOM_CALENDARS = 'custom_calendars'
CONF_CALENDAR = 'calendar'
CONF_SEARCH = 'search'
OFFSET = '!!'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
# pylint: disable=no-value-for-parameter
vol.Required(CONF_URL): vol.Url(),
vol.Optional(CONF_CALENDARS, default=[]):
vol.All(cv.ensure_list, [cv.string]),
vol.Inclusive(CONF_USERNAME, 'authentication'): cv.string,
vol.Inclusive(CONF_PASSWORD, 'authentication'): cv.string,
vol.Optional(CONF_CUSTOM_CALENDARS, default=[]):
vol.All(cv.ensure_list, [
vol.Schema({
vol.Required(CONF_CALENDAR): cv.string,
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_SEARCH): cv.string,
})
]),
vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean
})
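# A hedged configuration example matching the schema above (URL and
# credentials are placeholders):
#   calendar:
#     - platform: caldav
#       url: https://calendar.example.com/dav/
#       username: me
#       password: secret
#       custom_calendars:
#         - name: Work
#           calendar: Personal
#           search: "^Meeting.*"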
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)
def setup_platform(hass, config, add_entities, disc_info=None):
"""Set up the WebDav Calendar platform."""
import caldav
url = config[CONF_URL]
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
client = caldav.DAVClient(
url, None, username, password, ssl_verify_cert=config[CONF_VERIFY_SSL])
calendars = client.principal().calendars()
calendar_devices = []
for calendar in list(calendars):
# If a calendar name was given in the configuration,
# ignore all the others
if (config[CONF_CALENDARS]
and calendar.name not in config[CONF_CALENDARS]):
_LOGGER.debug("Ignoring calendar '%s'", calendar.name)
continue
# Create additional calendars based on custom filtering rules
for cust_calendar in config[CONF_CUSTOM_CALENDARS]:
# Check that the base calendar matches
if cust_calendar[CONF_CALENDAR] != calendar.name:
continue
name = cust_calendar[CONF_NAME]
device_id = "{} {}".format(
cust_calendar[CONF_CALENDAR], cust_calendar[CONF_NAME])
entity_id = generate_entity_id(
ENTITY_ID_FORMAT, device_id, hass=hass)
calendar_devices.append(
WebDavCalendarEventDevice(
name, calendar, entity_id, True,
cust_calendar[CONF_SEARCH]))
# Create a default calendar if there was no custom one
if not config[CONF_CUSTOM_CALENDARS]:
name = calendar.name
device_id = calendar.name
entity_id = generate_entity_id(
ENTITY_ID_FORMAT, device_id, hass=hass)
calendar_devices.append(
WebDavCalendarEventDevice(name, calendar, entity_id)
)
add_entities(calendar_devices, True)
class WebDavCalendarEventDevice(CalendarEventDevice):
"""A device for getting the next Task from a WebDav Calendar."""
def __init__(self, name, calendar, entity_id, all_day=False, search=None):
"""Create the WebDav Calendar Event Device."""
self.data = WebDavCalendarData(calendar, all_day, search)
self.entity_id = entity_id
self._event = None
self._name = name
self._offset_reached = False
@property
def device_state_attributes(self):
"""Return the device state attributes."""
return {
'offset_reached': self._offset_reached,
}
@property
def event(self):
"""Return the next upcoming event."""
return self._event
@property
def name(self):
"""Return the name of the entity."""
return self._name
async def async_get_events(self, hass, start_date, end_date):
"""Get all events in a specific time frame."""
return await self.data.async_get_events(hass, start_date, end_date)
def update(self):
"""Update event data."""
self.data.update()
event = copy.deepcopy(self.data.event)
if event is None:
self._event = event
return
event = calculate_offset(event, OFFSET)
self._offset_reached = is_offset_reached(event)
self._event = event
class WebDavCalendarData:
"""Class to utilize the calendar dav client object to get next event."""
def __init__(self, calendar, include_all_day, search):
"""Set up how we are going to search the WebDav calendar."""
self.calendar = calendar
self.include_all_day = include_all_day
self.search = search
self.event = None
async def async_get_events(self, hass, start_date, end_date):
"""Get all events in a specific time frame."""
# Get event list from the current calendar
vevent_list = await hass.async_add_job(self.calendar.date_search,
start_date, end_date)
event_list = []
for event in vevent_list:
vevent = event.instance.vevent
uid = None
if hasattr(vevent, 'uid'):
uid = vevent.uid.value
data = {
"uid": uid,
"title": vevent.summary.value,
"start": self.get_hass_date(vevent.dtstart.value),
"end": self.get_hass_date(self.get_end_date(vevent)),
"location": self.get_attr_value(vevent, "location"),
"description": self.get_attr_value(vevent, "description"),
}
data['start'] = get_date(data['start']).isoformat()
data['end'] = get_date(data['end']).isoformat()
event_list.append(data)
return event_list
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data."""
# We have to retrieve the results for the whole day as the server
# won't return events that have already started
results = self.calendar.date_search(
dt.start_of_local_day(),
dt.start_of_local_day() + timedelta(days=1)
)
# dtstart can be a date or datetime depending if the event lasts a
# whole day. Convert everything to datetime to be able to sort it
results.sort(key=lambda x: self.to_datetime(
x.instance.vevent.dtstart.value
))
vevent = next((
event.instance.vevent
for event in results
if (self.is_matching(event.instance.vevent, self.search)
and (
not self.is_all_day(event.instance.vevent)
or self.include_all_day)
and not self.is_over(event.instance.vevent))), None)
# If no matching event could be found
if vevent is None:
_LOGGER.debug(
"No matching event found in the %d results for %s",
len(results), self.calendar.name)
self.event = None
return
# Populate the entity attributes with the event values
self.event = {
"summary": vevent.summary.value,
"start": self.get_hass_date(vevent.dtstart.value),
"end": self.get_hass_date(self.get_end_date(vevent)),
"location": self.get_attr_value(vevent, "location"),
"description": self.get_attr_value(vevent, "description")
}
@staticmethod
def is_matching(vevent, search):
"""Return if the event matches the filter criteria."""
if search is None:
return True
pattern = re.compile(search)
return (
hasattr(vevent, "summary")
and pattern.match(vevent.summary.value)
or hasattr(vevent, "location")
and pattern.match(vevent.location.value)
or hasattr(vevent, "description")
and pattern.match(vevent.description.value))
@staticmethod
def is_all_day(vevent):
"""Return if the event last the whole day."""
return not isinstance(vevent.dtstart.value, datetime)
@staticmethod
def is_over(vevent):
"""Return if the event is over."""
return dt.now() >= WebDavCalendarData.to_datetime(
WebDavCalendarData.get_end_date(vevent)
)
@staticmethod
def get_hass_date(obj):
"""Return if the event matches."""
if isinstance(obj, datetime):
return {"dateTime": obj.isoformat()}
return {"date": obj.isoformat()}
@staticmethod
def to_datetime(obj):
"""Return a datetime."""
if isinstance(obj, datetime):
return obj
return dt.as_local(dt.dt.datetime.combine(obj, dt.dt.time.min))
@staticmethod
def get_attr_value(obj, attribute):
"""Return the value of the attribute if defined."""
if hasattr(obj, attribute):
return getattr(obj, attribute).value
return None
@staticmethod
def get_end_date(obj):
"""Return the end datetime as determined by dtend or duration."""
if hasattr(obj, "dtend"):
enddate = obj.dtend.value
elif hasattr(obj, "duration"):
enddate = obj.dtstart.value + obj.duration.value
else:
enddate = obj.dtstart.value + timedelta(days=1)
return enddate
| apache-2.0 | 5,990,035,734,490,488,000 | 33.805654 | 79 | 0.6 | false |
charukiewicz/beer-manager | venv/lib/python3.4/site-packages/jinja2/testsuite/filters.py | 394 | 19169 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.filters
~~~~~~~~~~~~~~~~~~~~~~~~
Tests for the jinja filters.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Markup, Environment
from jinja2._compat import text_type, implements_to_string
env = Environment()
class FilterTestCase(JinjaTestCase):
def test_filter_calling(self):
rv = env.call_filter('sum', [1, 2, 3])
self.assert_equal(rv, 6)
def test_capitalize(self):
tmpl = env.from_string('{{ "foo bar"|capitalize }}')
assert tmpl.render() == 'Foo bar'
def test_center(self):
tmpl = env.from_string('{{ "foo"|center(9) }}')
assert tmpl.render() == ' foo '
def test_default(self):
tmpl = env.from_string(
"{{ missing|default('no') }}|{{ false|default('no') }}|"
"{{ false|default('no', true) }}|{{ given|default('no') }}"
)
assert tmpl.render(given='yes') == 'no|False|no|yes'
def test_dictsort(self):
tmpl = env.from_string(
'{{ foo|dictsort }}|'
'{{ foo|dictsort(true) }}|'
'{{ foo|dictsort(false, "value") }}'
)
out = tmpl.render(foo={"aa": 0, "b": 1, "c": 2, "AB": 3})
assert out == ("[('aa', 0), ('AB', 3), ('b', 1), ('c', 2)]|"
"[('AB', 3), ('aa', 0), ('b', 1), ('c', 2)]|"
"[('aa', 0), ('b', 1), ('c', 2), ('AB', 3)]")
def test_batch(self):
tmpl = env.from_string("{{ foo|batch(3)|list }}|"
"{{ foo|batch(3, 'X')|list }}")
out = tmpl.render(foo=list(range(10)))
assert out == ("[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]|"
"[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 'X', 'X']]")
def test_slice(self):
tmpl = env.from_string('{{ foo|slice(3)|list }}|'
'{{ foo|slice(3, "X")|list }}')
out = tmpl.render(foo=list(range(10)))
assert out == ("[[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]|"
"[[0, 1, 2, 3], [4, 5, 6, 'X'], [7, 8, 9, 'X']]")
def test_escape(self):
tmpl = env.from_string('''{{ '<">&'|escape }}''')
out = tmpl.render()
assert out == '<">&'
def test_striptags(self):
tmpl = env.from_string('''{{ foo|striptags }}''')
out = tmpl.render(foo=' <p>just a small \n <a href="#">'
'example</a> link</p>\n<p>to a webpage</p> '
'<!-- <p>and some commented stuff</p> -->')
assert out == 'just a small example link to a webpage'
def test_filesizeformat(self):
tmpl = env.from_string(
'{{ 100|filesizeformat }}|'
'{{ 1000|filesizeformat }}|'
'{{ 1000000|filesizeformat }}|'
'{{ 1000000000|filesizeformat }}|'
'{{ 1000000000000|filesizeformat }}|'
'{{ 100|filesizeformat(true) }}|'
'{{ 1000|filesizeformat(true) }}|'
'{{ 1000000|filesizeformat(true) }}|'
'{{ 1000000000|filesizeformat(true) }}|'
'{{ 1000000000000|filesizeformat(true) }}'
)
out = tmpl.render()
self.assert_equal(out, (
'100 Bytes|1.0 kB|1.0 MB|1.0 GB|1.0 TB|100 Bytes|'
'1000 Bytes|976.6 KiB|953.7 MiB|931.3 GiB'
))
def test_filesizeformat_issue59(self):
tmpl = env.from_string(
'{{ 300|filesizeformat }}|'
'{{ 3000|filesizeformat }}|'
'{{ 3000000|filesizeformat }}|'
'{{ 3000000000|filesizeformat }}|'
'{{ 3000000000000|filesizeformat }}|'
'{{ 300|filesizeformat(true) }}|'
'{{ 3000|filesizeformat(true) }}|'
'{{ 3000000|filesizeformat(true) }}'
)
out = tmpl.render()
self.assert_equal(out, (
'300 Bytes|3.0 kB|3.0 MB|3.0 GB|3.0 TB|300 Bytes|'
'2.9 KiB|2.9 MiB'
))
def test_first(self):
tmpl = env.from_string('{{ foo|first }}')
out = tmpl.render(foo=list(range(10)))
assert out == '0'
def test_float(self):
tmpl = env.from_string('{{ "42"|float }}|'
'{{ "ajsghasjgd"|float }}|'
'{{ "32.32"|float }}')
out = tmpl.render()
assert out == '42.0|0.0|32.32'
def test_format(self):
tmpl = env.from_string('''{{ "%s|%s"|format("a", "b") }}''')
out = tmpl.render()
assert out == 'a|b'
def test_indent(self):
tmpl = env.from_string('{{ foo|indent(2) }}|{{ foo|indent(2, true) }}')
text = '\n'.join([' '.join(['foo', 'bar'] * 2)] * 2)
out = tmpl.render(foo=text)
assert out == ('foo bar foo bar\n foo bar foo bar| '
'foo bar foo bar\n foo bar foo bar')
def test_int(self):
tmpl = env.from_string('{{ "42"|int }}|{{ "ajsghasjgd"|int }}|'
'{{ "32.32"|int }}')
out = tmpl.render()
assert out == '42|0|32'
def test_join(self):
tmpl = env.from_string('{{ [1, 2, 3]|join("|") }}')
out = tmpl.render()
assert out == '1|2|3'
env2 = Environment(autoescape=True)
tmpl = env2.from_string('{{ ["<foo>", "<span>foo</span>"|safe]|join }}')
assert tmpl.render() == '<foo><span>foo</span>'
def test_join_attribute(self):
class User(object):
def __init__(self, username):
self.username = username
tmpl = env.from_string('''{{ users|join(', ', 'username') }}''')
assert tmpl.render(users=map(User, ['foo', 'bar'])) == 'foo, bar'
def test_last(self):
tmpl = env.from_string('''{{ foo|last }}''')
out = tmpl.render(foo=list(range(10)))
assert out == '9'
def test_length(self):
tmpl = env.from_string('''{{ "hello world"|length }}''')
out = tmpl.render()
assert out == '11'
def test_lower(self):
tmpl = env.from_string('''{{ "FOO"|lower }}''')
out = tmpl.render()
assert out == 'foo'
def test_pprint(self):
from pprint import pformat
tmpl = env.from_string('''{{ data|pprint }}''')
data = list(range(1000))
assert tmpl.render(data=data) == pformat(data)
def test_random(self):
tmpl = env.from_string('''{{ seq|random }}''')
seq = list(range(100))
for _ in range(10):
assert int(tmpl.render(seq=seq)) in seq
def test_reverse(self):
tmpl = env.from_string('{{ "foobar"|reverse|join }}|'
'{{ [1, 2, 3]|reverse|list }}')
assert tmpl.render() == 'raboof|[3, 2, 1]'
def test_string(self):
x = [1, 2, 3, 4, 5]
tmpl = env.from_string('''{{ obj|string }}''')
assert tmpl.render(obj=x) == text_type(x)
def test_title(self):
tmpl = env.from_string('''{{ "foo bar"|title }}''')
assert tmpl.render() == "Foo Bar"
tmpl = env.from_string('''{{ "foo's bar"|title }}''')
assert tmpl.render() == "Foo's Bar"
tmpl = env.from_string('''{{ "foo bar"|title }}''')
assert tmpl.render() == "Foo Bar"
tmpl = env.from_string('''{{ "f bar f"|title }}''')
assert tmpl.render() == "F Bar F"
tmpl = env.from_string('''{{ "foo-bar"|title }}''')
assert tmpl.render() == "Foo-Bar"
tmpl = env.from_string('''{{ "foo\tbar"|title }}''')
assert tmpl.render() == "Foo\tBar"
tmpl = env.from_string('''{{ "FOO\tBAR"|title }}''')
assert tmpl.render() == "Foo\tBar"
def test_truncate(self):
tmpl = env.from_string(
'{{ data|truncate(15, true, ">>>") }}|'
'{{ data|truncate(15, false, ">>>") }}|'
'{{ smalldata|truncate(15) }}'
)
out = tmpl.render(data='foobar baz bar' * 1000,
smalldata='foobar baz bar')
assert out == 'foobar baz barf>>>|foobar baz >>>|foobar baz bar'
def test_upper(self):
tmpl = env.from_string('{{ "foo"|upper }}')
assert tmpl.render() == 'FOO'
def test_urlize(self):
tmpl = env.from_string('{{ "foo http://www.example.com/ bar"|urlize }}')
assert tmpl.render() == 'foo <a href="http://www.example.com/">'\
'http://www.example.com/</a> bar'
def test_wordcount(self):
tmpl = env.from_string('{{ "foo bar baz"|wordcount }}')
assert tmpl.render() == '3'
def test_block(self):
tmpl = env.from_string('{% filter lower|escape %}<HEHE>{% endfilter %}')
assert tmpl.render() == '<hehe>'
def test_chaining(self):
tmpl = env.from_string('''{{ ['<foo>', '<bar>']|first|upper|escape }}''')
assert tmpl.render() == '<FOO>'
def test_sum(self):
tmpl = env.from_string('''{{ [1, 2, 3, 4, 5, 6]|sum }}''')
assert tmpl.render() == '21'
def test_sum_attributes(self):
tmpl = env.from_string('''{{ values|sum('value') }}''')
assert tmpl.render(values=[
{'value': 23},
{'value': 1},
{'value': 18},
]) == '42'
def test_sum_attributes_nested(self):
tmpl = env.from_string('''{{ values|sum('real.value') }}''')
assert tmpl.render(values=[
{'real': {'value': 23}},
{'real': {'value': 1}},
{'real': {'value': 18}},
]) == '42'
def test_sum_attributes_tuple(self):
tmpl = env.from_string('''{{ values.items()|sum('1') }}''')
assert tmpl.render(values={
'foo': 23,
'bar': 1,
'baz': 18,
}) == '42'
def test_abs(self):
tmpl = env.from_string('''{{ -1|abs }}|{{ 1|abs }}''')
assert tmpl.render() == '1|1', tmpl.render()
def test_round_positive(self):
tmpl = env.from_string('{{ 2.7|round }}|{{ 2.1|round }}|'
"{{ 2.1234|round(3, 'floor') }}|"
"{{ 2.1|round(0, 'ceil') }}")
assert tmpl.render() == '3.0|2.0|2.123|3.0', tmpl.render()
def test_round_negative(self):
tmpl = env.from_string('{{ 21.3|round(-1)}}|'
"{{ 21.3|round(-1, 'ceil')}}|"
"{{ 21.3|round(-1, 'floor')}}")
        assert tmpl.render() == '20.0|30.0|20.0', tmpl.render()
def test_xmlattr(self):
tmpl = env.from_string("{{ {'foo': 42, 'bar': 23, 'fish': none, "
"'spam': missing, 'blub:blub': '<?>'}|xmlattr }}")
out = tmpl.render().split()
assert len(out) == 3
assert 'foo="42"' in out
assert 'bar="23"' in out
assert 'blub:blub="<?>"' in out
def test_sort1(self):
tmpl = env.from_string('{{ [2, 3, 1]|sort }}|{{ [2, 3, 1]|sort(true) }}')
assert tmpl.render() == '[1, 2, 3]|[3, 2, 1]'
def test_sort2(self):
tmpl = env.from_string('{{ "".join(["c", "A", "b", "D"]|sort) }}')
assert tmpl.render() == 'AbcD'
def test_sort3(self):
tmpl = env.from_string('''{{ ['foo', 'Bar', 'blah']|sort }}''')
assert tmpl.render() == "['Bar', 'blah', 'foo']"
def test_sort4(self):
@implements_to_string
class Magic(object):
def __init__(self, value):
self.value = value
def __str__(self):
return text_type(self.value)
tmpl = env.from_string('''{{ items|sort(attribute='value')|join }}''')
assert tmpl.render(items=map(Magic, [3, 2, 4, 1])) == '1234'
def test_groupby(self):
tmpl = env.from_string('''
{%- for grouper, list in [{'foo': 1, 'bar': 2},
{'foo': 2, 'bar': 3},
{'foo': 1, 'bar': 1},
{'foo': 3, 'bar': 4}]|groupby('foo') -%}
{{ grouper }}{% for x in list %}: {{ x.foo }}, {{ x.bar }}{% endfor %}|
{%- endfor %}''')
assert tmpl.render().split('|') == [
"1: 1, 2: 1, 1",
"2: 2, 3",
"3: 3, 4",
""
]
def test_groupby_tuple_index(self):
tmpl = env.from_string('''
{%- for grouper, list in [('a', 1), ('a', 2), ('b', 1)]|groupby(0) -%}
{{ grouper }}{% for x in list %}:{{ x.1 }}{% endfor %}|
{%- endfor %}''')
assert tmpl.render() == 'a:1:2|b:1|'
def test_groupby_multidot(self):
class Date(object):
def __init__(self, day, month, year):
self.day = day
self.month = month
self.year = year
class Article(object):
def __init__(self, title, *date):
self.date = Date(*date)
self.title = title
articles = [
Article('aha', 1, 1, 1970),
Article('interesting', 2, 1, 1970),
Article('really?', 3, 1, 1970),
Article('totally not', 1, 1, 1971)
]
tmpl = env.from_string('''
{%- for year, list in articles|groupby('date.year') -%}
{{ year }}{% for x in list %}[{{ x.title }}]{% endfor %}|
{%- endfor %}''')
assert tmpl.render(articles=articles).split('|') == [
'1970[aha][interesting][really?]',
'1971[totally not]',
''
]
def test_filtertag(self):
tmpl = env.from_string("{% filter upper|replace('FOO', 'foo') %}"
"foobar{% endfilter %}")
assert tmpl.render() == 'fooBAR'
def test_replace(self):
env = Environment()
tmpl = env.from_string('{{ string|replace("o", 42) }}')
assert tmpl.render(string='<foo>') == '<f4242>'
env = Environment(autoescape=True)
tmpl = env.from_string('{{ string|replace("o", 42) }}')
assert tmpl.render(string='<foo>') == '<f4242>'
tmpl = env.from_string('{{ string|replace("<", 42) }}')
assert tmpl.render(string='<foo>') == '42foo>'
tmpl = env.from_string('{{ string|replace("o", ">x<") }}')
assert tmpl.render(string=Markup('foo')) == 'f>x<>x<'
def test_forceescape(self):
tmpl = env.from_string('{{ x|forceescape }}')
assert tmpl.render(x=Markup('<div />')) == u'<div />'
def test_safe(self):
env = Environment(autoescape=True)
tmpl = env.from_string('{{ "<div>foo</div>"|safe }}')
assert tmpl.render() == '<div>foo</div>'
tmpl = env.from_string('{{ "<div>foo</div>" }}')
assert tmpl.render() == '<div>foo</div>'
def test_urlencode(self):
env = Environment(autoescape=True)
tmpl = env.from_string('{{ "Hello, world!"|urlencode }}')
assert tmpl.render() == 'Hello%2C%20world%21'
tmpl = env.from_string('{{ o|urlencode }}')
assert tmpl.render(o=u"Hello, world\u203d") == "Hello%2C%20world%E2%80%BD"
assert tmpl.render(o=(("f", 1),)) == "f=1"
assert tmpl.render(o=(('f', 1), ("z", 2))) == "f=1&z=2"
assert tmpl.render(o=((u"\u203d", 1),)) == "%E2%80%BD=1"
assert tmpl.render(o={u"\u203d": 1}) == "%E2%80%BD=1"
assert tmpl.render(o={0: 1}) == "0=1"
def test_simple_map(self):
env = Environment()
tmpl = env.from_string('{{ ["1", "2", "3"]|map("int")|sum }}')
self.assertEqual(tmpl.render(), '6')
def test_attribute_map(self):
class User(object):
def __init__(self, name):
self.name = name
env = Environment()
users = [
User('john'),
User('jane'),
User('mike'),
]
tmpl = env.from_string('{{ users|map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'john|jane|mike')
def test_empty_map(self):
env = Environment()
tmpl = env.from_string('{{ none|map("upper")|list }}')
self.assertEqual(tmpl.render(), '[]')
def test_simple_select(self):
env = Environment()
tmpl = env.from_string('{{ [1, 2, 3, 4, 5]|select("odd")|join("|") }}')
self.assertEqual(tmpl.render(), '1|3|5')
def test_bool_select(self):
env = Environment()
tmpl = env.from_string('{{ [none, false, 0, 1, 2, 3, 4, 5]|select|join("|") }}')
self.assertEqual(tmpl.render(), '1|2|3|4|5')
def test_simple_reject(self):
env = Environment()
tmpl = env.from_string('{{ [1, 2, 3, 4, 5]|reject("odd")|join("|") }}')
self.assertEqual(tmpl.render(), '2|4')
def test_bool_reject(self):
env = Environment()
tmpl = env.from_string('{{ [none, false, 0, 1, 2, 3, 4, 5]|reject|join("|") }}')
self.assertEqual(tmpl.render(), 'None|False|0')
def test_simple_select_attr(self):
class User(object):
def __init__(self, name, is_active):
self.name = name
self.is_active = is_active
env = Environment()
users = [
User('john', True),
User('jane', True),
User('mike', False),
]
tmpl = env.from_string('{{ users|selectattr("is_active")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'john|jane')
def test_simple_reject_attr(self):
class User(object):
def __init__(self, name, is_active):
self.name = name
self.is_active = is_active
env = Environment()
users = [
User('john', True),
User('jane', True),
User('mike', False),
]
tmpl = env.from_string('{{ users|rejectattr("is_active")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'mike')
def test_func_select_attr(self):
class User(object):
def __init__(self, id, name):
self.id = id
self.name = name
env = Environment()
users = [
User(1, 'john'),
User(2, 'jane'),
User(3, 'mike'),
]
tmpl = env.from_string('{{ users|selectattr("id", "odd")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'john|mike')
def test_func_reject_attr(self):
class User(object):
def __init__(self, id, name):
self.id = id
self.name = name
env = Environment()
users = [
User(1, 'john'),
User(2, 'jane'),
User(3, 'mike'),
]
tmpl = env.from_string('{{ users|rejectattr("id", "odd")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'jane')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(FilterTestCase))
return suite
| mit | 1,293,905,689,546,073,600 | 36.221359 | 88 | 0.474151 | false |
i945/An | An/extra_apps/xadmin/migrations/0002_log.py | 15 | 1849 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-15 05:50
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('xadmin', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Log',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('action_time', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='action time')),
('ip_addr', models.GenericIPAddressField(blank=True, null=True, verbose_name='action ip')),
('object_id', models.TextField(blank=True, null=True, verbose_name='object id')),
('object_repr', models.CharField(max_length=200, verbose_name='object repr')),
('action_flag', models.PositiveSmallIntegerField(verbose_name='action flag')),
('message', models.TextField(blank=True, verbose_name='change message')),
('content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='contenttypes.ContentType', verbose_name='content type')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user')),
],
options={
'ordering': ('-action_time',),
'verbose_name': 'log entry',
'verbose_name_plural': 'log entries',
},
),
]
| mit | 4,311,930,304,461,944,300 | 46.410256 | 181 | 0.619254 | false |
houzhenggang/OpenWRT-1 | scripts/dl_cleanup.py | 202 | 5942 | #!/usr/bin/env python
"""
# OpenWRT download directory cleanup utility.
# Delete all but the very last version of the program tarballs.
#
# Copyright (C) 2010 Michael Buesch <mb@bu3sch.de>
# Copyright (C) 2013 OpenWrt.org
"""
import sys
import os
import re
import getopt
# Commandline options
opt_dryrun = False
def parseVer_1234(match, filepath):
progname = match.group(1)
progversion = (int(match.group(2)) << 64) |\
(int(match.group(3)) << 48) |\
(int(match.group(4)) << 32) |\
(int(match.group(5)) << 16)
return (progname, progversion)
def parseVer_123(match, filepath):
progname = match.group(1)
try:
patchlevel = match.group(5)
except (IndexError), e:
patchlevel = None
if patchlevel:
patchlevel = ord(patchlevel[0])
else:
patchlevel = 0
progversion = (int(match.group(2)) << 64) |\
(int(match.group(3)) << 48) |\
(int(match.group(4)) << 32) |\
patchlevel
return (progname, progversion)
def parseVer_12(match, filepath):
progname = match.group(1)
try:
patchlevel = match.group(4)
except (IndexError), e:
patchlevel = None
if patchlevel:
patchlevel = ord(patchlevel[0])
else:
patchlevel = 0
progversion = (int(match.group(2)) << 64) |\
(int(match.group(3)) << 48) |\
patchlevel
return (progname, progversion)
def parseVer_r(match, filepath):
progname = match.group(1)
progversion = (int(match.group(2)) << 64)
return (progname, progversion)
def parseVer_ymd(match, filepath):
progname = match.group(1)
progversion = (int(match.group(2)) << 64) |\
(int(match.group(3)) << 48) |\
(int(match.group(4)) << 32)
return (progname, progversion)
def parseVer_GIT(match, filepath):
progname = match.group(1)
st = os.stat(filepath)
progversion = int(st.st_mtime) << 64
return (progname, progversion)
extensions = (
".tar.gz",
".tar.bz2",
".tar.xz",
".orig.tar.gz",
".orig.tar.bz2",
".orig.tar.xz",
".zip",
".tgz",
".tbz",
".txz",
)
versionRegex = (
(re.compile(r"(.+)[-_](\d+)\.(\d+)\.(\d+)\.(\d+)"), parseVer_1234), # xxx-1.2.3.4
(re.compile(r"(.+)[-_](\d\d\d\d)-?(\d\d)-?(\d\d)"), parseVer_ymd), # xxx-YYYY-MM-DD
(re.compile(r"(.+)[-_]([0-9a-fA-F]{40,40})"), parseVer_GIT), # xxx-GIT_SHASUM
(re.compile(r"(.+)[-_](\d+)\.(\d+)\.(\d+)(\w?)"), parseVer_123), # xxx-1.2.3a
(re.compile(r"(.+)[-_](\d+)_(\d+)_(\d+)"), parseVer_123), # xxx-1_2_3
(re.compile(r"(.+)[-_](\d+)\.(\d+)(\w?)"), parseVer_12), # xxx-1.2a
(re.compile(r"(.+)[-_]r?(\d+)"), parseVer_r), # xxx-r1111
)
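# Hedged examples of how the patterns above tokenize names (filenames are
# illustrative): "foo-1.2.3a.tar.bz2" is handled by parseVer_123,
# "bar-2013-06-01.tar.gz" by parseVer_ymd, and "baz-r3456.tgz" by parseVer_r.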
blacklist = [
("linux", re.compile(r"linux-.*")),
("gcc", re.compile(r"gcc-.*")),
("wl_apsta", re.compile(r"wl_apsta.*")),
(".fw", re.compile(r".*\.fw")),
(".arm", re.compile(r".*\.arm")),
(".bin", re.compile(r".*\.bin")),
("rt-firmware", re.compile(r"RT[\d\w]+_Firmware.*")),
]
class EntryParseError(Exception): pass
class Entry:
def __init__(self, directory, filename):
self.directory = directory
self.filename = filename
self.progname = ""
self.fileext = ""
for ext in extensions:
if filename.endswith(ext):
filename = filename[0:0-len(ext)]
self.fileext = ext
break
else:
print self.filename, "has an unknown file-extension"
raise EntryParseError("ext")
for (regex, parseVersion) in versionRegex:
match = regex.match(filename)
if match:
(self.progname, self.version) = parseVersion(
match, directory + "/" + filename + self.fileext)
break
else:
print self.filename, "has an unknown version pattern"
raise EntryParseError("ver")
def deleteFile(self):
path = (self.directory + "/" + self.filename).replace("//", "/")
print "Deleting", path
if not opt_dryrun:
os.unlink(path)
def __eq__(self, y):
return self.filename == y.filename
def __ge__(self, y):
return self.version >= y.version
def usage():
print "OpenWRT download directory cleanup utility"
print "Usage: " + sys.argv[0] + " [OPTIONS] <path/to/dl>"
print ""
print " -d|--dry-run Do a dry-run. Don't delete any files"
print " -B|--show-blacklist Show the blacklist and exit"
print " -w|--whitelist ITEM Remove ITEM from blacklist"
def main(argv):
global opt_dryrun
try:
(opts, args) = getopt.getopt(argv[1:],
"hdBw:",
[ "help", "dry-run", "show-blacklist", "whitelist=", ])
if len(args) != 1:
usage()
return 1
except getopt.GetoptError:
usage()
return 1
directory = args[0]
for (o, v) in opts:
if o in ("-h", "--help"):
usage()
return 0
if o in ("-d", "--dry-run"):
opt_dryrun = True
if o in ("-w", "--whitelist"):
for i in range(0, len(blacklist)):
(name, regex) = blacklist[i]
if name == v:
del blacklist[i]
break
else:
print "Whitelist error: Item", v,\
"is not in blacklist"
return 1
if o in ("-B", "--show-blacklist"):
for (name, regex) in blacklist:
print name
return 0
# Create a directory listing and parse the file names.
entries = []
for filename in os.listdir(directory):
if filename == "." or filename == "..":
continue
for (name, regex) in blacklist:
if regex.match(filename):
if opt_dryrun:
print filename, "is blacklisted"
break
else:
try:
entries.append(Entry(directory, filename))
except (EntryParseError), e: pass
# Create a map of programs
progmap = {}
for entry in entries:
if entry.progname in progmap.keys():
progmap[entry.progname].append(entry)
else:
progmap[entry.progname] = [entry,]
# Traverse the program map and delete everything but the last version
for prog in progmap:
lastVersion = None
versions = progmap[prog]
for version in versions:
if lastVersion is None or version >= lastVersion:
lastVersion = version
if lastVersion:
for version in versions:
if version != lastVersion:
version.deleteFile()
if opt_dryrun:
print "Keeping", lastVersion.filename
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv))
| gpl-2.0 | -406,450,484,268,978,940 | 24.722944 | 84 | 0.616627 | false |
tianchaijz/MTHTTPServerWFM | MTHTTPServerWFM.py | 1 | 19948 | #!/usr/bin/env python
# encoding: utf-8
"""Multiple Threading HTTP Server With File Management.
This program is extended from the standard `SimpleHTTPServer` module by adding
upload and delete file features.
"""
__version__ = "0.31"
__all__ = ["HTTPRequestHandlerWFM"]
__author__ = "Jinzheng Zhang"
__email__ = "tianchaijz@gmail.com"
__git__ = "https://github.com/tianchaijz/MTHTTPServerWFM"
import os
import sys
import re
import cgi
import json
import shutil
import socket
import urllib
import hashlib
import logging
import mimetypes
import posixpath
import threading
from copy import deepcopy
from SocketServer import ThreadingMixIn
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# ============================== Config ==============================
ENC = sys.stdout.encoding
ENC_MAP = {"cp936": "gbk"}
CHARSET = ENC_MAP.get(ENC, "utf-8")
reload(sys)
sys.setdefaultencoding("utf-8")
logging.basicConfig(
level=logging.INFO,
format="[%(asctime)s] [%(levelname)s] - %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
FILE_NAME = os.path.basename(__file__).split('.')[0]
WORK_PATH = sys.argv[2] if sys.argv[2:] else os.getcwd()
# ====================================================================
class HTMLStyle(object):
CSS = """
body { background:#FFF; color:#000;
font-family:Helvetica, Arial, sans-serif; }
h1 { margin:.5em 0 0; }
h2 { margin:.8em 0 .3em; }
h3 { margin:.5em 0 .3em; }
table { font-size:.8em; border-collapse:collapse;
border-bottom:1px solid #DDEEDD; width:100%; margin:.5em 0; }
thead th { font-size:1em; background:#DDEEDD;
border:.2em solid #FFFFFF; padding:.1em .3em; }
tbody tr.odd { background:#F5F5F5; }
tbody th { text-align:left; }
tbody td { height:1.2em; text-align:right; }
"""
GETPAGE = """
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
<title>Directory listing for {directory}</title>
<style>{css}</style>
</head>
<body>
<div>
<h3><a href="/">Home</a> Directory listing for {directory}</h3>
</div>
<div>
<hr color="#DDEEDD">
<form enctype="multipart/form-data" method="post">
Upload File: <input name="file" type="file"/>
<input type="submit" value="Upload"/>
</form>
</div>
<div>
<hr color="#DDEEDD">
<form action="/delete" method="post">
Delete File: <input type="text" name="filename">
<input type="submit" value="Submit">
</form>
</div>
<div>
<hr color="#DDEEDD">
<table>
<thead>
<tr> <th rowspan="2">NAME</th> <th colspan="2">INFO</th> </tr>
<tr> <th>SIZE</th> <th>SHA1SUM</th> </tr>
</thead>
"""
POSTPAGE = """
<!DOCTYPE html>
<html>
<head> <meta charset="utf-8"/> <title>Result Page</title> </head>
<body>
<h3>Result:</h3>
<hr color="#DDEEDD">
<strong>{result}: </strong>
{msg}
<hr color="#DDEEDD"><br><a href="{refer}">Go Back</a>
<body>
</html>
"""
TBODY = """
<tbody>
{tr_class}
<th><a href="{linkname}">{displayname}</a></th>
<td>{size}</td> <td>{sha1sum}</td>
</tr>
</tbody>
"""
def __init__(self):
self.count = 0
def gen_getpage(self, **kwargs):
kwargs["css"] = HTMLStyle.CSS
return HTMLStyle.GETPAGE.format(**kwargs)
def gen_postpage(self, **kwargs):
return HTMLStyle.POSTPAGE.format(**kwargs)
def gen_table_body(self, **kwargs):
self.count = 1 - self.count
kwargs["tr_class"] = '<tr class="odd">' if self.count > 0 else '<tr>'
return HTMLStyle.TBODY.format(**kwargs)
class FileInfoHandler(object):
FILE_LOCK = threading.Lock()
def __init__(self):
self.info_file = os.path.join(WORK_PATH, "__%s.json" % FILE_NAME)
self.lock = threading.Lock()
self.info, self.oldinfo = {}, {}
threading.Thread(
target=self._load_info, name="Thread: Load File Info",
).start()
def _load_info(self):
try:
with FileInfoHandler.FILE_LOCK:
with open(self.info_file, 'rb') as fd:
info = json.load(fd, encoding=ENC)
except IOError, e:
pass
except Exception, e:
logging.exception(str(e))
self.flush_info()
else:
logging.info("Load file info success")
self.info, self.oldinfo = info, deepcopy(info)
def _do_flush(self):
with FileInfoHandler.FILE_LOCK:
try:
with open(self.info_file, 'wb') as fd:
json.dump(self.info, fd, encoding=ENC)
except IOError:
pass
except Exception, e:
logging.exception(str(e))
def _gen_info(self, file):
def hashfile(fd, hasher, blocksize=65536):
buf = fd.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = fd.read(blocksize)
return hasher.hexdigest()
try:
logging.debug("Add file info: %s" % file)
size = str(os.path.getsize(file))
mtime = str(os.path.getmtime(file))
with open(file, 'rb') as fd:
sha1sum = hashfile(fd, hashlib.sha1())
with self.lock:
self.info[file] = {
"sha1sum": sha1sum, "size": size, "mtime": mtime
}
self._do_flush()
except IOError, e:
logging.exception("%s: %s" % (file, str(e)))
def get_info(self, file):
file_info = self.info.get(file, False)
if file_info:
file_mtime = os.path.getmtime(file)
if str(file_mtime) != file_info["mtime"]:
logging.debug("Update file info: %s" % file)
self.add_info(file)
return file_info
else:
if os.path.isfile(file):
self.add_info(file)
return self.dummy_info()
def del_info(self, file):
with self.lock:
try:
del self.info[file]
logging.info("Delete file info: %s" % file)
self._do_flush()
except KeyError:
logging.exception("%s not found" % file)
except ValueError, e:
logging.exception(str(e))
def add_info(self, file):
thread = threading.Thread(
target=self._gen_info,
args=(file,),
name="Thread - " + file,
)
thread.daemon = True
thread.start()
def flush_info(self):
with self.lock:
self._do_flush()
def need_flush(self):
return bool(set(self.info) - set(self.oldinfo))
def dummy_info(self):
return {"size": '', "sha1sum": ''}
class HTTPRequestHandlerWFM(BaseHTTPRequestHandler):
"""HTTP request handler with GET, HEAD and POST commands.
This serves files from the current directory and any of its
subdirectories. The MIME type for files is determined by
calling the .guess_type() method.
The GET, HEAD and POST requests are identical except that the HEAD
request omits the actual contents of the file.
"""
server_version = "%s/%s" % (FILE_NAME, __version__)
CWD = WORK_PATH
FIH = FileInfoHandler()
HS = HTMLStyle()
def __init__(self, *args, **kwargs):
logging.debug("__init__ %s" % (self.__class__.__name__))
self.fih = HTTPRequestHandlerWFM.FIH
self.hs = HTTPRequestHandlerWFM.HS
BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
def do_GET(self):
"""Serve a GET request."""
logging.debug("Current thread: %s" % threading.current_thread())
f = self.send_head()
if f:
try:
self.copyfile(f, self.wfile)
finally:
f.close()
def do_HEAD(self):
"""Serve a HEAD request."""
f = self.send_head()
if f:
f.close()
def do_POST(self):
"""Serve a POST request."""
def parse_post_data():
if self.path == "/delete":
form = cgi.FieldStorage(
fp=self.rfile,
headers=self.headers,
environ={
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": self.headers["Content-Type"],
}
)
filename = form.getvalue("filename")
if filename is None:
return (False, "no file specified")
filename = urllib.unquote(filename).decode("utf-8")
fullname = os.path.join(HTTPRequestHandlerWFM.CWD, filename)
try:
os.remove(fullname)
logging.warn("Delete file: %s" %
self.real_path(fullname.encode(ENC)))
self.fih.del_info(fullname)
return (True, "file: %s deleted" %
self.real_path(fullname))
except OSError, e:
return (False, str(e).decode("string_escape"))
else:
return self.deal_post_file()
res, msg = parse_post_data()
logging.info("Post %s, %s by %s"
% (res, msg, self.client_address))
f = StringIO()
postpage = self.hs.gen_postpage(
result=str(res), msg=msg, refer=self.headers["Referer"]
)
f.write(postpage)
length = f.tell()
f.seek(0)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-Length", str(length))
self.end_headers()
if f:
self.copyfile(f, self.wfile)
f.close()
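    # deal_post_file() below hand-parses a multipart/form-data body; the wire
    # format it expects looks like this (boundary value is illustrative):
    #   --BOUNDARY\r\n
    #   Content-Disposition: form-data; name="file"; filename="x.txt"\r\n
    #   Content-Type: application/octet-stream\r\n
    #   \r\n
    #   <raw file bytes>\r\n
    #   --BOUNDARY--\r\n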
def deal_post_file(self):
self.is_upload = True
try:
boundary = self.headers.plisttext.split("=")[1]
except IndexError:
self.is_upload = False
if self.is_upload:
content_length = remainbytes = int(self.headers["Content-Length"])
line = self.rfile.readline()
remainbytes -= len(line)
if boundary not in line:
return (False, "content can't begin with boundary")
line = self.rfile.readline()
remainbytes -= len(line)
fn = re.findall(
r'Content-Disposition.*name="file"; filename="(.+)"',
line
)
if not fn:
return (False, "can't find out the file name")
path = self.translate_path(self.path)
fn = os.path.join(path, fn[0].decode("utf-8"))
while os.path.exists(fn):
fn += "_"
line = self.rfile.readline()
remainbytes -= len(line)
line = self.rfile.readline()
remainbytes -= len(line)
try:
out = open(fn, 'wb')
logging.info("Post file: %s, Content-Length: %d" %
(self.real_path(fn.encode(ENC)), content_length))
logging.info("Write to file: %s" %
self.real_path(fn.encode(ENC)))
except IOError, e:
return (False, "can't write file: %s" % str(e))
preline = self.rfile.readline()
remainbytes -= len(preline)
while remainbytes > 0:
line = self.rfile.readline()
remainbytes -= len(line)
if boundary in line:
preline = preline[0:-1]
if preline.endswith('\r'):
preline = preline[0:-1]
out.write(preline)
out.close()
return (True, "file '%s' uploaded" % fn)
else:
out.write(preline)
preline = line
return (False, "unexpect ends of data.")
else:
body = self.rfile.read()
return (False, "unknow post data: %s ..." % body[0:9])
def send_head(self):
"""Common code for GET and HEAD commands.
This sends the response code and MIME headers.
Return value is either a file object (which has to be copied
to the outputfile by the caller unless the command was HEAD,
and must be closed by the caller under all circumstances), or
None, in which case the caller has nothing further to do.
"""
path = self.translate_path(self.path)
f = None
if os.path.isdir(path):
if not self.path.endswith('/'):
# redirect browser - doing basically what apache does
self.send_response(301)
self.send_header("Location", self.path + "/")
self.end_headers()
return None
for index in "index.html", "index.htm":
index = os.path.join(path, index)
if os.path.exists(index):
path = index
break
else:
HTTPRequestHandlerWFM.CWD = path
return self.list_directory(path)
ctype = "%s; charset=%s" % (self.guess_type(path), CHARSET)
try:
# Always read in binary mode. Opening files in text mode may cause
# newline translations, making the actual size of the content
# transmitted *less* than the content-length!
f = open(path, 'rb')
logging.info("Get file: %s" % self.real_path(path.encode(ENC)))
except IOError, e:
self.send_error(404, str(e))
return None
try:
self.send_response(200)
self.send_header("Content-type", ctype)
fs = os.fstat(f.fileno())
self.send_header("Content-Length", str(fs[6]))
self.send_header(
"Last-Modified", self.date_time_string(fs.st_mtime))
self.end_headers()
return f
except:
f.close()
raise
def real_path(self, path):
return os.path.relpath(path, HTTPRequestHandlerWFM.CWD)
def list_directory(self, path):
"""Helper to produce a directory listing (absent index.html).
Return value is either a file object, or None (indicating an
error). In either case, the headers are sent, making the
interface the same as for send_head().
"""
try:
files = os.listdir(path)
list = map(lambda s:
(s if isinstance(s, unicode) else s.decode(ENC)), files)
logging.info("Get directory: %s" %
self.real_path(path.encode(ENC)))
except os.error:
self.send_error(403, "No permission to list directory")
return None
list.sort(key=lambda a: a.lower())
f = StringIO()
displaypath = cgi.escape(urllib.unquote(self.path))
f.write(self.hs.gen_getpage(directory=displaypath))
for name in list:
fullname = os.path.join(path, name)
displayname = linkname = name
info = self.fih.get_info(fullname)
# Append / for directories or @ for symbolic links
if os.path.isdir(fullname):
displayname = name + "/"
linkname = name + "/"
if os.path.islink(fullname):
displayname = name + "@"
# Note: a link to a directory displays with @ and links with /
f.write(self.hs.gen_table_body(
linkname=urllib.quote(linkname.encode("utf-8")),
displayname=cgi.escape(displayname.encode("utf-8")),
**info
))
f.write("\n".join(["</table>", "</div>", "</body>", "</html>"]))
length = f.tell()
f.seek(0)
self.send_response(200)
self.send_header("Content-Length", str(length))
self.end_headers()
if self.fih.need_flush():
self.fih.flush_info()
return f
def translate_path(self, path):
"""Translate a /-separated PATH to the local filename syntax.
Components that mean special things to the local file system
(e.g. drive or directory names) are ignored. (XXX They should
probably be diagnosed.)
"""
# abandon query parameters
path = path.split('?', 1)[0]
path = path.split('#', 1)[0]
# Don't forget explicit trailing slash when normalizing. Issue17324
trailing_slash = path.rstrip().endswith('/')
path = posixpath.normpath(urllib.unquote(path).decode("utf-8"))
words = path.split('/')
words = filter(None, words)
path = os.getcwd()
for word in words:
drive, word = os.path.splitdrive(word)
head, word = os.path.split(word)
if word in (os.curdir, os.pardir):
continue
path = os.path.join(path, word)
if trailing_slash:
path += '/'
return path
def copyfile(self, source, outputfile):
"""Copy all data between two file objects.
The SOURCE argument is a file object open for reading
(or anything with a read() method) and the DESTINATION
argument is a file object open for writing (or
anything with a write() method).
The only reason for overriding this would be to change
the block size or perhaps to replace newlines by CRLF
        -- note however that the default server uses this
to copy binary data as well.
"""
shutil.copyfileobj(source, outputfile)
def guess_type(self, path):
"""Guess the type of a file.
Argument is a PATH (a filename).
Return value is a string of the form type/subtype,
usable for a MIME Content-type header.
The default implementation looks the file's extension
up in the table self.extensions_map, using application/octet-stream
as a default; however it would be permissible (if
slow) to look inside the data to make a better guess.
"""
base, ext = posixpath.splitext(path)
if ext in self.extensions_map:
return self.extensions_map[ext]
ext = ext.lower()
if ext in self.extensions_map:
return self.extensions_map[ext]
else:
return self.extensions_map['']
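    # Class-level MIME table, built once at import time; the '' entry is the
    # fallback type for unknown extensions.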
if not mimetypes.inited:
mimetypes.init() # try to read system mime.types
extensions_map = mimetypes.types_map.copy()
extensions_map.update({
'': 'application/octet-stream', # Default
'.c': 'text/plain',
'.h': 'text/plain',
'.sh': 'text/plain',
'.py': 'text/plain',
'.txt': 'text/plain',
'.lua': 'text/plain',
'.json': 'application/json',
})
def log_request(self, code='-'):
sys.stdout.write("Status: %s\n" % str(code))
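# ThreadingMixIn handles each request in its own thread, so one slow transfer
# cannot stall other clients.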
class MultiThreadingServer(ThreadingMixIn, HTTPServer):
pass
def main():
if sys.argv[1:]:
port = int(sys.argv[1])
else:
port = 8000
if sys.argv[2:]:
os.chdir(sys.argv[2])
server_address = ('', port)
server = MultiThreadingServer(server_address, HTTPRequestHandlerWFM)
sa = server.socket.getsockname()
try:
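        # "Connect" a UDP socket to learn the outbound interface address;
        # no packets are actually sent for a datagram connect.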
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
logging.info("IP address: %s, pid: %d" %
(s.getsockname()[0], os.getpid()))
s.close()
except:
pass
logging.info("Serving HTTP on: %s, port: %d" % (sa[0], sa[1]))
try:
server.serve_forever()
except KeyboardInterrupt:
        print  # Python 2 print statement: emit a newline after the ^C
logging.info("Serving Finished")
if __name__ == '__main__':
main()
| mit | -3,933,337,580,770,203,600 | 31.278317 | 79 | 0.536595 | false |
siosio/intellij-community | plugins/hg4idea/testData/bin/mercurial/verify.py | 93 | 10933 | # verify.py - repository integrity checking for Mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import nullid, short
from i18n import _
import os
import revlog, util, error
def verify(repo):
lock = repo.lock()
try:
return _verify(repo)
finally:
lock.release()
def _normpath(f):
# under hg < 2.4, convert didn't sanitize paths properly, so a
# converted repo may contain repeated slashes
while '//' in f:
f = f.replace('//', '/')
return f
def _verify(repo):
repo = repo.unfiltered()
mflinkrevs = {}
filelinkrevs = {}
filenodes = {}
revisions = 0
badrevs = set()
errors = [0]
warnings = [0]
ui = repo.ui
cl = repo.changelog
mf = repo.manifest
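    # memoized changectx lookups; verify resolves the same linkrevs repeatedly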
lrugetctx = util.lrucachefunc(repo.changectx)
if not repo.cancopy():
raise util.Abort(_("cannot verify bundle or remote repos"))
def err(linkrev, msg, filename=None):
if linkrev is not None:
badrevs.add(linkrev)
else:
linkrev = '?'
msg = "%s: %s" % (linkrev, msg)
if filename:
msg = "%s@%s" % (filename, msg)
ui.warn(" " + msg + "\n")
errors[0] += 1
def exc(linkrev, msg, inst, filename=None):
if isinstance(inst, KeyboardInterrupt):
ui.warn(_("interrupted"))
raise
if not str(inst):
inst = repr(inst)
err(linkrev, "%s: %s" % (msg, inst), filename)
def warn(msg):
ui.warn(msg + "\n")
warnings[0] += 1
def checklog(obj, name, linkrev):
if not len(obj) and (havecl or havemf):
err(linkrev, _("empty or missing %s") % name)
return
d = obj.checksize()
if d[0]:
err(None, _("data length off by %d bytes") % d[0], name)
if d[1]:
err(None, _("index contains %d extra bytes") % d[1], name)
if obj.version != revlog.REVLOGV0:
if not revlogv1:
warn(_("warning: `%s' uses revlog format 1") % name)
elif revlogv1:
warn(_("warning: `%s' uses revlog format 0") % name)
def checkentry(obj, i, node, seen, linkrevs, f):
lr = obj.linkrev(obj.rev(node))
if lr < 0 or (havecl and lr not in linkrevs):
if lr < 0 or lr >= len(cl):
msg = _("rev %d points to nonexistent changeset %d")
else:
msg = _("rev %d points to unexpected changeset %d")
err(None, msg % (i, lr), f)
if linkrevs:
if f and len(linkrevs) > 1:
try:
# attempt to filter down to real linkrevs
linkrevs = [l for l in linkrevs
if lrugetctx(l)[f].filenode() == node]
except Exception:
pass
warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
lr = None # can't be trusted
try:
p1, p2 = obj.parents(node)
if p1 not in seen and p1 != nullid:
err(lr, _("unknown parent 1 %s of %s") %
(short(p1), short(node)), f)
if p2 not in seen and p2 != nullid:
err(lr, _("unknown parent 2 %s of %s") %
(short(p2), short(node)), f)
except Exception, inst:
exc(lr, _("checking parents of %s") % short(node), inst, f)
if node in seen:
err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f)
seen[node] = i
return lr
if os.path.exists(repo.sjoin("journal")):
ui.warn(_("abandoned transaction found - run hg recover\n"))
revlogv1 = cl.version != revlog.REVLOGV0
if ui.verbose or not revlogv1:
ui.status(_("repository uses revlog format %d\n") %
(revlogv1 and 1 or 0))
havecl = len(cl) > 0
havemf = len(mf) > 0
ui.status(_("checking changesets\n"))
refersmf = False
seen = {}
checklog(cl, "changelog", 0)
total = len(repo)
for i in repo:
ui.progress(_('checking'), i, total=total, unit=_('changesets'))
n = cl.node(i)
checkentry(cl, i, n, seen, [i], "changelog")
try:
changes = cl.read(n)
if changes[0] != nullid:
mflinkrevs.setdefault(changes[0], []).append(i)
refersmf = True
for f in changes[3]:
filelinkrevs.setdefault(_normpath(f), []).append(i)
except Exception, inst:
refersmf = True
exc(i, _("unpacking changeset %s") % short(n), inst)
ui.progress(_('checking'), None)
ui.status(_("checking manifests\n"))
seen = {}
if refersmf:
# Do not check manifest if there are only changelog entries with
# null manifests.
checklog(mf, "manifest", 0)
total = len(mf)
for i in mf:
ui.progress(_('checking'), i, total=total, unit=_('manifests'))
n = mf.node(i)
lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest")
if n in mflinkrevs:
del mflinkrevs[n]
else:
err(lr, _("%s not in changesets") % short(n), "manifest")
try:
for f, fn in mf.readdelta(n).iteritems():
if not f:
err(lr, _("file without name in manifest"))
elif f != "/dev/null":
filenodes.setdefault(_normpath(f), {}).setdefault(fn, lr)
except Exception, inst:
exc(lr, _("reading manifest delta %s") % short(n), inst)
ui.progress(_('checking'), None)
ui.status(_("crosschecking files in changesets and manifests\n"))
total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes)
count = 0
if havemf:
for c, m in sorted([(c, m) for m in mflinkrevs
for c in mflinkrevs[m]]):
count += 1
if m == nullid:
continue
ui.progress(_('crosschecking'), count, total=total)
err(c, _("changeset refers to unknown manifest %s") % short(m))
mflinkrevs = None # del is bad here due to scope issues
for f in sorted(filelinkrevs):
count += 1
ui.progress(_('crosschecking'), count, total=total)
if f not in filenodes:
lr = filelinkrevs[f][0]
err(lr, _("in changeset but not in manifest"), f)
if havecl:
for f in sorted(filenodes):
count += 1
ui.progress(_('crosschecking'), count, total=total)
if f not in filelinkrevs:
try:
fl = repo.file(f)
lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
except Exception:
lr = None
err(lr, _("in manifest but not in changeset"), f)
ui.progress(_('crosschecking'), None)
ui.status(_("checking files\n"))
storefiles = set()
for f, f2, size in repo.store.datafiles():
if not f:
err(None, _("cannot decode filename '%s'") % f2)
elif size > 0 or not revlogv1:
storefiles.add(_normpath(f))
files = sorted(set(filenodes) | set(filelinkrevs))
total = len(files)
for i, f in enumerate(files):
ui.progress(_('checking'), i, item=f, total=total)
try:
linkrevs = filelinkrevs[f]
except KeyError:
# in manifest but not in changelog
linkrevs = []
if linkrevs:
lr = linkrevs[0]
else:
lr = None
try:
fl = repo.file(f)
except error.RevlogError, e:
err(lr, _("broken revlog! (%s)") % e, f)
continue
for ff in fl.files():
try:
storefiles.remove(ff)
except KeyError:
err(lr, _("missing revlog!"), ff)
checklog(fl, f, lr)
seen = {}
rp = None
for i in fl:
revisions += 1
n = fl.node(i)
lr = checkentry(fl, i, n, seen, linkrevs, f)
if f in filenodes:
if havemf and n not in filenodes[f]:
err(lr, _("%s not in manifests") % (short(n)), f)
else:
del filenodes[f][n]
# verify contents
try:
l = len(fl.read(n))
rp = fl.renamed(n)
if l != fl.size(i):
if len(fl.revision(n)) != fl.size(i):
err(lr, _("unpacked size is %s, %s expected") %
(l, fl.size(i)), f)
except Exception, inst:
exc(lr, _("unpacking %s") % short(n), inst, f)
# check renames
try:
if rp:
if lr is not None and ui.verbose:
ctx = lrugetctx(lr)
found = False
for pctx in ctx.parents():
if rp[0] in pctx:
found = True
break
if not found:
warn(_("warning: copy source of '%s' not"
" in parents of %s") % (f, ctx))
fl2 = repo.file(rp[0])
if not len(fl2):
err(lr, _("empty or missing copy source revlog %s:%s")
% (rp[0], short(rp[1])), f)
elif rp[1] == nullid:
ui.note(_("warning: %s@%s: copy source"
" revision is nullid %s:%s\n")
% (f, lr, rp[0], short(rp[1])))
else:
fl2.rev(rp[1])
except Exception, inst:
exc(lr, _("checking rename of %s") % short(n), inst, f)
# cross-check
if f in filenodes:
fns = [(lr, n) for n, lr in filenodes[f].iteritems()]
for lr, node in sorted(fns):
err(lr, _("%s in manifests not found") % short(node), f)
ui.progress(_('checking'), None)
for f in storefiles:
warn(_("warning: orphan revlog '%s'") % f)
ui.status(_("%d files, %d changesets, %d total revisions\n") %
(len(files), len(cl), revisions))
if warnings[0]:
ui.warn(_("%d warnings encountered!\n") % warnings[0])
if errors[0]:
ui.warn(_("%d integrity errors encountered!\n") % errors[0])
if badrevs:
ui.warn(_("(first damaged changeset appears to be %d)\n")
% min(badrevs))
return 1
| apache-2.0 | -4,930,357,445,557,989,000 | 33.380503 | 78 | 0.480198 | false |
leiferikb/bitpop | depot_tools/third_party/pylint/reporters/html.py | 20 | 2541 | # Copyright (c) 2003-2006 Sylvain Thenault (thenault@gmail.com).
# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""HTML reporter"""
import sys
from cgi import escape
from logilab.common.ureports import HTMLWriter, Section, Table
from pylint.interfaces import IReporter
from pylint.reporters import BaseReporter
class HTMLReporter(BaseReporter):
"""report messages and layouts in HTML"""
__implements__ = IReporter
extension = 'html'
def __init__(self, output=sys.stdout):
BaseReporter.__init__(self, output)
self.msgs = []
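        # flat buffer: add_message appends six fields per message, which
        # _display later renders as rows of a 6-column table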
def add_message(self, msg_id, location, msg):
"""manage message of different type and in the context of path"""
module, obj, line, col_offset = location[1:]
if self.include_ids:
sigle = msg_id
else:
sigle = msg_id[0]
self.msgs += [sigle, module, obj, str(line), str(col_offset), escape(msg)]
def set_output(self, output=None):
"""set output stream
messages buffered for old output is processed first"""
if self.out and self.msgs:
self._display(Section())
BaseReporter.set_output(self, output)
def _display(self, layout):
"""launch layouts display
overridden from BaseReporter to add insert the messages section
(in add_message, message is not displayed, just collected so it
can be displayed in an html table)
"""
if self.msgs:
# add stored messages to the layout
msgs = ['type', 'module', 'object', 'line', 'col_offset', 'message']
msgs += self.msgs
sect = Section('Messages')
layout.append(sect)
sect.append(Table(cols=6, children=msgs, rheaders=1))
self.msgs = []
HTMLWriter().format(layout, self.out)
| gpl-3.0 | -2,432,774,537,931,149,300 | 35.826087 | 82 | 0.658796 | false |
CMPUT410W15T02/CMPUT410W15-project | testenv/lib/python2.7/site-packages/django/contrib/contenttypes/forms.py | 93 | 3837 | from __future__ import unicode_literals
from django.db import models
from django.forms import ModelForm, modelformset_factory
from django.forms.models import BaseModelFormSet
from django.contrib.contenttypes.models import ContentType
class BaseGenericInlineFormSet(BaseModelFormSet):
"""
A formset for generic inline objects to a parent.
"""
def __init__(self, data=None, files=None, instance=None, save_as_new=None,
prefix=None, queryset=None, **kwargs):
opts = self.model._meta
self.instance = instance
self.rel_name = '-'.join((
opts.app_label, opts.model_name,
self.ct_field.name, self.ct_fk_field.name,
))
if self.instance is None or self.instance.pk is None:
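            # unsaved parent instance: no related objects can exist yet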
qs = self.model._default_manager.none()
else:
if queryset is None:
queryset = self.model._default_manager
qs = queryset.filter(**{
self.ct_field.name: ContentType.objects.get_for_model(
self.instance, for_concrete_model=self.for_concrete_model),
self.ct_fk_field.name: self.instance.pk,
})
super(BaseGenericInlineFormSet, self).__init__(
queryset=qs, data=data, files=files,
prefix=prefix,
**kwargs
)
@classmethod
def get_default_prefix(cls):
opts = cls.model._meta
return '-'.join(
(opts.app_label, opts.model_name,
cls.ct_field.name, cls.ct_fk_field.name)
)
def save_new(self, form, commit=True):
setattr(form.instance, self.ct_field.get_attname(),
ContentType.objects.get_for_model(self.instance).pk)
setattr(form.instance, self.ct_fk_field.get_attname(),
self.instance.pk)
return form.save(commit=commit)
def generic_inlineformset_factory(model, form=ModelForm,
formset=BaseGenericInlineFormSet,
ct_field="content_type", fk_field="object_id",
fields=None, exclude=None,
extra=3, can_order=False, can_delete=True,
max_num=None, formfield_callback=None,
validate_max=False, for_concrete_model=True,
min_num=None, validate_min=False):
"""
Returns a ``GenericInlineFormSet`` for the given kwargs.
You must provide ``ct_field`` and ``fk_field`` if they are different from
the defaults ``content_type`` and ``object_id`` respectively.
"""
opts = model._meta
# if there is no field called `ct_field` let the exception propagate
ct_field = opts.get_field(ct_field)
if not isinstance(ct_field, models.ForeignKey) or ct_field.rel.to != ContentType:
raise Exception("fk_name '%s' is not a ForeignKey to ContentType" % ct_field)
fk_field = opts.get_field(fk_field) # let the exception propagate
if exclude is not None:
exclude = list(exclude)
exclude.extend([ct_field.name, fk_field.name])
else:
exclude = [ct_field.name, fk_field.name]
FormSet = modelformset_factory(model, form=form,
formfield_callback=formfield_callback,
formset=formset,
extra=extra, can_delete=can_delete, can_order=can_order,
fields=fields, exclude=exclude, max_num=max_num,
validate_max=validate_max, min_num=min_num,
validate_min=validate_min)
FormSet.ct_field = ct_field
FormSet.ct_fk_field = fk_field
FormSet.for_concrete_model = for_concrete_model
return FormSet
| gpl-2.0 | 1,935,249,024,444,285,200 | 42.11236 | 91 | 0.576753 | false |
kelvin13/Knockout | pygments/lexers/configs.py | 21 | 27854 | # -*- coding: utf-8 -*-
"""
pygments.lexers.configs
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for configuration file formats.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, default, words, bygroups, include, using
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace, Literal
from pygments.lexers.shell import BashLexer
__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
'PkgConfigLexer', 'PacmanConfLexer']
class IniLexer(RegexLexer):
"""
Lexer for configuration files in INI style.
"""
name = 'INI'
aliases = ['ini', 'cfg', 'dosini']
filenames = ['*.ini', '*.cfg', '*.inf']
mimetypes = ['text/x-ini', 'text/inf']
tokens = {
'root': [
(r'\s+', Text),
(r'[;#].*', Comment.Single),
(r'\[.*?\]$', Keyword),
(r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
bygroups(Name.Attribute, Text, Operator, Text, String))
]
}
def analyse_text(text):
npos = text.find('\n')
if npos < 3:
return False
return text[0] == '[' and text[npos-1] == ']'
class RegeditLexer(RegexLexer):
"""
Lexer for `Windows Registry
<http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
by regedit.
.. versionadded:: 1.6
"""
name = 'reg'
aliases = ['registry']
filenames = ['*.reg']
mimetypes = ['text/x-windows-registry']
tokens = {
'root': [
(r'Windows Registry Editor.*', Text),
(r'\s+', Text),
(r'[;#].*', Comment.Single),
(r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
bygroups(Keyword, Operator, Name.Builtin, Keyword)),
# String keys, which obey somewhat normal escaping
(r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
bygroups(Name.Attribute, Text, Operator, Text),
'value'),
# Bare keys (includes @)
(r'(.*?)([ \t]*)(=)([ \t]*)',
bygroups(Name.Attribute, Text, Operator, Text),
'value'),
],
'value': [
(r'-', Operator, '#pop'), # delete value
(r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
bygroups(Name.Variable, Punctuation, Number), '#pop'),
# As far as I know, .reg files do not support line continuation.
(r'.+', String, '#pop'),
default('#pop'),
]
}
def analyse_text(text):
return text.startswith('Windows Registry Editor')
class PropertiesLexer(RegexLexer):
"""
Lexer for configuration files in Java's properties format.
.. versionadded:: 1.4
"""
name = 'Properties'
aliases = ['properties', 'jproperties']
filenames = ['*.properties']
mimetypes = ['text/x-java-properties']
tokens = {
'root': [
(r'\s+', Text),
(r'(?:[;#]|//).*$', Comment),
(r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
bygroups(Name.Attribute, Text, Operator, Text, String)),
],
}
def _rx_indent(level):
# Kconfig *always* interprets a tab as 8 spaces, so this is the default.
# Edit this if you are in an environment where KconfigLexer gets expanded
# input (tabs expanded to spaces) and the expansion tab width is != 8,
# e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
# Value range here is 2 <= {tab_width} <= 8.
tab_width = 8
# Regex matching a given indentation {level}, assuming that indentation is
# a multiple of {tab_width}. In other cases there might be problems.
if tab_width == 2:
space_repeat = '+'
else:
space_repeat = '{1,%d}' % (tab_width - 1)
if level == 1:
level_repeat = ''
else:
level_repeat = '{%s}' % level
return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
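# For example, with tab_width == 8, _rx_indent(2) yields a pattern matching
# lines that start with two "tab stops" (each a tab, spaces then a tab, or
# eight spaces) followed by anything up to the newline.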
class KconfigLexer(RegexLexer):
"""
For Linux-style Kconfig files.
.. versionadded:: 1.6
"""
name = 'Kconfig'
aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
# Adjust this if new kconfig file names appear in your environment
filenames = ['Kconfig', '*Config.in*', 'external.in*',
'standard-modules.in']
mimetypes = ['text/x-kconfig']
# No re.MULTILINE, indentation-aware help text needs line-by-line handling
flags = 0
def call_indent(level):
# If indentation >= {level} is detected, enter state 'indent{level}'
return (_rx_indent(level), String.Doc, 'indent%s' % level)
def do_indent(level):
# Print paragraphs of indentation level >= {level} as String.Doc,
# ignoring blank lines. Then return to 'root' state.
return [
(_rx_indent(level), String.Doc),
(r'\s*\n', Text),
default('#pop:2')
]
tokens = {
'root': [
(r'\s+', Text),
(r'#.*?\n', Comment.Single),
(words((
'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
'source', 'prompt', 'select', 'depends on', 'default',
'range', 'option'), suffix=r'\b'),
Keyword),
(r'(---help---|help)[\t ]*\n', Keyword, 'help'),
(r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
Name.Builtin),
(r'[!=&|]', Operator),
(r'[()]', Punctuation),
(r'[0-9]+', Number.Integer),
(r"'(''|[^'])*'", String.Single),
(r'"(""|[^"])*"', String.Double),
(r'\S+', Text),
],
# Help text is indented, multi-line and ends when a lower indentation
# level is detected.
'help': [
# Skip blank lines after help token, if any
(r'\s*\n', Text),
# Determine the first help line's indentation level heuristically(!).
# Attention: this is not perfect, but works for 99% of "normal"
# indentation schemes up to a max. indentation level of 7.
call_indent(7),
call_indent(6),
call_indent(5),
call_indent(4),
call_indent(3),
call_indent(2),
call_indent(1),
default('#pop'), # for incomplete help sections without text
],
# Handle text for indentation levels 7 to 1
'indent7': do_indent(7),
'indent6': do_indent(6),
'indent5': do_indent(5),
'indent4': do_indent(4),
'indent3': do_indent(3),
'indent2': do_indent(2),
'indent1': do_indent(1),
}
class Cfengine3Lexer(RegexLexer):
"""
Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
.. versionadded:: 1.5
"""
name = 'CFEngine3'
aliases = ['cfengine3', 'cf3']
filenames = ['*.cf']
mimetypes = []
tokens = {
'root': [
(r'#.*?\n', Comment),
(r'(body)(\s+)(\S+)(\s+)(control)',
bygroups(Keyword, Text, Keyword, Text, Keyword)),
(r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
'arglist'),
(r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
bygroups(Keyword, Text, Keyword, Text, Name.Function)),
(r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
bygroups(Punctuation, Name.Variable, Punctuation,
Text, Keyword.Type, Text, Operator, Text)),
(r'(\S+)(\s*)(=>)(\s*)',
bygroups(Keyword.Reserved, Text, Operator, Text)),
(r'"', String, 'string'),
(r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
(r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
(r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
(r'@[{(][^)}]+[})]', Name.Variable),
(r'[(){},;]', Punctuation),
(r'=>', Operator),
(r'->', Operator),
(r'\d+\.\d+', Number.Float),
(r'\d+', Number.Integer),
(r'\w+', Name.Function),
(r'\s+', Text),
],
'string': [
(r'\$[{(]', String.Interpol, 'interpol'),
(r'\\.', String.Escape),
(r'"', String, '#pop'),
(r'\n', String),
(r'.', String),
],
'interpol': [
(r'\$[{(]', String.Interpol, '#push'),
(r'[})]', String.Interpol, '#pop'),
(r'[^${()}]+', String.Interpol),
],
'arglist': [
(r'\)', Punctuation, '#pop'),
(r',', Punctuation),
(r'\w+', Name.Variable),
(r'\s+', Text),
],
}
class ApacheConfLexer(RegexLexer):
"""
Lexer for configuration files following the Apache config file
format.
.. versionadded:: 0.6
"""
name = 'ApacheConf'
aliases = ['apacheconf', 'aconf', 'apache']
filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
mimetypes = ['text/x-apacheconf']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
(r'\s+', Text),
(r'(#.*?)$', Comment),
(r'(<[^\s>]+)(?:(\s+)(.*?))?(>)',
bygroups(Name.Tag, Text, String, Name.Tag)),
(r'([a-z]\w*)(\s+)',
bygroups(Name.Builtin, Text), 'value'),
(r'\.+', Text),
],
'value': [
(r'\\\n', Text),
(r'$', Text, '#pop'),
(r'\\', Text),
(r'[^\S\n]+', Text),
(r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
(r'\d+', Number),
(r'/([a-z0-9][\w./-]+)', String.Other),
(r'(on|off|none|any|all|double|email|dns|min|minimal|'
r'os|productonly|full|emerg|alert|crit|error|warn|'
r'notice|info|debug|registry|script|inetd|standalone|'
r'user|group)\b', Keyword),
(r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
(r'[^\s"\\]+', Text)
],
}
class SquidConfLexer(RegexLexer):
"""
Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
.. versionadded:: 0.9
"""
name = 'SquidConf'
aliases = ['squidconf', 'squid.conf', 'squid']
filenames = ['squid.conf']
mimetypes = ['text/x-squidconf']
flags = re.IGNORECASE
keywords = (
"access_log", "acl", "always_direct", "announce_host",
"announce_period", "announce_port", "announce_to", "anonymize_headers",
"append_domain", "as_whois_server", "auth_param_basic",
"authenticate_children", "authenticate_program", "authenticate_ttl",
"broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
"cache_dir", "cache_dns_program", "cache_effective_group",
"cache_effective_user", "cache_host", "cache_host_acl",
"cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
"cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
"cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
"cache_stoplist_pattern", "cache_store_log", "cache_swap",
"cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
"client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
"dead_peer_timeout", "debug_options", "delay_access", "delay_class",
"delay_initial_bucket_level", "delay_parameters", "delay_pools",
"deny_info", "dns_children", "dns_defnames", "dns_nameservers",
"dns_testnames", "emulate_httpd_log", "err_html_text",
"fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
"fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
"ftp_passive", "ftp_user", "half_closed_clients", "header_access",
"header_replace", "hierarchy_stoplist", "high_response_time_warning",
"high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
"http_anonymizer", "httpd_accel", "httpd_accel_host",
"httpd_accel_port", "httpd_accel_uses_host_header",
"httpd_accel_with_proxy", "http_port", "http_reply_access",
"icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
"ident_lookup", "ident_lookup_access", "ident_timeout",
"incoming_http_average", "incoming_icp_average", "inside_firewall",
"ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
"local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
"log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
"mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
"mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
"memory_pools_limit", "memory_replacement_policy", "mime_table",
"min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
"minimum_object_size", "minimum_retry_timeout", "miss_access",
"negative_dns_ttl", "negative_ttl", "neighbor_timeout",
"neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
"netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
"pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
"prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
"quick_abort", "quick_abort_max", "quick_abort_min",
"quick_abort_pct", "range_offset_limit", "read_timeout",
"redirect_children", "redirect_program",
"redirect_rewrites_host_header", "reference_age",
"refresh_pattern", "reload_into_ims", "request_body_max_size",
"request_size", "request_timeout", "shutdown_lifetime",
"single_parent_bypass", "siteselect_timeout", "snmp_access",
"snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
"store_avg_object_size", "store_objects_per_bucket",
"strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
"tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
"test_reachability", "udp_hit_obj", "udp_hit_obj_size",
"udp_incoming_address", "udp_outgoing_address", "unique_hostname",
"unlinkd_program", "uri_whitespace", "useragent_log",
"visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
)
opts = (
"proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
"multicast-responder", "on", "off", "all", "deny", "allow", "via",
"parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
"credentialsttl", "none", "disable", "offline_toggle", "diskd",
)
actions = (
"shutdown", "info", "parameter", "server_list", "client_list",
r'squid.conf',
)
actions_stats = (
"objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
"redirector", "io", "reply_headers", "filedescriptors", "netdb",
)
actions_log = ("status", "enable", "disable", "clear")
acls = (
"url_regex", "urlpath_regex", "referer_regex", "port", "proto",
"req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
"dst", "time", "dstdomain", "ident", "snmp_community",
)
ip_re = (
r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
r'[1-9]?\d)){3}))'
)
tokens = {
'root': [
(r'\s+', Whitespace),
(r'#', Comment, 'comment'),
(words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
(words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
# Actions
(words(actions, prefix=r'\b', suffix=r'\b'), String),
(words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
(words(actions_log, prefix=r'log/', suffix=r'='), String),
(words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
(ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
(r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
(r'\S+', Text),
],
'comment': [
(r'\s*TAG:.*', String.Escape, '#pop'),
(r'.+', Comment, '#pop'),
default('#pop'),
],
}
class NginxConfLexer(RegexLexer):
"""
Lexer for `Nginx <http://nginx.net/>`_ configuration files.
.. versionadded:: 0.11
"""
name = 'Nginx configuration file'
aliases = ['nginx']
filenames = []
mimetypes = ['text/x-nginx-conf']
tokens = {
'root': [
(r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
(r'[^\s;#]+', Keyword, 'stmt'),
include('base'),
],
'block': [
(r'\}', Punctuation, '#pop:2'),
(r'[^\s;#]+', Keyword.Namespace, 'stmt'),
include('base'),
],
'stmt': [
(r'\{', Punctuation, 'block'),
(r';', Punctuation, '#pop'),
include('base'),
],
'base': [
(r'#.*\n', Comment.Single),
(r'on|off', Name.Constant),
(r'\$[^\s;#()]+', Name.Variable),
(r'([a-z0-9.-]+)(:)([0-9]+)',
bygroups(Name, Punctuation, Number.Integer)),
(r'[a-z-]+/[a-z-+]+', String), # mimetype
# (r'[a-zA-Z._-]+', Keyword),
(r'[0-9]+[km]?\b', Number.Integer),
(r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
(r'[:=~]', Punctuation),
(r'[^\s;#{}$]+', String), # catch all
(r'/[^\s;#]*', Name), # pathname
(r'\s+', Text),
(r'[$;]', Text), # leftover characters
],
}
class LighttpdConfLexer(RegexLexer):
"""
Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
.. versionadded:: 0.11
"""
name = 'Lighttpd configuration file'
aliases = ['lighty', 'lighttpd']
filenames = []
mimetypes = ['text/x-lighttpd-conf']
tokens = {
'root': [
(r'#.*\n', Comment.Single),
(r'/\S*', Name), # pathname
(r'[a-zA-Z._-]+', Keyword),
(r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
(r'[0-9]+', Number),
(r'=>|=~|\+=|==|=|\+', Operator),
(r'\$[A-Z]+', Name.Builtin),
(r'[(){}\[\],]', Punctuation),
(r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
(r'\s+', Text),
],
}
class DockerLexer(RegexLexer):
"""
Lexer for `Docker <http://docker.io>`_ configuration files.
.. versionadded:: 2.0
"""
name = 'Docker'
aliases = ['docker', 'dockerfile']
filenames = ['Dockerfile', '*.docker']
mimetypes = ['text/x-dockerfile-config']
_keywords = (r'(?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|'
r'VOLUME|WORKDIR)')
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,),
bygroups(Name.Keyword, Whitespace, Keyword)),
(r'^(%s)\b(.*)' % (_keywords,), bygroups(Keyword, String)),
(r'#.*', Comment),
(r'RUN', Keyword), # Rest of line falls through
(r'(.*\\\n)*.+', using(BashLexer)),
],
}
class TerraformLexer(RegexLexer):
"""
    Lexer for `terraform .tf files <https://www.terraform.io/>`_.
.. versionadded:: 2.1
"""
name = 'Terraform'
aliases = ['terraform', 'tf']
filenames = ['*.tf']
mimetypes = ['application/x-tf', 'application/x-terraform']
tokens = {
'root': [
include('string'),
include('punctuation'),
include('curly'),
include('basic'),
include('whitespace'),
(r'[0-9]+', Number),
],
'basic': [
(words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
(r'\s*/\*', Comment.Multiline, 'comment'),
(r'\s*#.*\n', Comment.Single),
(r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)),
(words(('variable', 'resource', 'provider', 'provisioner', 'module'),
prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'),
(words(('ingress', 'egress', 'listener', 'default', 'connection'),
prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
            (r'\$\{', String.Interpol, 'var_builtin'),
],
'function': [
(r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)),
include('punctuation'),
include('curly'),
],
'var_builtin': [
(r'\$\{', String.Interpol, '#push'),
(words(('concat', 'file', 'join', 'lookup', 'element'),
prefix=r'\b', suffix=r'\b'), Name.Builtin),
include('string'),
include('punctuation'),
(r'\s+', Text),
(r'\}', String.Interpol, '#pop'),
],
'string': [
(r'(".*")', bygroups(String.Double)),
],
'punctuation': [
(r'[\[\]\(\),.]', Punctuation),
],
        # Keep this separate from punctuation - we sometimes want to use different
# Tokens for { }
'curly': [
(r'\{', Text.Punctuation),
(r'\}', Text.Punctuation),
],
'comment': [
(r'[^*/]', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text),
],
}
class TermcapLexer(RegexLexer):
"""
Lexer for termcap database source.
This is very simple and minimal.
.. versionadded:: 2.1
"""
name = 'Termcap'
aliases = ['termcap',]
filenames = ['termcap', 'termcap.src',]
mimetypes = []
# NOTE:
# * multiline with trailing backslash
# * separator is ':'
# * to embed colon as data, we must use \072
    # * space after separator is not allowed (maybe)
tokens = {
'root': [
(r'^#.*$', Comment),
(r'^[^\s#:\|]+', Name.Tag, 'names'),
],
'names': [
(r'\n', Text, '#pop'),
(r':', Punctuation, 'defs'),
(r'\|', Punctuation),
(r'[^:\|]+', Name.Attribute),
],
'defs': [
(r'\\\n[ \t]*', Text),
(r'\n[ \t]*', Text, '#pop:2'),
(r'(#)([0-9]+)', bygroups(Operator, Number)),
(r'=', Operator, 'data'),
(r':', Punctuation),
(r'[^\s:=#]+', Name.Class),
],
'data': [
(r'\\072', Literal),
(r':', Punctuation, '#pop'),
(r'[^:\\]+', Literal), # for performance
(r'.', Literal),
],
}
class TerminfoLexer(RegexLexer):
"""
Lexer for terminfo database source.
This is very simple and minimal.
.. versionadded:: 2.1
"""
name = 'Terminfo'
aliases = ['terminfo',]
filenames = ['terminfo', 'terminfo.src',]
mimetypes = []
# NOTE:
# * multiline with leading whitespace
# * separator is ','
# * to embed comma as data, we can use \,
# * space after separator is allowed
tokens = {
'root': [
(r'^#.*$', Comment),
(r'^[^\s#,\|]+', Name.Tag, 'names'),
],
'names': [
(r'\n', Text, '#pop'),
(r'(,)([ \t]*)', bygroups(Punctuation, Text), 'defs'),
(r'\|', Punctuation),
(r'[^,\|]+', Name.Attribute),
],
'defs': [
(r'\n[ \t]+', Text),
(r'\n', Text, '#pop:2'),
(r'(#)([0-9]+)', bygroups(Operator, Number)),
(r'=', Operator, 'data'),
(r'(,)([ \t]*)', bygroups(Punctuation, Text)),
(r'[^\s,=#]+', Name.Class),
],
'data': [
(r'\\[,\\]', Literal),
(r'(,)([ \t]*)', bygroups(Punctuation, Text), '#pop'),
(r'[^\\,]+', Literal), # for performance
(r'.', Literal),
],
}
class PkgConfigLexer(RegexLexer):
"""
Lexer for `pkg-config
<http://www.freedesktop.org/wiki/Software/pkg-config/>`_
(see also `manual page <http://linux.die.net/man/1/pkg-config>`_).
.. versionadded:: 2.1
"""
name = 'PkgConfig'
aliases = ['pkgconfig',]
filenames = ['*.pc',]
mimetypes = []
tokens = {
'root': [
(r'#.*$', Comment.Single),
# variable definitions
(r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),
# keyword lines
(r'^([\w.]+)(:)',
bygroups(Name.Tag, Punctuation), 'spvalue'),
# variable references
include('interp'),
# fallback
(r'[^${}#=:\n.]+', Text),
(r'.', Text),
],
'interp': [
# you can escape literal "$" as "$$"
(r'\$\$', Text),
# variable references
(r'\$\{', String.Interpol, 'curly'),
],
'curly': [
(r'\}', String.Interpol, '#pop'),
(r'\w+', Name.Attribute),
],
'spvalue': [
include('interp'),
(r'#.*$', Comment.Single, '#pop'),
(r'\n', Text, '#pop'),
# fallback
(r'[^${}#\n]+', Text),
(r'.', Text),
],
}
class PacmanConfLexer(RegexLexer):
"""
Lexer for `pacman.conf
<https://www.archlinux.org/pacman/pacman.conf.5.html>`_.
Actually, IniLexer works almost fine for this format,
but it yield error token. It is because pacman.conf has
a form without assignment like:
UseSyslog
Color
TotalDownload
CheckSpace
VerbosePkgLists
These are flags to switch on.
.. versionadded:: 2.1
"""
name = 'PacmanConf'
aliases = ['pacmanconf',]
filenames = ['pacman.conf',]
mimetypes = []
tokens = {
'root': [
# comment
(r'#.*$', Comment.Single),
# section header
(r'^\s*\[.*?\]\s*$', Keyword),
# variable definitions
# (Leading space is allowed...)
(r'(\w+)(\s*)(=)',
bygroups(Name.Attribute, Text, Operator)),
# flags to on
(r'^(\s*)(\w+)(\s*)$',
bygroups(Text, Name.Attribute, Text)),
# built-in special values
(words((
'$repo', # repository
'$arch', # architecture
'%o', # outfile
'%u', # url
), suffix=r'\b'),
Name.Variable),
# fallback
(r'.', Text),
],
}
| gpl-3.0 | -11,670,821,120,337,016 | 32.680774 | 82 | 0.476808 | false |
alexlo03/ansible | lib/ansible/modules/cloud/ovirt/ovirt_host_pm.py | 8 | 8366 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_host_pm
short_description: Module to manage power management of hosts in oVirt/RHV
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
- "Module to manage power management of hosts in oVirt/RHV."
options:
name:
description:
- "Name of the host to manage."
required: true
aliases: ['host']
state:
description:
- "Should the host be present/absent."
choices: ['present', 'absent']
default: present
address:
description:
- "Address of the power management interface."
username:
description:
- "Username to be used to connect to power management interface."
password:
description:
- "Password of the user specified in C(username) parameter."
type:
description:
- "Type of the power management. oVirt/RHV predefined values are I(drac5), I(ipmilan), I(rsa),
I(bladecenter), I(alom), I(apc), I(apc_snmp), I(eps), I(wti), I(rsb), I(cisco_ucs),
I(drac7), I(hpblade), I(ilo), I(ilo2), I(ilo3), I(ilo4), I(ilo_ssh),
but user can have defined custom type."
port:
description:
- "Power management interface port."
options:
description:
- "Dictionary of additional fence agent options (including Power Management slot)."
- "Additional information about options can be found at U(https://github.com/ClusterLabs/fence-agents/blob/master/doc/FenceAgentAPI.md)."
encrypt_options:
description:
- "If I(true) options will be encrypted when send to agent."
aliases: ['encrypt']
order:
description:
- "Integer value specifying, by default it's added at the end."
version_added: "2.5"
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Add fence agent to host 'myhost'
- ovirt_host_pm:
name: myhost
address: 1.2.3.4
options:
myoption1: x
myoption2: y
username: admin
password: admin
port: 3333
type: ipmilan
# Add fence agent to host 'myhost' using 'slot' option
- ovirt_host_pm:
name: myhost
address: 1.2.3.4
options:
myoption1: x
myoption2: y
slot: myslot
username: admin
password: admin
port: 3333
type: ipmilan
# Remove ipmilan fence agent with address 1.2.3.4 on host 'myhost'
- ovirt_host_pm:
state: absent
name: myhost
address: 1.2.3.4
type: ipmilan
'''
RETURN = '''
id:
description: ID of the agent which is managed
returned: On success if agent is found.
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
agent:
description: "Dictionary of all the agent attributes. Agent attributes can be found on your oVirt/RHV instance
at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/agent."
returned: On success if agent is found.
type: dict
'''
import traceback
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_sdk,
create_connection,
equal,
ovirt_full_argument_spec,
search_by_name,
)
class HostModule(BaseModule):
def build_entity(self):
return otypes.Host(
power_management=otypes.PowerManagement(
enabled=True,
),
)
def update_check(self, entity):
return equal(True, entity.power_management.enabled)
class HostPmModule(BaseModule):
def pre_create(self, entity):
# Save the entity, so we know if Agent already existed
self.entity = entity
def build_entity(self):
last = next((s for s in sorted([a.order for a in self._service.list()])), 0)
order = self.param('order') if self.param('order') is not None else self.entity.order if self.entity else last + 1
return otypes.Agent(
address=self._module.params['address'],
encrypt_options=self._module.params['encrypt_options'],
options=[
otypes.Option(
name=name,
value=value,
) for name, value in self._module.params['options'].items()
] if self._module.params['options'] else None,
password=self._module.params['password'],
port=self._module.params['port'],
type=self._module.params['type'],
username=self._module.params['username'],
order=order,
)
def update_check(self, entity):
def check_options():
if self.param('options'):
current = []
if entity.options:
current = [(opt.name, str(opt.value)) for opt in entity.options]
passed = [(k, str(v)) for k, v in self.param('options').items()]
return sorted(current) == sorted(passed)
return True
return (
check_options() and
equal(self._module.params.get('address'), entity.address) and
equal(self._module.params.get('encrypt_options'), entity.encrypt_options) and
equal(self._module.params.get('username'), entity.username) and
equal(self._module.params.get('port'), entity.port) and
equal(self._module.params.get('type'), entity.type) and
equal(self._module.params.get('order'), entity.order)
)
def main():
argument_spec = ovirt_full_argument_spec(
state=dict(
choices=['present', 'absent'],
default='present',
),
name=dict(default=None, required=True, aliases=['host']),
address=dict(default=None),
username=dict(default=None),
password=dict(default=None, no_log=True),
type=dict(default=None),
port=dict(default=None, type='int'),
order=dict(default=None, type='int'),
options=dict(default=None, type='dict'),
encrypt_options=dict(default=None, type='bool', aliases=['encrypt']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
check_sdk(module)
try:
auth = module.params.pop('auth')
connection = create_connection(auth)
hosts_service = connection.system_service().hosts_service()
host = search_by_name(hosts_service, module.params['name'])
fence_agents_service = hosts_service.host_service(host.id).fence_agents_service()
host_pm_module = HostPmModule(
connection=connection,
module=module,
service=fence_agents_service,
)
host_module = HostModule(
connection=connection,
module=module,
service=hosts_service,
)
state = module.params['state']
if state == 'present':
agent = host_pm_module.search_entity(
search_params={
'address': module.params['address'],
'type': module.params['type'],
}
)
ret = host_pm_module.create(entity=agent)
# Enable Power Management, if it's not enabled:
host_module.create(entity=host)
elif state == 'absent':
agent = host_pm_module.search_entity(
search_params={
'address': module.params['address'],
'type': module.params['type'],
}
)
ret = host_pm_module.remove(entity=agent)
module.exit_json(**ret)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=auth.get('token') is None)
if __name__ == "__main__":
main()
| gpl-3.0 | -2,942,656,587,338,930,000 | 31.176923 | 149 | 0.588573 | false |
nearai/program_synthesis | program_synthesis/naps/uast/uast.py | 1 | 66502 | from __future__ import print_function
import functools
import six
import sys
import time
import math
import numpy as np
import re
from operator import mul
from sortedcontainers import SortedDict, SortedSet
from .uast_watcher import WatcherEvent, tuplify
DEBUG_INFO = False
LARGEST_INT = 2 ** 64
OBJECT = "object"
BOOL = "bool"
CHAR = "char"
STRING = "char*"
INT = "int"
REAL = "real"
VOID = "void"
FUNC = "func"
if not six.PY2:
long = int
def watchable(event_type):
def watchable_internal(some_func):
def wrapper(executor, context, *args, **kwargs):
if not executor.watchers: # don't waste precious cycles if there are no watchers
return some_func(executor, context, *args, **kwargs)
assert len(kwargs) <= 1, "%s for %s" % (kwargs, some_func)
all_args = list(args) + list(kwargs.values())
executor._watch(WatcherEvent("before_" + event_type, executor, context, *all_args))
ret = some_func(executor, context, *args, **kwargs)
executor._watch(WatcherEvent("after_" + event_type, executor, context, ret, *all_args))
return ret
return wrapper
return watchable_internal
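# Illustrative usage only (the names below are hypothetical, not from this
# module): an executor method decorated as
#
#     @watchable("func_call")
#     def execute_func(self, context, func, args): ...
#
# emits "before_func_call"/"after_func_call" WatcherEvents to every registered
# watcher around each call.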
class IO_t:
SCANNER = 'scanner'
PRINTER = 'printer'
next_int = ['invoke', INT, '_io_next_int', []]
next_real = ['invoke', INT, '_io_next_real', []]
next_line = ['invoke', STRING, '_io_next_line', []]
next_string = ['invoke', STRING, '_io_next_word', []]
def print_(self, x):
return ['invoke', VOID, '_io_print', [x]]
def println(self, x):
return ['invoke', VOID, '_io_println', [x]]
def __init__(self):
self.func_to_type = {}
self.func_to_type[self.next_int[2]] = INT
self.func_to_type[self.next_real[2]] = REAL
self.func_to_type[self.next_line[2]] = STRING
self.func_to_type[self.next_string[2]] = STRING
IO = IO_t()
GLOBALS_NAME = "__globals__"
class UASTNotImplementedException(Exception):
def __init__(self, feature, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
self.feature = feature
def __str__(self):
return "UAST Not Implemented: %s" % self.feature
class UASTTimeLimitExceeded(Exception):
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
class UASTParseError(Exception):
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
def var(name, type_):
return ["var", type_, name]
def get_expr_type(var):
return var[1]
def get_var_name(var):
assert var[0] == 'var'
return var[2]
def set_var_name(var, name):
var[2] = name
def constant(type_, value):
return ["val", type_, value]
def func(name, return_type=VOID):
return ["func", return_type, name, [], [], []]
def get_func_return_type(f):
assert f[0] in ['func', 'ctor'], f[0]
return f[1]
def get_func_name(f):
assert f[0] in ['func', 'ctor'], f[0]
return f[2]
def set_func_name(f, new_name):
assert f[0] in ['func', 'ctor'], f[0]
f[2] = new_name
def get_func_args(f):
assert f[0] in ['func', 'ctor'], f[0]
return f[3]
def get_func_vars(f):
assert f[0] in ['func', 'ctor'], f[0]
return f[4]
def get_func_body(f):
assert f[0] in ['func', 'ctor'], f[0]
return f[5]
def record(name):
return ["record", name, {}]
def get_record_name(record):
return record[1]
def get_record_fields(record):
return record[2]
def func_call(func_name, args, type_):
return ["invoke", type_, func_name, args]
def assign(lhs, rhs):
return ["assign", rhs[1], lhs, rhs]
def field(jcontext, obj, field):
type_ = get_expr_type(obj)
assert isinstance(type_, six.string_types), type_
assert type_[-1] == '#', type_
record = jcontext.get_record(type_[:-1])
return ["field", get_expr_type(get_record_fields(record)[field]), obj, field]
def type_array(subtype):
return subtype + "*"
def type_set(subtype):
return subtype + "%"
def type_map(subtype1, subtype2):
return '<' + subtype1 + '|' + subtype2 + ">"
def type_record(name):
return name + "#"
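# Type strings are encoded with single-character suffixes: "T*" is an array of
# T, "T%" a set of T, "<K|V>" a map from K to V, and "Name#" a record type.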
def get_array_subtype(tp):
assert tp[-1] == '*', tp
return tp[:-1]
def get_set_subtype(tp):
assert tp[-1] == '%', tp
return tp[:-1]
def get_map_key_type(tp):
assert tp[0] == '<', tp
assert tp[-1] == '>'
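    # Scan up to the top-level '|'; nested '<...>' pairs are skipped via the
    # balance counter, e.g. get_map_key_type("<int|char*>") == "int".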
ret = ""
balance = 0
for ch in tp[1:-1]:
if ch == '<':
balance += 1
elif ch == '>':
assert balance > 0
balance -= 1
elif ch == '|' and balance == 0:
break
ret += ch
return ret
def get_map_value_type(tp):
assert tp[0] == '<', tp
assert tp[-1] == '>'
ret = ""
balance = 0
saw_pipe = False
for ch in tp[1:-1]:
if saw_pipe:
ret += ch
if ch == '<':
balance += 1
elif ch == '>':
assert balance > 0
balance -= 1
elif ch == '>':
assert saw_pipe
break
elif ch == '|' and balance == 0:
saw_pipe = True
return ret
def type_to_record_name(tp):
assert tp[-1] == '#', "%s <> %s" % (tp, tp[-1])
return tp[:-1]
def is_array(tp):
return tp[-1] == '*'
def is_record_type(tp):
return tp[-1] in ['#']
def is_int_type(tp): # doesn't include char!
return tp in [INT]
def is_set_type(tp):
return tp[-1] in ['%']
def is_map_type(tp):
return tp[-1] in ['>']
def if_(cond, then, else_):
return ["if", VOID, cond, then, else_]
def ternary(cond, then, else_):
return ["?:", arithmetic_op_type(get_expr_type(then), get_expr_type(else_), allow_same=True), cond, then, else_]
def while_(cond, body, finally_):
return ["while", VOID, cond, body, finally_]
def for_each(var, collection, body):
return ["foreach", VOID, var, collection, body]
def arithmetic_op_type(tp1, tp2, allow_same=False):
if allow_same and (tp1 == tp2 or tp1 == OBJECT or tp2 == OBJECT): # TODO: check that we are not comparing object and value type
return tp1
for pr in [REAL, INT, CHAR, BOOL]:
if tp1 == pr or tp2 == pr:
return pr
raise UASTNotImplementedException("Arithmetic op on %s and %s" % (tp1, tp2))
def convert_binary_expression(arg1, arg2, operator):
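    # On two boolean operands, bitwise |, & and ^ are promoted to their
    # logical counterparts (||, &&, ^^) before dispatch.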
if get_expr_type(arg1) == BOOL and get_expr_type(arg2) == BOOL \
and operator in ['|', '&', '^']:
operator += operator
if operator in ['&&', '||', '==', '!=', '<', '<=', '>', '>=', '^^']:
return func_call(operator if operator != '^^' else '^', [arg1, arg2], BOOL)
if get_expr_type(arg1) == STRING or get_expr_type(arg2) == STRING:
if get_expr_type(arg1) in [STRING, CHAR] and get_expr_type(arg2) in [STRING, CHAR]:
assert operator == '+', operator
return func_call('concat', [arg1, arg2], STRING)
elif get_expr_type(arg1) in [STRING, VOID] and get_expr_type(arg2) in [STRING, VOID]:
assert operator in ['==', '!='], operator
return func_call(operator, [arg1, arg2], BOOL)
elif get_expr_type(arg1) == STRING:
assert operator == '+', operator
return func_call('concat', [arg1, func_call('str', [arg2], STRING)], STRING)
elif get_expr_type(arg2) == STRING:
assert operator == '+', operator
return func_call('concat', [func_call('str', [arg1], STRING), arg2], STRING)
assert False, "%s %s %s" % (get_expr_type(arg1), operator, get_expr_type(arg2))
if operator in ['+', '*', '%', '&', '|', '^', '-', '/', '>>', '<<']:
tp_ = arithmetic_op_type(get_expr_type(arg1), get_expr_type(arg2))
return func_call(operator, [arg1, arg2], tp_)
else:
raise UASTNotImplementedException("operator %s" % operator)
def is_assigneable(expr):
return expr[0] in ['var', 'field'] or expr[0] == 'invoke' and expr[2] == 'array_index'
def assert_val_matches_type(val, tp):
if tp == '?':
return
if not val_matches_type(val, tp):
if isinstance(val, float) and is_int_type(tp):
raise UASTNotImplementedException("Implicit cast from REAL to INT")
if val is None and is_int_type(tp):
raise UASTNotImplementedException("Implicit cast from NULL to INT")
assert False, "Type mismatch.\n Type: %s;\n Val: %s\n Val type: %s\n" % (tp, val, type(val))
def val_matches_type(val, tp, verbose=False):
if is_int_type(tp) or tp == CHAR:
# allow implicit conversion from float to int
return isinstance(val, float) or isinstance(val, int) or isinstance(val, long)
elif tp in [REAL]:
return isinstance(val, float) or isinstance(val, int)
elif tp in [STRING]:
return isinstance(val, six.string_types) or val is None
elif tp in [BOOL]:
return isinstance(val, bool)
elif tp[-1] in ["*"]:
return isinstance(val, list) or val is None
elif tp[-1] in ['#']:
return isinstance(val, dict) or val is None
elif tp[-1] in ['>']:
return isinstance(val, SortedDict) or val is None
elif tp[-1] in ['%']:
return isinstance(val, SortedSet) or val is None
elif tp == 'void':
return val is None
elif tp == 'func':
return isinstance(val, six.string_types)
elif tp in 'object':
return not isinstance(val, int) and not isinstance(val, long) and not isinstance(val, float) and not isinstance(val, bool)
elif tp in [IO.SCANNER, IO.PRINTER]:
return val is None
else:
assert False, tp
def can_cast(to_, from_):
if from_ == '?':
return True
if (to_[-1] in ['*', '#', '>', '%'] or to_ == OBJECT) and \
(from_[-1] in ['*', '#', '>', '%'] or from_ == OBJECT):
return True
return to_ in [INT, REAL, CHAR] and from_ in [INT, REAL, CHAR, STRING]
def get_block_statements(block):
assert isinstance(block, list)
return block
def default_value(ret_type):
if ret_type in [INT, REAL, CHAR]:
return 0
elif ret_type in [STRING]:
return ""
elif ret_type == BOOL:
return False
return None
# parse_context is either JContext or CContext
def prepare_global_var_and_func(parse_context):
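    # Builds the synthetic __globals__ record, a variable holding an instance
    # of it, and the __globals__.__init__ function, wiring all three into the
    # parse context.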
gi_fname = GLOBALS_NAME + ".__init__"
globals_ = record(GLOBALS_NAME)
parse_context.register_type(GLOBALS_NAME, type_record(GLOBALS_NAME))
parse_context.program['types'].append(globals_)
parse_context.globals_record = globals_
parse_context.globals_init_var = var(GLOBALS_NAME, type_record(GLOBALS_NAME))
parse_context.globals_init_func = func(gi_fname, VOID)
return gi_fname
class InputSchemaExtractorNotSupportedException(Exception):
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
class InputSchemaExtractor(object):
def __init__(self, data, attempt_multi_test=False):
super(InputSchemaExtractor, self).__init__()
self.uast = data
self.is_multi_test = attempt_multi_test
self.multi_test_var = None
self.multi_test_iter = None
self.multi_test_loop = None
self.inside_multi_test_loop = False
self.funcs = {get_func_name(func): func for func in data['funcs']}
self.types = {get_record_name(record): record for record in data['types']}
self.schema = []
self.cur_schema = self.schema
self.var_map = {}
self.arr_map = {}
self.init_vals = {}
self.var_map_assigns = {}
self.arr_map_assigns = {}
self.arr_map_inits = {}
self.bypassed_arrays = set()
self.remove_vars = set()
self.not_impl_stack = []
self.loop_stack = []
self.func_stack = []
self.cur_branch_out_len = 0
self.max_branch_out_len = 0
self.output_type = None
self.funcs_visited = {}
self.funcs_with_reads = set()
self.funcs_with_writes = set()
self.func_returns = {}
self.num_args = 0
# datastructures to postprocess code
self.replace_with_noops = []
self.process_input_blocks = []
self.process_output_blocks = []
self.process_invokes_with_reads = []
self.process_invokes_with_writes = []
def push_not_impl(self, s):
self.not_impl_stack.append(s)
def pop_not_impl(self):
self.not_impl_stack.pop()
def check_not_impl(self):
if self.not_impl_stack:
raise InputSchemaExtractorNotSupportedException(self.not_impl_stack[-1])
def next_arg_name(self):
if self.num_args >= 26:
raise InputSchemaExtractorNotSupportedException("More than 26 arguments")
self.num_args += 1
return chr(ord('a') + self.num_args - 1)
def crawl_stmt(self, stmt):
# TODO: presently not supporting reading lines
def hack_fix_me(s):
if s == 'line': return 'word'
return s
if stmt[0] == 'if': # TODO: properly handle this
self.push_not_impl("IO inside if")
self.crawl_stmt(stmt[2])
out_depth_before = self.cur_branch_out_len
self.crawl_stmt_list(stmt[3])
out_depth_after = self.cur_branch_out_len
self.cur_branch_out_len = out_depth_before
self.crawl_stmt_list(stmt[4])
self.cur_branch_out_len = max(self.cur_branch_out_len, out_depth_after)
self.pop_not_impl()
elif stmt[0] == 'foreach':
self.push_not_impl("IO inside foreach")
self.loop_stack.append(None)
self.crawl_stmt_list(stmt[4])
self.loop_stack.pop()
self.pop_not_impl()
elif stmt[0] == 'while':
cond = stmt[2]
body = stmt[3]
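            # Recognize the competitive-programming idioms "for (i = 0; i < t; ++i)"
            # and "while (t-- > 0)" that wrap per-test-case input reading.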
is_up_to_t = self.is_multi_test and self.multi_test_var is not None
if is_up_to_t:
is_up_to_t = cond[0] == 'invoke' and cond[2] in ('<', '<=') and cond[3][1][0] == 'var' and cond[3][1][2] == self.multi_test_var[0][2] and cond[3][0][0] == 'var' and cond[3][0][2] in self.init_vals
init_val = self.init_vals[cond[3][0][2]] if is_up_to_t else None
is_up_to_t = is_up_to_t and init_val is not None and (cond[2] == '<' and init_val == 0) #or cond[2] == '<=' and init_val == 1) # TODO: add support for 1-based indexing. Presently turned off because we use the variable to index into the tests
if is_up_to_t:
self.multi_test_iter = cond[3][0]
self.multi_test_loop = stmt
assert len(self.schema) == 1 and self.schema[0] == self.multi_test_var[1], "%s <> %s" % (self.schema, self.multi_test_var[1])
self.process_input_blocks.append([self.schema[0], cond[3][1]])
#print(('T', is_up_to_t, cond, init_val, cond[3][0][2], self.multi_test_var))
else: # try while(t-->0)
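                # In the UAST, "t-- > 0" is encoded as ((t = t - 1) + 1) > 0,
                # which the while_t_minus_minus checks below match; while_t
                # instead matches "while (t) { ...; t = t - 1; }".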
if cond[0] == 'invoke' and cond[2] in ('>', '!='):
#print("Step1...")
cond_lhs = cond[3][0]
cond_rhs = cond[3][1]
common_cond = cond_rhs[0] == 'val' and cond_rhs[2] == 0
while_t_minus_minus = common_cond and cond_lhs[0] == 'invoke' and cond_lhs[2] == '+' and cond_lhs[3][1][0] == 'val' and cond_lhs[3][1][2] == 1
if while_t_minus_minus:
#print("Step2...")
while_t_minus_minus = cond_lhs[3][0][0] == 'assign' and cond_lhs[3][0][2][0] == 'var' and cond_lhs[3][0][2][2] == self.multi_test_var[0][2]
while_t = False
if not while_t_minus_minus:
while_t = common_cond and cond_lhs[0] == 'var' and cond_lhs[2] == self.multi_test_var[0][2]
while_t = while_t and body and body[-1][0] == 'assign' and body[-1][2][0] == 'var' and body[-1][2][2] == cond_lhs[2]
if while_t:
assign_rhs = body[-1][3]
#print("ASSIGN_RHS", assign_rhs)
while_t = assign_rhs[0] == 'invoke' and assign_rhs[2] == '-' and assign_rhs[3][0][0] == 'var' and assign_rhs[3][0][2] == cond_lhs[2]
#print(body[-1])
if while_t_minus_minus or while_t:
if while_t:
body.pop()
if body and body[-1][0] == 'var' and body[-1][2] == self.multi_test_var[0][2]:
body.pop()
                            # TODO: would make sense to check if the assign is correct, but probabilistically it probably is :)
#print("Step3...")
self.multi_test_iter = ["var", INT, "ti"] #TODO: ti should be available
self.multi_test_loop = stmt
is_up_to_t = True
assert len(self.schema) == 1 and self.schema[0] == self.multi_test_var[1], "%s <> %s" % (self.schema, self.multi_test_var[1])
new_lhs = self.multi_test_iter
new_rhs = cond_lhs[3][0][2] if while_t_minus_minus else cond_lhs
stmt[2] = cond = ["invoke", BOOL, "<", [new_lhs, new_rhs]]
stmt[4] = [["assign", INT, self.multi_test_iter, ['invoke', INT, '+', [self.multi_test_iter, ['val', INT, 1]]]]]
self.process_input_blocks.append([self.schema[0], new_rhs])
is_up_to_n = cond[0] == 'invoke' and cond[2] in ('<', '<=') and cond[3][1][0] == 'var' and cond[3][1][2] in self.var_map and cond[3][0][0] == 'var' and cond[3][0][2] in self.init_vals
init_val = self.init_vals[cond[3][0][2]] if is_up_to_n else None
is_up_to_n = is_up_to_n and init_val is not None and (cond[2] == '<' and init_val == 0 or cond[2] == '<=' and init_val == 1)
assert not is_up_to_n or not is_up_to_t
#print(('N', is_up_to_n, cond, init_val, cond[3][0][2]))
if is_up_to_t:
if self.inside_multi_test_loop:
raise InputSchemaExtractorNotSupportedException("Iterating over the `t` inside iterating over `t` for multitest case")
self.inside_multi_test_loop = True
self.crawl_stmt_list(body)
self.inside_multi_test_loop = False
elif is_up_to_n:
self.loop_stack.append(self.var_map[cond[3][1][2]])
old_cur_schema = self.cur_schema
self.cur_schema.append(['loop', VOID, []])
self.cur_schema = self.schema[-1][2]
self.crawl_stmt_list(body)
if not self.cur_schema:
old_cur_schema.pop()
self.cur_schema = old_cur_schema
self.loop_stack.pop()
else:
self.push_not_impl("IO inside for other than range for on an input")
self.loop_stack.append(None)
self.crawl_stmt_list(body)
self.loop_stack.pop()
self.pop_not_impl()
elif stmt[0] in ['break', 'continue', 'noop']:
pass
elif stmt[0] == 'return':
func_name = get_func_name(self.func_stack[-1])
if func_name not in self.func_returns:
self.func_returns[func_name] = []
self.func_returns[func_name].append(stmt)
self.crawl_stmt(stmt[2])
# Expressions
elif stmt[0] == 'assign':
if stmt[2][1] in [IO.SCANNER, IO.PRINTER]:
self.replace_with_noops.append(stmt)
else:
ret = self.crawl_stmt(stmt[3])
if ret is not None and stmt[2][0] == 'var':
if self.is_multi_test and self.multi_test_var is None:
self.multi_test_var = (stmt[2], ret)
self.replace_with_noops.append(stmt)
else:
self.var_map[stmt[2][2]] = ret
if stmt[3][0] != 'var':
self.var_map_assigns[stmt[2][2]] = stmt
if ret is not None and stmt[2][0] == 'invoke' and stmt[2][2] == 'array_index' and stmt[2][3][0][0] == 'var' and stmt[2][3][1][0] == 'var':
self.arr_map[stmt[2][3][0][2]] = ret
self.arr_map_assigns[stmt[2][3][0][2]] = stmt
if stmt[3][0] == 'val' and stmt[2][0] == 'var':
#print("Assigning %s to %s" % (stmt[2][2], stmt[3][2]))
self.init_vals[stmt[2][2]] = stmt[3][2]
if stmt[2][0] == 'var':
if stmt[2][2] not in self.arr_map_inits:
self.arr_map_inits[stmt[2][2]] = stmt
else:
self.arr_map_inits[stmt[2][2]] = False
elif stmt[0] == 'var':
if stmt[2] in self.var_map:
return self.var_map[stmt[2]]
elif stmt[0] == 'field':
self.crawl_stmt(stmt[2])
elif stmt[0] == 'val':
pass
elif stmt[0] == 'invoke':
if stmt[2].startswith('_io_next_'):
if self.is_multi_test and self.multi_test_var is not None and not self.inside_multi_test_loop:
raise InputSchemaExtractorNotSupportedException("Multitest schema with input outside of multitest while loop: %s" % stmt)
self.funcs_with_reads.add(get_func_name(self.func_stack[-1]))
self.check_not_impl()
if len(self.loop_stack) > 1:
raise InputSchemaExtractorNotSupportedException("Nested loops")
if not self.loop_stack:
new_entry = ['in', IO.func_to_type[stmt[2]], self.next_arg_name(), hack_fix_me(stmt[2].split('_')[-1])]
else:
new_entry = ['in', type_array(IO.func_to_type[stmt[2]]), self.next_arg_name(), stmt[2].split('_')[-1]]
if self.loop_stack[-1][0] == 'in':
self.loop_stack[-1][0] = 'size'
self.loop_stack[-1][1] = INT
self.loop_stack[-1][2] = [new_entry[2]]
else:
assert self.loop_stack[-1][0] == 'size'
if new_entry[2] not in self.loop_stack[-1][2]:
self.loop_stack[-1][2].append(new_entry[2])
self.process_input_blocks.append([new_entry, stmt])
self.cur_schema.append(new_entry)
return new_entry
elif stmt[2].startswith('_io_print'):
self.funcs_with_writes.add(get_func_name(self.func_stack[-1]))
assert len(stmt[3]) in [0, 1], stmt
if len(stmt[3]):
#if self.is_multi_test and not self.inside_multi_test_loop:
# raise InputSchemaExtractorNotSupportedException("Multitest schema with output outside of multitest while loop")
if self.loop_stack or self.inside_multi_test_loop:
self.cur_branch_out_len = 2 # >1 means return a list
else:
self.cur_branch_out_len += 1
self.max_branch_out_len = max(self.max_branch_out_len, self.cur_branch_out_len)
new_output_type = get_expr_type(stmt[3][0])
if self.output_type is not None and self.output_type != new_output_type:
if self.output_type == 'char*' and not new_output_type.endswith('*'):
pass
elif not self.output_type.endswith('*') and new_output_type == 'char*':
self.output_type = 'char*'
else:
raise InputSchemaExtractorNotSupportedException("Mixing different output types: %s and %s" % (self.output_type, new_output_type))
else:
self.output_type = new_output_type
self.process_output_blocks.append(stmt)
else:
self.replace_with_noops.append(stmt)
else:
assert not stmt[2].startswith('_io_')
# TODO: invoke the function if it's a user-defined function
for arg in stmt[3]:
                    assert len(arg) > 1, "argument doesn't have two elements. Stmt: %s; arg: %s" % (stmt, arg)
if arg[1] in [IO.PRINTER, IO.SCANNER]:
arg[:] = ['val', VOID, None]
self.crawl_stmt(arg)
if stmt[2] in self.funcs:
snapshot_var_map = self.var_map
self.var_map = {}
assert get_func_name(self.funcs[stmt[2]]) == stmt[2], "%s <> %s" % (self.funcs[stmt[2]], stmt[2])
self.crawl_func(self.funcs[stmt[2]])
# TODO: this won't work if a function that reads stuff is called twice, but it doesn't appear to be a common case
if stmt[2] in self.funcs_with_reads:
self.funcs_with_reads.add(get_func_name(self.func_stack[-1]))
self.process_invokes_with_reads.append(stmt)
if stmt[2] in self.funcs_with_writes:
self.funcs_with_writes.add(get_func_name(self.func_stack[-1]))
self.process_invokes_with_writes.append(stmt)
self.var_map = snapshot_var_map
elif stmt[0] == '?:':
self.push_not_impl("IO inside ternary op")
self.crawl_stmt(stmt[2])
self.crawl_stmt(stmt[3])
self.crawl_stmt(stmt[4])
self.pop_not_impl()
elif stmt[0] == 'cast':
ret = self.crawl_stmt(stmt[2])
if ret is not None:
if get_expr_type(stmt) not in (INT, REAL):
raise InputSchemaExtractorNotSupportedException("CAST of input to %s" % get_expr_type(stmt))
if not ret[1].startswith('char*'):
return None
#print("replacing %s / %s with %s" % (ret[1], ret[3], get_expr_type(stmt)))
ret[1] = ret[1].replace('char*', get_expr_type(stmt))
ret[3] = get_expr_type(stmt)
return ret
else:
assert False, stmt[0]
def crawl_stmt_list(self, l):
for s in l:
self.crawl_stmt(s)
def crawl_func(self, func):
self.func_stack.append(func)
func_name = get_func_name(func)
if func_name not in self.funcs_visited:
self.funcs_visited[func_name] = 1
else:
self.funcs_visited[func_name] += 1
if self.funcs_visited[func_name] > 10:
self.func_stack.pop()
return # to prevent recursion / exponential blow up
self.crawl_stmt_list(get_func_body(func))
self.func_stack.pop()
def extract_schema(self, lang):
entry_point = None
for func_name, func in self.funcs.items():
if lang == 'c++':
if func_name == 'main':
if entry_point is not None:
raise InputSchemaExtractorNotSupportedException("Multiple entry points")
entry_point = func
elif lang == 'java':
if func_name.endswith(".main"):
args = get_func_args(func)
if len(args) == 1 and get_var_name(args[0]) != 'this':
if entry_point is not None:
raise InputSchemaExtractorNotSupportedException("Multiple entry points")
entry_point = func
else:
assert False
if entry_point is None:
raise InputSchemaExtractorNotSupportedException("Entry point not found")
self.entry_point = entry_point
self.push_not_impl("I/O in global initializer")
self.crawl_func(self.funcs[GLOBALS_NAME + ".__init__"])
self.pop_not_impl()
self.crawl_func(entry_point)
if not self.schema or (self.is_multi_test and len(self.schema) == 1):
raise InputSchemaExtractorNotSupportedException("Input schema is not derived")
if self.output_type is not None:
if self.max_branch_out_len > 1:
self.output_type = type_array(self.output_type)
self.schema.append(['out', self.output_type])
else:
raise InputSchemaExtractorNotSupportedException("Output type is not derived")
if self.is_multi_test:
self.schema[0][0] = 'testN'
# BFS to remove empty loops
while True:
found = False
            queue = [(el, self.schema, i) for (i, el) in enumerate(self.schema)]
            for el, parent, idx in queue:
                if el[0] == 'loop':
                    if not el[2]:
                        del parent[idx]
                        found = True
                        break
                    else:
                        queue += [(child, el[2], i) for (i, child) in enumerate(el[2])]
if not found:
break
if not self.is_multi_test:
for k, v in self.var_map.items():
if v[0] == 'in' and (v[1] == 'char*' or not v[1].endswith('*')) and k in self.var_map_assigns:
self.remove_vars.add(k)
self.replace_with_noops.append(self.var_map_assigns[k])
v[2] = k
for k, v in self.arr_map.items():
if v[0] == 'in' and v[1].endswith('*') and v[1] != 'char*':
if k in self.arr_map_inits:
if self.arr_map_inits[k] == False:
continue
self.replace_with_noops.append(self.arr_map_inits[k])
self.remove_vars.add(k)
self.replace_with_noops.append(self.arr_map_assigns[k])
self.bypassed_arrays.add(k)
for sz in self.schema:
if sz[0] == 'size':
for i, x in enumerate(sz[2]):
if x == v[2]:
sz[2][i] = k
v[2] = k
#print(self.arr_map)
#print(self.arr_map, self.arr_map_assigns)
return self.schema
def matches_schema(self, other_schema):
if len(self.schema) != len(other_schema):
return False
for our, their in zip(self.schema, other_schema):
if our != their:
if our[0] == 'out' and their[0] == 'out':
if our[1] + '*' == their[1]:
continue
if our[1] == their[1] + '*':
continue
return False
return True
def postprocess_uast(self, desired_schema):
assert self.matches_schema(desired_schema)
if self.is_multi_test and (not self.multi_test_iter or not self.multi_test_var):
raise InputSchemaExtractorNotSupportedException("Multitest schema extractor hasn't found the multitest iter or multitest var")
for x in desired_schema:
if x[0] == 'out':
# it is common for schemas to be different only in whether the output is array or not
# hence allow the caller to choose the output type
self.output_type = x[1]
entry_point = self.entry_point
original_vars = (set([x[2] for x in get_func_vars(entry_point) if len(x) > 2]) | \
set([x[2] for x in get_func_args(entry_point) if len(x) > 2]))
for func_name in self.funcs_with_reads:
func = self.funcs[func_name]
original_vars |= (set([x[2] for x in get_func_vars(func) if len(x) > 2]) | \
set([x[2] for x in get_func_args(func) if len(x) > 2]))
original_vars -= set(self.remove_vars)
def arg_name(s):
if s == 'testN': return s
ord_ = 0
orig = s
while s in original_vars:
ord_ += 1
s = orig + str(ord_)
return s
def idx_name(s):
s = "%s_i" % arg_name(s)
ord_ = 0
orig = s
while s in original_vars:
ord_ += 1
s = orig + str(ord_)
return s
for block in self.replace_with_noops:
del block[:]
block.append("noop")
set_func_name(entry_point, '__main__')
args = []
vars_ = []
body = []
idx_reset = []
body_after = []
args_map = {}
args_idx_map = {}
for entry, block in self.process_input_blocks:
del block[:]
if entry[0] == 'size':
arg_var = ["var", OBJECT, arg_name(entry[2][0])] # TODO: OBJECT should be the actual type
replace_with = arg_var
if self.is_multi_test and entry != self.multi_test_var[1]:
arg_var[1] += '*'
arg_var = ["invoke", OBJECT, 'array_index', [arg_var, self.multi_test_iter]]
block.append("invoke")
block.append(INT)
block.append("len")
block.append([arg_var])
elif entry[0] in ['testN', 'in']:
arg_var = ["var", entry[1], arg_name(entry[2])]
tp = entry[1]
replace_with = arg_var
if self.is_multi_test and entry != self.multi_test_var[1]:
arg_var[1] += '*'
entry[1] += '*'
replace_with = ["invoke", tp, 'array_index', [arg_var, self.multi_test_iter]]
if entry[2] not in args_map:
args.append(arg_var)
args_map[entry[2]] = args[-1]
if entry[2] in self.bypassed_arrays:
continue
if tp.endswith("*") and tp != 'char*':
if entry[2] not in args_idx_map:
vars_.append(["var", INT, idx_name(entry[2])])
args_idx_map[entry[2]] = vars_[-1]
idx_reset.insert(0, ["assign", INT, vars_[-1], constant(INT, 0)])
block.append("invoke")
block.append(tp[:-1])
block.append("array_index")
inc_idx = ["var", INT, idx_name(entry[2])]
inc_idx = ["assign", INT, inc_idx, ['invoke', INT, '+', [inc_idx, constant(INT, 1)]]]
inc_idx = ["invoke", INT, '-', [inc_idx, constant(INT, 1)]]
block.append([replace_with, inc_idx])
else:
block[:] = replace_with
out_type = self.output_type
if out_type.endswith('*') and out_type != 'char*':
vars_.append(["var", out_type, '__ret'])
out_var = vars_[-1]
body = [["assign", out_type, out_var, ["invoke", out_type, "_ctor", []]]] + body
body_after += [["return", out_type, out_var]]
for block in self.process_output_blocks:
if block[0] == 'return': # has been processed already
continue
if out_type.endswith('*') and out_type != 'char*':
block_val = block[3][0]
del block[:]
if out_type == 'char**':
if get_expr_type(block_val) == 'char*':
block.append('assign')
block.append('char**')
block.append(out_var)
if block_val[0] == "val" and '\t' not in block_val[2] and ' ' not in block_val[3]:
block.append(['invoke', 'char**', 'array_concat', [out_var, block_val]])
else:
block.append(['invoke', 'char**', 'array_concat', [out_var, ['invoke', 'char**', 'string_split', [block_val, ['val', 'char*', ' \\t']]]]])
else:
block.append('invoke')
block.append('char**')
block.append('array_push')
block.append([out_var, ['invoke', STRING, 'str', [block_val]]])
else:
block.append('invoke')
block.append('void')
block.append('array_push')
block.append([out_var, block_val])
else:
assert len(block) == 4, block
block[0] = 'return'
                if get_expr_type(block[3][0]) != 'char*' and self.output_type == 'char*':
                    block[2] = ['invoke', 'char*', 'str', [block[3][0]]]
                else:
                    block[2] = block[3][0]
block.pop()
if not self.is_multi_test:
body = body + idx_reset
else:
assert self.multi_test_loop
self.multi_test_loop[3] = idx_reset + self.multi_test_loop[3]
misses_multi_test_iter_in_vars = self.multi_test_iter and all([x[2] != self.multi_test_iter[2] for x in get_func_vars(entry_point)])
if misses_multi_test_iter_in_vars:
vars_.append(self.multi_test_iter)
body.append(["assign", INT, self.multi_test_iter, ["val", INT, 0]])
get_func_args(entry_point)[:] = [x for x in args]
if self.multi_test_var:
self.multi_test_var[0][2] = arg_name(self.multi_test_var[1][2])
get_func_vars(entry_point)[:] = [x for x in get_func_vars(entry_point) if (not self.multi_test_var or x[2] != self.multi_test_var[0][2]) and not x[2] in self.remove_vars] + vars_
get_func_body(entry_point)[:] = body + [x for x in get_func_body(entry_point) if not (x[0] == 'while' and (len(x[3]) == 0 or (len(x[3]) == 1 and x[3][0][0] == 'noop')))] + body_after
for func in self.funcs_with_reads:
if get_func_name(self.funcs[func]) != '__main__':
get_func_args(self.funcs[func])[:] = get_func_args(self.funcs[func])[:] + args + vars_
get_func_vars(self.funcs[func])[:] = [x for x in get_func_vars(self.funcs[func]) if not x[2] in self.remove_vars]
for func in self.funcs_with_writes:
self.funcs[func][1] = out_type
if self.funcs[func][0] == 'ctor':
self.funcs[func][0] = 'func'
self.funcs[func][2] = self.funcs[func][2].replace('.__init__', '_')
get_func_body(self.funcs[func]).pop() # drop the return statement
if func in self.func_returns:
for stmt in self.func_returns[func]:
stmt[1] = out_type
stmt[2] = ["var", out_type, "__ret"]
for invoke in self.process_invokes_with_reads:
invoke[3] += args + vars_
for invoke in self.process_invokes_with_writes:
if invoke[0] == 'return': # already processed
continue
try:
invoke[2] = invoke[2].replace('.__init__', '_')
except:
print(invoke)
raise
invoke[1] = out_type
invoke[:] = ['return', VOID, [x for x in invoke]]
return self.uast
class ExecutorContext(object):
def __init__(self):
super(ExecutorContext, self).__init__()
self._registered_vars = set()
self._vals = {}
self._return_value = None
self._flow_control = None
self._instructions_count = 0
def register_var(self, var):
assert var[2] not in self._registered_vars, var[2]
self._registered_vars.add(var[2])
def set_val(self, var, val):
assert var[2] in self._registered_vars, var
self._vals[var[2]] = val
def get_val(self, var):
if var[2] not in self._vals:
assert False, var
return self._vals[var[2]]
def array_fill(a, b):
for idx in range(len(a)):
if isinstance(a, six.string_types):
raise UASTNotImplementedException("Mutable strings")
a[idx] = b
def map_put(a, b, c):
a[b] = c
def map_remove_key(a, y):
del a[y]
def array_map_clear(a):
if isinstance(a, list):
del a[:]
elif isinstance(a, SortedDict):
a.clear()
elif isinstance(a, SortedSet):
a.clear()
else:
assert False, type(a)
def array_remove_idx(a, y):
ret = a[y]
del a[y]
return ret
def array_remove_value(a, y):
y = a.index(y)
ret = a[y]
del a[y]
return ret
def magic_escape(x):
return x if x not in ['|', '\\', '+', '(', ')', ',', '[', ']'] else '\\' + x
def cmp_to_key(mycmp):
'Convert a cmp= function into a key= function'
class K:
def __init__(self, obj, *args):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) < 0
def __gt__(self, other):
return mycmp(self.obj, other.obj) > 0
def __eq__(self, other):
return mycmp(self.obj, other.obj) == 0
def __le__(self, other):
return mycmp(self.obj, other.obj) <= 0
def __ge__(self, other):
return mycmp(self.obj, other.obj) >= 0
def __ne__(self, other):
return mycmp(self.obj, other.obj) != 0
return K
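# Illustrative use (a sketch, assuming a standard two-argument cmp function):
#   sorted([3, 1, 2], key=cmp_to_key(lambda a, b: a - b)) -> [1, 2, 3]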
DEFAULT_TYPE_FUNCS = {
'+': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'-': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'*': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'/': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'%': lambda x, y: 'int',
'>': lambda x, y: 'bool',
'<': lambda x, y: 'bool',
'>=': lambda x, y: 'bool',
'<=': lambda x, y: 'bool',
'==': lambda x, y: 'bool',
'!=': lambda x, y: 'bool',
'||': lambda x, y: 'bool',
'&&': lambda x, y: 'bool',
'sin': lambda x: 'real',
'cos': lambda x: 'real',
"str": lambda x: 'char*',
"len": lambda x: 'int',
"sqrt": lambda x: 'real',
"log": lambda x: 'real',
"ceil": lambda x: 'int',
"sort": lambda x: x,
"array_push": lambda x, y: 'void',
"array_index": lambda x, y: get_array_subtype(x),
"reverse": lambda x: x,
"sort_cmp": lambda x, y: x,
"concat": lambda x, y: 'char*',
"string_find": lambda x, y: 'int',
"string_find_last": lambda x, y: 'int',
"string_split": lambda x, y: type_array(x),
"map_get": lambda x, y: get_map_value_type(x),
"map_keys": lambda x: type_array(get_map_key_type(x)),
"map_values": lambda x: type_array(get_map_value_type(x)),
"map_put": lambda x, y, z: 'void',
"map_has_key": lambda x, y: 'bool',
'!': lambda x: x,
'~': lambda x: x,
'&': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'|': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'^': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'>>': lambda x, y: x,
'<<': lambda x, y: x,
'atan2': lambda x, y: 'real',
'pow': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'round': lambda x: 'int',
'floor': lambda x: 'int',
'clear': lambda x: 'void',
'min': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'max': lambda x, y: arithmetic_op_type(x, y, allow_same=True),
'abs': lambda x: x,
'lower': lambda x: 'char*',
'upper': lambda x: 'char*',
'fill': lambda x, y: 'void',
'copy_range': lambda x, y, z: x,
'array_index': lambda x, y: get_array_subtype(x),
'contains': lambda x, y: 'bool',
'string_replace_one': lambda x, y, z: x,
'string_replace_all': lambda x, y, z: x,
'array_concat': lambda x, y: x,
'string_insert': lambda x, y, z: x,
'string_trim': lambda x: x,
'substring': lambda x, y, z: x,
'substring_end': lambda x, y: x,
'array_push': lambda x, y: 'void',
'array_pop': lambda x: get_array_subtype(x),
'array_insert': lambda x, y, z: 'void',
'array_remove_idx': lambda x, y: get_array_subtype(x),
'array_remove_value': lambda x, y: get_array_subtype(x),
'array_find': lambda x, y: 'int',
'array_find_next': lambda x, y: 'int',
'set_push': lambda x, y: 'void',
'set_remove': lambda x, y: 'void',
'map_remove_key': lambda x, y: 'void',
'array_initializer': lambda *args: type_array(args[0]),
}
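# Example lookups (illustrative; a trailing '*' denotes an array type in this
# file's type naming convention):
#   DEFAULT_TYPE_FUNCS['array_index']('int*', 'int') -> 'int'
#   DEFAULT_TYPE_FUNCS['==']('int', 'int') -> 'bool'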
# TODO: for now passing the executor for sort_cmp, might want to find a different solution later
def get_default_funcs(executor):
funcs = {}
funcs['=='] = lambda x, y: x == y
funcs['!='] = lambda x, y: x != y
funcs['&&'] = lambda x, y: x and y
funcs['||'] = lambda x, y: x or y
funcs['!'] = lambda x: not x
funcs['~'] = lambda x: ~ x
funcs['<'] = lambda x, y: x < y
funcs['>'] = lambda x, y: x > y
funcs['<='] = lambda x, y: x <= y
funcs['>='] = lambda x, y: x >= y
funcs['*'] = lambda x, y: x * y
funcs['/'] = lambda x, y: x // y if not isinstance(x, float) and not isinstance(y, float) else x / y
funcs['%'] = lambda x, y: x % y
funcs['+'] = lambda x, y: x + y
funcs['-'] = lambda x, y: x - y
funcs['&'] = lambda x, y: x & y
funcs['|'] = lambda x, y: x | y
funcs['^'] = lambda x, y: x ^ y
funcs['>>'] = lambda x, y: x >> y
funcs['<<'] = lambda x, y: x << y
funcs['str'] = lambda x: str(x)
funcs['len'] = lambda x: len(x)
funcs['sqrt'] = lambda x: math.sqrt(x)
funcs['log'] = lambda x: math.log(x)
funcs['atan2'] = lambda x, y: math.atan2(x, y)
funcs['sin'] = lambda x: math.sin(x)
funcs['cos'] = lambda x: math.cos(x)
funcs['pow'] = lambda x, y: x ** y if y < 100 else pow(x, y, 1 << 64)
funcs['round'] = lambda x: math.floor(x + 0.5)
funcs['floor'] = lambda x: math.floor(x)
funcs['ceil'] = lambda x: math.ceil(x)
funcs['clear'] = array_map_clear
funcs['min'] = lambda a, b: min(a, b)
funcs['max'] = lambda a, b: max(a, b)
funcs['abs'] = lambda a: abs(a)
funcs['reverse'] = lambda a: list(reversed(a)) if not isinstance(a, six.string_types) else ''.join(reversed(a))
funcs['lower'] = lambda a: a.lower()
funcs['upper'] = lambda a: a.upper()
funcs['sort'] = lambda a: list(sorted(a)) if not a or not isinstance(a[0], dict) else list(sorted(a, key=lambda x: tuple(x.items())))
funcs['sort_cmp'] = lambda a, b: list(sorted(a, key=cmp_to_key(lambda x,y: executor.execute_func(b, [x,y]))))
funcs['fill'] = array_fill
funcs['copy_range'] = lambda arr, fr, to: [x for x in arr[fr:to]]
funcs['array_index'] = lambda x, y: x[y] if not isinstance(x, six.string_types) else ord(x[y])
funcs['contains'] = lambda x, y: y in x
funcs['string_find'] = lambda x, y: x.find(y if isinstance(y, six.string_types) else chr(y))
funcs['string_find_last'] = lambda x, y: x.rfind(y if isinstance(y, six.string_types) else chr(y))
funcs['string_replace_one'] = lambda x, y, z: x.replace(y if isinstance(y, six.string_types) else chr(y), z if isinstance(z, six.string_types) else chr(z), 1)
funcs['string_replace_all'] = lambda x, y, z: x.replace(y if isinstance(y, six.string_types) else chr(y), z if isinstance(z, six.string_types) else chr(z))
funcs['concat'] = lambda x, y: (x if isinstance(x, six.string_types) else chr(x)) + (y if isinstance(y, six.string_types) else chr(y))
funcs['array_concat'] = lambda x, y: x + y
funcs['string_insert'] = lambda x, pos, y: x[:pos] + (y if isinstance(y, six.string_types) else chr(y)) + x[pos:]
funcs['string_split'] = lambda x, y: [z for z in re.split('|'.join([magic_escape(_) for _ in y]), x) if z] if y != '' else [z for z in x]
funcs['string_trim'] = lambda x: x.strip()
funcs['substring'] = lambda x, y, z: x[y:z]
funcs['substring_end'] = lambda x, y: x[y:]
funcs['array_push'] = lambda x, y: x.append(y)
funcs['array_pop'] = lambda x: x.pop()
funcs['array_insert'] = lambda x, pos, y: x.insert(pos, y)
funcs['array_remove_idx'] = array_remove_idx
funcs['array_remove_value'] = array_remove_value
funcs['array_find'] = lambda x, y: x.index(y) if y in x else -1
funcs['array_find_next'] = lambda x, y, z: x.index(y, z) if y in x[z:] else -1
funcs['set_push'] = lambda x, y: x.add(y)
funcs['set_remove'] = lambda x, y: x.remove(y)
funcs['map_has_key'] = lambda x, y: y in x
funcs['map_put'] = map_put
funcs['map_get'] = lambda x, y: x[y] if y in x else None
funcs['map_keys'] = lambda x: list(x.keys())
funcs['map_values'] = lambda x: list(x.values())
funcs['map_remove_key'] = map_remove_key
funcs['array_initializer'] = lambda *x: list(x)
return funcs
class Executor(object):
def __init__(self, data, timeout=600):
super(Executor, self).__init__()
self.funcs = {get_func_name(func): func for func in data['funcs']}
self.types = {get_record_name(record): record for record in data['types']}
self.watchers = []
self.timeout = timeout
self.start_time = time.time()
self.funcs.update(get_default_funcs(self))
self.globals_ = {}
global_init_func = GLOBALS_NAME + ".__init__"
if global_init_func in self.funcs:
self.execute_func(global_init_func, [])
elif GLOBALS_NAME in self.types and len(self.types[GLOBALS_NAME][2]) > 2:
raise ValueError("Must have %s if %s struct is present and non empty (%s)." % (
global_init_func, GLOBALS_NAME, self.types[GLOBALS_NAME]
))
def _observe_read(self, context, read_store, args):
if self.watchers:
args[-1] = (args[-1][0], tuplify(args[-1][1])) # tuplify the new value
if read_store is not None:
read_store[0] = args
else:
evt = WatcherEvent("read", self, context, args)
self._watch(evt)
elif read_store is not None:
read_store[0] = []
def _observe_write(self, context, args):
if self.watchers:
args[-1] = (args[-1][0], tuplify(args[-1][1])) # tuplify the new value
evt = WatcherEvent("write", self, context, args)
self._watch(evt)
def register_watcher(self, watcher):
self.watchers.append(watcher)
def _watch(self, event):
for watcher in self.watchers:
watcher.watch(event)
def compute_lhs(self, context, expr, read_store):
assert read_store is not None and read_store[1]
return self.compute_expression(context, expr, read_store=read_store)
@watchable("expression")
def compute_expression(self, context, expr, read_store=None):
is_lhs = read_store is not None and read_store[1]
if is_lhs and not is_assigneable(expr):
raise UASTNotImplementedException("Non-lhs expression as argument while computing lhs")
if expr[0] == 'assign':
rhs = self.compute_expression(context, expr[3])
assert is_assigneable(expr[2]), expr
if expr[2][0] == 'var':
# Fail if integer values are too big.
if isinstance(rhs, int) and abs(rhs) > LARGEST_INT:
raise OverflowError()
context.set_val(expr[2], rhs)
# Same as with the field.
inner_read_store = [None, is_lhs]
# Calling to compute_expression to observe before_expression and after_expression events. compute
# expression would also call to observe_read that we would prefer to skip, which we ignore here by not
# using the contents of inner_read_store.
self.compute_expression(context, expr[2], read_store=inner_read_store)
self._observe_write(context, [(expr[2][2], rhs)])
elif expr[2][0] == 'field':
field = expr[2]
inner_read_store = [None, True]
record = self.compute_lhs(context, field[2], read_store=inner_read_store)
record[field[3]] = rhs
assert inner_read_store[0] is not None
dependants = inner_read_store[0]
self._observe_write(context, dependants + [(field[3], rhs)])
elif expr[2][0] == 'invoke' and expr[2][2] == 'array_index':
args = expr[2][3]
deref = args[0]
inner_read_store = [None, True]
array = self.compute_lhs(context, args[0], read_store=inner_read_store)
assert inner_read_store[0] is not None
array_index = int(self.compute_expression(context, args[1]))
assert_val_matches_type(array_index, INT)
if isinstance(array, six.string_types):
# a hack way to achieve some sort of mutability in strings
new_val = array[:array_index] + (rhs if isinstance(rhs, six.string_types) else chr(rhs)) + array[array_index+1:]
self.compute_expression(context, ["assign", STRING, args[0], constant(STRING, new_val)])
else:
array[array_index] = rhs
assert inner_read_store[0] is not None
dependants = inner_read_store[0]
self._observe_write(context, dependants + [(array_index, rhs)])
else:
assert False, expr
ret = rhs
elif expr[0] == 'var':
ret = context.get_val(expr)
self._observe_read(context, read_store, [(expr[2], ret)])
elif expr[0] == 'field':
inner_read_store = [None, is_lhs]
obj = self.compute_expression(context, expr[2], read_store=inner_read_store)
ret = obj[expr[3]]
dependants = inner_read_store[0]
if dependants is not None:
self._observe_read(context, read_store, dependants + [(expr[3], ret)])
elif expr[0] == 'val':
assert_val_matches_type(expr[2], expr[1])
ret = expr[2]
if isinstance(ret, six.string_types):
ret = ret.replace("\\n", "\n").replace("\\t", "\t") # TODO: proper unescaping
elif expr[0] == 'invoke':
if expr[2] in ['&&', '||']: # short circuiting
larg = self.compute_expression(context, expr[3][0])
assert type(larg) == bool
if (larg and expr[2] == '||') or (not larg and expr[2] == '&&'):
ret = larg
else:
ret = self.compute_expression(context, expr[3][1])
else:
if expr[2] == 'array_index':
inner_read_store = [None, is_lhs]
arg_vals = [self.compute_expression(context, x, read_store=inner_read_store) for x in expr[3][:1]]
arg_vals += [self.compute_expression(context, x) for x in expr[3][1:]]
else:
                    arg_vals = [self.compute_expression(context, x) for x in expr[3]]
if expr[2] == 'str' and expr[3][0][1] == CHAR: # TODO: fix it by replacing "str" with cast
ret = chr(arg_vals[0])
elif expr[2] == '_ctor':
ret = self.execute_ctor(expr[1], arg_vals, expressions=expr[3])
else:
                    ret = self.execute_func(expr[2], arg_vals, expressions=expr[3])
if expr[2] == 'array_index':
dependants = inner_read_store[0]
if dependants is not None:
self._observe_read(context, read_store, dependants + [(arg_vals[1], ret)])
if expr[2] == 'array_initializer' and expr[1] == STRING: # TODO: fix somehow
ret = ''.join([chr(x) for x in ret])
elif get_expr_type(expr) == STRING and type(ret) == list:
assert len(ret) == 0 or isinstance(ret[0], six.string_types), ret
ret = ''.join(ret)
elif expr[0] == '?:':
cond = self.compute_expression(context, expr[2])
if cond:
ret = self.compute_ternary_expression(context, expr[2], expr[3])
else:
ret = self.compute_ternary_expression(context, expr[2], expr[4])
elif expr[0] == 'cast':
assert can_cast(expr[1], expr[2][1]), expr
ret = self.compute_expression(context, expr[2])
if is_int_type(expr[1]):
ret = int(float(ret))
elif expr[1] == REAL:
ret = float(ret)
return ret
else:
            raise UASTNotImplementedException("Execution of expression %s" % expr)
assert False, expr
try:
assert_val_matches_type(ret, expr[1])
except Exception as e:
#print("Type mismatch between %s and %s while evaluating: %s (%s: %s)" % (
# str(ret)[:100], expr[1], expr, type(e), e), file=sys.stderr)
#val_matches_type(ret, expr[1], True)
raise
if expr[1] in [REAL]:
ret = float(ret)
elif is_int_type(expr[1]):
ret = int(ret)
return ret
@watchable("block")
def execute_block(self, context, block):
for stmt in block:
if self.execute_statement(context, stmt):
return True
if context._flow_control in ['break', 'continue']:
break
assert context._flow_control is None
return False
@watchable("if_block")
def execute_if_block(self, context, expr, block):
# expr can be used by the watchers, e.g. for constructing the control-flow.
return self.execute_block(context, block)
@watchable("foreach_block")
def execute_foreach_block(self, context, expr, block):
# expr can be used by the watchers, e.g. for constructing the control-flow.
return self.execute_block(context, block)
@watchable("while_block")
def execute_while_block(self, context, expr, block):
# expr can be used by the watchers, e.g. for constructing the control-flow.
return self.execute_block(context, block)
@watchable("ternary_expression")
def compute_ternary_expression(self, context, pred_expr, expr):
# pred_expr can be used by the watchers, e.g. for constructing the control-flow.
return self.compute_expression(context, expr)
@watchable("statement")
def execute_statement(self, context, stmt):
if time.time() - self.start_time > self.timeout:
raise UASTTimeLimitExceeded()
context._instructions_count += 1
if DEBUG_INFO and context._instructions_count >= 10000 and hasattr(stmt, 'position'):
context._instructions_count = 0
print("DEBUG INFO: pos:", stmt.position, 'vars:', context._vals, file=sys.stderr)
if stmt[0] == 'if':
cond = self.compute_expression(context, stmt[2])
assert isinstance(cond, bool), (cond, stmt[2])
if cond:
return self.execute_if_block(context, stmt[2], stmt[3])
else:
return self.execute_if_block(context, stmt[2], stmt[4])
elif stmt[0] == 'foreach':
lst = self.compute_expression(context, stmt[3])
need_ord = isinstance(lst, six.string_types)
for x in lst:
context.set_val(stmt[2], x if not need_ord else ord(x))
if self.execute_foreach_block(context, stmt[3], stmt[4]):
return True
if context._flow_control == 'break':
context._flow_control = None
break
elif context._flow_control == 'continue':
context._flow_control = None
elif stmt[0] == 'while':
while True:
cond = self.compute_expression(context, stmt[2])
assert isinstance(cond, bool)
if not cond:
break
if self.execute_while_block(context, stmt[2], stmt[3]):
return True
if context._flow_control == 'break':
context._flow_control = None
break
elif context._flow_control == 'continue':
context._flow_control = None
assert not self.execute_while_block(context, stmt[2], stmt[4])
elif stmt[0] == 'break':
context._flow_control = 'break'
return False
elif stmt[0] == 'continue':
context._flow_control = 'continue'
return False
elif stmt[0] == 'return':
context._return_value = self.compute_expression(context, stmt[2])
return True
elif stmt[0] == 'noop':
return False
else:
self.compute_expression(context, stmt)
def execute_ctor(self, ret_type, args, expressions):
if ret_type.endswith("*"):
if len(args) == 0:
return [] if ret_type != 'char*' else ""
elif len(args) == 1 and not val_matches_type(args[0], INT):
# initialize with the first argument
return list(args[0])
else:
assert len(ret_type) > len(args) and all([x == '*' for x in ret_type[-len(args):]]), "TYPE: %s, ARGS: %s" % (ret_type, args)
subtype = ret_type
for arg in args:
assert_val_matches_type(arg, INT)
subtype = get_array_subtype(subtype)
# We measured the size of the N-dimensional array initialized with default values of different types and
# measured the approx number of bytes used by each element. Based on this we cut the maximum array size
# that we can initialize.
approx_memory_overhead = {
INT: 8,
REAL: 8,
CHAR: 4,
STRING: 4,
BOOL: 1
}
memory_cutoff = 10*2**20 # Allocate no more than 10MiB during array initialization.
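                # e.g. a 1000 x 1000 INT array is estimated at
                # 1000 * 1000 * 8 = 8,000,000 bytes, safely under the cutoff.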
assert functools.reduce(mul, args) * approx_memory_overhead[subtype] <= memory_cutoff, (
"CTOR allocates too much memory %s %s, %s" % (ret_type, args, expressions))
return np.full(tuple(args), default_value(subtype)).tolist()
elif ret_type.endswith("%"):
return SortedSet() if len(args) == 0 else SortedSet(args[0])
elif ret_type.endswith('>'):
return SortedDict()
elif ret_type == INT:
assert len(args) == 0
return 0
elif ret_type.endswith('#'):
return self.execute_func(ret_type[:-1] + ".__init__", args, expressions=expressions)
else:
assert False, ret_type
@watchable('func_block')
def execute_func_block(self, context, func_name, func_vars, func_args, args_vals, expressions, block):
# func_name, func_vars, func_args, args_vals and expressions can be used by the watchers, e.g. for constructing
# the data-flow.
assert len(func_args) == len(args_vals)
assert expressions is None or len(expressions) == len(func_args)
self.execute_block(context, block)
def execute_func(self, func_name, args, tolerate_missing_this=False, expressions=None):
context = ExecutorContext()
if func_name not in self.funcs:
raise UASTNotImplementedException("Interpreter function %s" % func_name)
func = self.funcs[func_name]
if callable(func):
try:
return func(*args)
except Exception:
# print(func_name, args)
raise
if self.watchers:
self._watch(WatcherEvent("before_func", self, context, func, args))
globals_var = var(GLOBALS_NAME, func[1])
context.register_var(globals_var)
context.set_val(globals_var, self.globals_)
if func[0] == 'ctor':
ctor_type_name = type_to_record_name(func[1])
ctor_type = self.types[ctor_type_name]
ret_var = var("this", func[1])
context.register_var(ret_var)
context.set_val(ret_var, {})
if tolerate_missing_this and len(args) == len(get_func_args(func)) + 1:
args = args[1:]
if len(args) != len(get_func_args(func)):
#print >> sys.stderr, func
#print >> sys.stderr, args
#print >> sys.stderr, get_func_args(func)
raise UASTNotImplementedException("Polymorphism (len(%s) <> len(%s) when calling %s)" % (args, get_func_args(func), get_func_name(func)))
for arg, arg_def in zip(args, get_func_args(func)):
assert_val_matches_type(arg, get_expr_type(arg_def))
context.register_var(arg_def)
context.set_val(arg_def, arg)
for var_ in get_func_vars(func):
context.register_var(var_)
self.execute_func_block(context, func_name, get_func_vars(func), get_func_args(func), args, expressions, get_func_body(func))
assert_val_matches_type(context._return_value, get_func_return_type(func))
if self.watchers:
self._watch(WatcherEvent("after_func", self, context, context._return_value, func, args))
return context._return_value
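# Illustrative driver (a sketch; 'program_uast' stands for a parsed UAST dict
# with 'funcs' and 'types' entries, as assumed throughout this module, and
# '__main__' for the entry point produced by postprocess_uast):
#   ex = Executor(program_uast)
#   result = ex.execute_func('__main__', [5, 7])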
| apache-2.0 | -7,174,344,005,736,294,000 | 37.396074 | 257 | 0.52006 | false |
jni/networkx | networkx/algorithms/components/connected.py | 10 | 4068 | # -*- coding: utf-8 -*-
"""
Connected components.
"""
# Copyright (C) 2004-2013 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
import networkx as nx
from networkx.utils.decorators import not_implemented_for
from networkx.algorithms.shortest_paths \
import single_source_shortest_path_length as sp_length
__authors__ = "\n".join(['Eben Kenah',
'Aric Hagberg <aric.hagberg@gmail.com>'
'Christopher Ellison'])
__all__ = ['number_connected_components', 'connected_components',
'connected_component_subgraphs','is_connected',
'node_connected_component']
@not_implemented_for('directed')
def connected_components(G):
"""Generate connected components.
Parameters
----------
G : NetworkX graph
An undirected graph
Returns
-------
comp : generator of lists
A list of nodes for each component of G.
Examples
--------
Generate a sorted list of connected components, largest first.
>>> G = nx.path_graph(4)
>>> G.add_path([10, 11, 12])
>>> sorted(nx.connected_components(G), key = len, reverse=True)
[[0, 1, 2, 3], [10, 11, 12]]
See Also
--------
strongly_connected_components
Notes
-----
For undirected graphs only.
"""
seen={}
for v in G:
if v not in seen:
c = sp_length(G, v)
yield list(c)
seen.update(c)
@not_implemented_for('directed')
def connected_component_subgraphs(G, copy=True):
"""Generate connected components as subgraphs.
Parameters
----------
G : NetworkX graph
An undirected graph.
copy: bool (default=True)
If True make a copy of the graph attributes
Returns
-------
comp : generator
A generator of graphs, one for each connected component of G.
Examples
--------
>>> G = nx.path_graph(4)
>>> G.add_edge(5,6)
>>> graphs = list(nx.connected_component_subgraphs(G))
See Also
--------
connected_components
Notes
-----
For undirected graphs only.
Graph, node, and edge attributes are copied to the subgraphs by default.
"""
for c in connected_components(G):
if copy:
yield G.subgraph(c).copy()
else:
yield G.subgraph(c)
def number_connected_components(G):
"""Return the number of connected components.
Parameters
----------
G : NetworkX graph
An undirected graph.
Returns
-------
n : integer
Number of connected components
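    Examples
    --------
    Count the components of a graph made of two disjoint paths.
    >>> G = nx.path_graph(4)
    >>> G.add_path([10, 11, 12])
    >>> nx.number_connected_components(G)
    2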
See Also
--------
connected_components
Notes
-----
For undirected graphs only.
"""
return len(list(connected_components(G)))
@not_implemented_for('directed')
def is_connected(G):
"""Return True if the graph is connected, false otherwise.
Parameters
----------
G : NetworkX Graph
An undirected graph.
Returns
-------
connected : bool
True if the graph is connected, false otherwise.
Examples
--------
>>> G = nx.path_graph(4)
>>> print(nx.is_connected(G))
True
See Also
--------
connected_components
Notes
-----
For undirected graphs only.
"""
if len(G) == 0:
        raise nx.NetworkXPointlessConcept('Connectivity is undefined '
                                          'for the null graph.')
return len(sp_length(G, next(G.nodes_iter()))) == len(G)
@not_implemented_for('directed')
def node_connected_component(G, n):
"""Return the nodes in the component of graph containing node n.
Parameters
----------
G : NetworkX Graph
An undirected graph.
n : node label
A node in G
Returns
-------
comp : lists
A list of nodes in component of G containing node n.
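    Examples
    --------
    Pick out the component containing node 0 (sorted for stable output).
    >>> G = nx.path_graph(4)
    >>> G.add_path([10, 11, 12])
    >>> sorted(nx.node_connected_component(G, 0))
    [0, 1, 2, 3]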
See Also
--------
connected_components
Notes
-----
For undirected graphs only.
"""
return list(sp_length(G, n))
| bsd-3-clause | 6,864,915,984,074,193,000 | 21.726257 | 76 | 0.577434 | false |
NickShaffner/rhea | rhea/build/boards/xilinx/_xula.py | 2 | 7089 | #
# Copyright (c) 2014-2015 Christopher Felton
#
from rhea.build import FPGA
from rhea.build.extintf import Port
# @todo: get SDRAM interface from rhea.cores.sdram
# from ...extintf._sdram import SDRAM
from rhea.build.toolflow import ISE
class Xula(FPGA):
vendor = 'xilinx'
family = 'spartan3A'
device = 'XC3S200A'
package = 'VQ100'
speed = '-4'
_name = 'xula'
default_clocks = {
'clock': dict(frequency=12e6, pins=(43,)),
'chan_clk': dict(frequency=1e6, pins=(44,))
}
default_ports = {
'chan': dict(pins=(36, 37, 39, 50, 52, 56, 57, 61, # 0-7
62, 68, 72, 73, 82, 83, 84, 35, # 8-15
                           34, 33, 32, 21, 20, 19, 13, 12, # 16-23
7, 4, 3, 97, 94, 93, 89, 88)) # 24-31
}
def get_flow(self, top=None):
return ISE(brd=self, top=top)
class XulaStickItMB(Xula):
def __init__(self):
""" StickIt board port definitions
This class defines the port to pin mapping for the Xess StickIt
board. The Xula module can be plugged into the StickIt board.
The StickIt board provides connections to many common physical
interfaces: pmod, shields, etc. Many of the pins are redefined
to match the names of the connector connections
"""
chan_pins = self.default_ports['chan']['pins']
chan_pins = chan_pins + self.default_clocks['chan_clk']['pins']
assert len(chan_pins) == 33
self.default_ports['chan']['pins'] = chan_pins
# the following are the bit-selects (chan[idx]) and not
# the pins.
self.add_port_name('pm1', 'chan', (15, 32, 16, 0, # pmod A
11, 28, 13, 14)) # pmod B
self.add_port_name('pm2', 'chan', (17, 1, 18, 3, # pmod A
15, 32, 16, 0)) # pmod B
self.add_port_name('pm3', 'chan', (20, 4, 21, 5, # pmod A
17, 1, 18, 3)) # pmod B
self.add_port_name('pm4', 'chan', (22, 6, 23, 7, # pmod A
20, 4, 21, 5)) # pmod B
self.add_port_name('pm5', 'chan', (8, 25, 26, 10, # pmod A
22, 6, 23, 7)) # pmod B
self.add_port_name('pm6', 'chan', (11, 28, 13, 14, # pmod A
8, 25, 26, 10)) # pmod B
        # @todo: add the wing definitions
class Xula2(FPGA):
vendor = 'xilinx'
family = 'spartan6'
device = 'XC6SLX25'
package = 'FTG256'
speed = '-2'
_name = 'xula2'
default_clocks = {
'clock': dict(frequency=12e6, pins=('A9',)),
'chan_clk': dict(frequency=1e6, pins=('T7',))
}
default_ports = {
'chan': dict(pins=('R7','R15','R16','M15','M16','K15', #0-5
'K16','J16','J14','F15','F16','C16', #6-11
'C15','B16','B15','T4','R2','R1', #12-17
'M2','M1','K3','J4','H1','H2', #18-23
'F1','F2','E1','E2','C1','B1', #24-29
'B2','A2',) )
}
default_extintf = {
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# VGA:
'vga': None,
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# SDRAM: the Xula2 has a 256Mbit WINBond SDRAM,
# http://www.winbond.com/hq/enu/ProductAndSales/ProductLines/SpecialtyDRAM/SDRAM/W9825G6JH.htm
        # @todo: merge into rhea.system/rhea.cores interfaces
# 'sdram': SDRAM(
# Port('addr', pins=('E4', 'E3', 'D3', 'C3', # 0-3
# 'B12', 'A12', 'D12', 'E12', # 4-7
# 'G16', 'G12', 'F4', 'G11', # 8-11
# 'H13',) # 12
# ),
# Port('data', pins=('P6', 'T6', 'T5', 'P5', # 0-3
# 'R5', 'N5', 'P4', 'N4', # 4-7
# 'P12', 'R12', 'T13', 'T14', # 8-11
# 'R14', 'T15', 'T12', 'P11',) # 12-15
# ),
# Port('bs', pins=('H3', 'G3',) ),
# Port('cas', pins=('L3',) ),
# Port('ras', pins=('L4',) ),
# Port('ldqm', pins=('M4',) ),
# Port('udqm', pins=('L13',) ),
# Port('clk', pins=('K12',) ),
# Port('clkfb', pins=('K11',) ),
# Port('cs', pins=('H4',) ),
# Port('we', pins=('M3',) ),
# Port('cke', pins=('J12',)),
#
# # timing information, all in ns
# timing = dict(
# init = 200000.0,
# ras = 45.0,
# rcd = 20.0,
# ref = 64000000.0,
# rfc = 65.0,
# rp = 20.0,
# xsr = 75.0
# ),
# ddr = 0 # single data rate
# ),
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# SPI and MicroSD
#'flash': _extintf(
# Port('sclk', pins=()),
# Port('sdi', pins=()),
# Port('sdo', pins=()),
# port('cs', pins=()),
# ),
#
#'microsd' : None,
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
}
def get_flow(self, top=None):
return ISE(brd=self, top=top)
class Xula2StickItMB(Xula2):
def __init__(self):
""" """
# to simplify the connector mapping append chan_clk to the
# end of the channel pins. Note overlapping ports cannot
# be simultaneously used.
chan_pins = self.default_ports['chan']['pins']
chan_pins = chan_pins + self.default_clocks['chan_clk']['pins']
# assert len(chan_pins) == 33, "len == {}".format(len(chan_pins))
self.default_ports['chan']['pins'] = chan_pins
super(Xula2StickItMB, self).__init__()
self.add_port_name('pm1', 'chan', (0, 2, 4, 5,
32, 1, 3, 5))
self.add_port_name('pm2', 'chan', (15, 17, 19, 21,
16, 18, 20, 22))
self.add_port_name('pm3', 'chan', (23, 25, 27, 29,
24, 26, 28, 30))
# @todo: add grove board connectors
# RPi GPIO connector, each port defined as the
self.add_port_name('bcm2_sda', 'chan', 31)
self.add_port_name('bcm3_scl', 'chan', 30)
self.add_port_name('bcm4_gpclk0', 'chan', 29)
self.add_port_name('bcm17', 'chan', 28)
self.add_port_name('bcm27_pcm_d', 'chan', 27)
self.add_port_name('bcm22', 'chan', 26)
# ...
self.add_port_name('bcm14_txd', 'chan', 14)
self.add_port_name('bcm15_rxd', 'chan', 13)
# @todo: finish ...
| mit | 4,330,671,391,191,900,700 | 34.808081 | 102 | 0.411765 | false |
insertnamehere1/maraschino | lib/sqlalchemy/dialects/sybase/base.py | 22 | 15166 | # sybase/base.py
# Copyright (C) 2010-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
# get_select_precolumns(), limit_clause() implementation
# copyright (C) 2007 Fisch Asset Management
# AG http://www.fam.ch, with coding by Alexander Houben
# alexander.houben@thor-solutions.ch
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for Sybase Adaptive Server Enterprise (ASE).
Note that this dialect is no longer specific to Sybase iAnywhere.
ASE is the primary support platform.
"""
import operator
from sqlalchemy.sql import compiler, expression, text, bindparam
from sqlalchemy.engine import default, base, reflection
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import schema as sa_schema
from sqlalchemy import util, sql, exc
from sqlalchemy.types import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
TEXT,DATE,DATETIME, FLOAT, NUMERIC,\
BIGINT,INT, INTEGER, SMALLINT, BINARY,\
VARBINARY, DECIMAL, TIMESTAMP, Unicode,\
UnicodeText
RESERVED_WORDS = set([
"add", "all", "alter", "and",
"any", "as", "asc", "backup",
"begin", "between", "bigint", "binary",
"bit", "bottom", "break", "by",
"call", "capability", "cascade", "case",
"cast", "char", "char_convert", "character",
"check", "checkpoint", "close", "comment",
"commit", "connect", "constraint", "contains",
"continue", "convert", "create", "cross",
"cube", "current", "current_timestamp", "current_user",
"cursor", "date", "dbspace", "deallocate",
"dec", "decimal", "declare", "default",
"delete", "deleting", "desc", "distinct",
"do", "double", "drop", "dynamic",
"else", "elseif", "encrypted", "end",
"endif", "escape", "except", "exception",
"exec", "execute", "existing", "exists",
"externlogin", "fetch", "first", "float",
"for", "force", "foreign", "forward",
"from", "full", "goto", "grant",
"group", "having", "holdlock", "identified",
"if", "in", "index", "index_lparen",
"inner", "inout", "insensitive", "insert",
"inserting", "install", "instead", "int",
"integer", "integrated", "intersect", "into",
"iq", "is", "isolation", "join",
"key", "lateral", "left", "like",
"lock", "login", "long", "match",
"membership", "message", "mode", "modify",
"natural", "new", "no", "noholdlock",
"not", "notify", "null", "numeric",
"of", "off", "on", "open",
"option", "options", "or", "order",
"others", "out", "outer", "over",
"passthrough", "precision", "prepare", "primary",
"print", "privileges", "proc", "procedure",
"publication", "raiserror", "readtext", "real",
"reference", "references", "release", "remote",
"remove", "rename", "reorganize", "resource",
"restore", "restrict", "return", "revoke",
"right", "rollback", "rollup", "save",
"savepoint", "scroll", "select", "sensitive",
"session", "set", "setuser", "share",
"smallint", "some", "sqlcode", "sqlstate",
"start", "stop", "subtrans", "subtransaction",
"synchronize", "syntax_error", "table", "temporary",
"then", "time", "timestamp", "tinyint",
"to", "top", "tran", "trigger",
"truncate", "tsequal", "unbounded", "union",
"unique", "unknown", "unsigned", "update",
"updating", "user", "using", "validate",
"values", "varbinary", "varchar", "variable",
"varying", "view", "wait", "waitfor",
"when", "where", "while", "window",
"with", "with_cube", "with_lparen", "with_rollup",
"within", "work", "writetext",
])
class _SybaseUnitypeMixin(object):
"""these types appear to return a buffer object."""
def result_processor(self, dialect, coltype):
def process(value):
if value is not None:
return str(value) #.decode("ucs-2")
else:
return None
return process
class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
__visit_name__ = 'UNICHAR'
class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
__visit_name__ = 'UNIVARCHAR'
class UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText):
__visit_name__ = 'UNITEXT'
class TINYINT(sqltypes.Integer):
__visit_name__ = 'TINYINT'
class BIT(sqltypes.TypeEngine):
__visit_name__ = 'BIT'
class MONEY(sqltypes.TypeEngine):
__visit_name__ = "MONEY"
class SMALLMONEY(sqltypes.TypeEngine):
__visit_name__ = "SMALLMONEY"
class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
__visit_name__ = "UNIQUEIDENTIFIER"
class IMAGE(sqltypes.LargeBinary):
__visit_name__ = 'IMAGE'
class SybaseTypeCompiler(compiler.GenericTypeCompiler):
def visit_large_binary(self, type_):
return self.visit_IMAGE(type_)
def visit_boolean(self, type_):
return self.visit_BIT(type_)
def visit_unicode(self, type_):
return self.visit_NVARCHAR(type_)
def visit_UNICHAR(self, type_):
return "UNICHAR(%d)" % type_.length
def visit_UNIVARCHAR(self, type_):
return "UNIVARCHAR(%d)" % type_.length
def visit_UNITEXT(self, type_):
return "UNITEXT"
def visit_TINYINT(self, type_):
return "TINYINT"
def visit_IMAGE(self, type_):
return "IMAGE"
def visit_BIT(self, type_):
return "BIT"
def visit_MONEY(self, type_):
return "MONEY"
def visit_SMALLMONEY(self, type_):
return "SMALLMONEY"
def visit_UNIQUEIDENTIFIER(self, type_):
return "UNIQUEIDENTIFIER"
ischema_names = {
'integer' : INTEGER,
'unsigned int' : INTEGER, # TODO: unsigned flags
'unsigned smallint' : SMALLINT, # TODO: unsigned flags
'unsigned bigint' : BIGINT, # TODO: unsigned flags
'bigint': BIGINT,
'smallint' : SMALLINT,
'tinyint' : TINYINT,
'varchar' : VARCHAR,
'long varchar' : TEXT, # TODO
'char' : CHAR,
'decimal' : DECIMAL,
'numeric' : NUMERIC,
'float' : FLOAT,
'double' : NUMERIC, # TODO
'binary' : BINARY,
'varbinary' : VARBINARY,
'bit': BIT,
'image' : IMAGE,
'timestamp': TIMESTAMP,
'money': MONEY,
'smallmoney': MONEY,
'uniqueidentifier': UNIQUEIDENTIFIER,
}
class SybaseExecutionContext(default.DefaultExecutionContext):
_enable_identity_insert = False
def set_ddl_autocommit(self, connection, value):
"""Must be implemented by subclasses to accommodate DDL executions.
"connection" is the raw unwrapped DBAPI connection. "value"
        is True or False. When True, the connection should be configured
        such that a DDL can take place subsequently. When False,
a DDL has taken place and the connection should be resumed
into non-autocommit mode.
"""
raise NotImplementedError()
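    # A minimal sketch of the hook described above (illustrative only; the
    # DBAPI-specific dialect supplies the real implementation):
    #     class MyExecutionContext(SybaseExecutionContext):
    #         def set_ddl_autocommit(self, connection, value):
    #             connection.autocommit = value  # assumes a DBAPI 'autocommit' attribute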
def pre_exec(self):
if self.isinsert:
tbl = self.compiled.statement.table
seq_column = tbl._autoincrement_column
insert_has_sequence = seq_column is not None
if insert_has_sequence:
self._enable_identity_insert = \
seq_column.key in self.compiled_parameters[0]
else:
self._enable_identity_insert = False
if self._enable_identity_insert:
self.cursor.execute("SET IDENTITY_INSERT %s ON" %
self.dialect.identifier_preparer.format_table(tbl))
if self.isddl:
# TODO: to enhance this, we can detect "ddl in tran" on the
# database settings. this error message should be improved to
# include a note about that.
if not self.should_autocommit:
raise exc.InvalidRequestError(
"The Sybase dialect only supports "
"DDL in 'autocommit' mode at this time.")
self.root_connection.engine.logger.info(
"AUTOCOMMIT (Assuming no Sybase 'ddl in tran')")
self.set_ddl_autocommit(
self.root_connection.connection.connection,
True)
def post_exec(self):
if self.isddl:
self.set_ddl_autocommit(self.root_connection, False)
if self._enable_identity_insert:
self.cursor.execute(
"SET IDENTITY_INSERT %s OFF" %
self.dialect.identifier_preparer.
format_table(self.compiled.statement.table)
)
def get_lastrowid(self):
cursor = self.create_cursor()
cursor.execute("SELECT @@identity AS lastrowid")
lastrowid = cursor.fetchone()[0]
cursor.close()
return lastrowid
class SybaseSQLCompiler(compiler.SQLCompiler):
ansi_bind_rules = True
extract_map = util.update_copy(
compiler.SQLCompiler.extract_map,
{
'doy': 'dayofyear',
'dow': 'weekday',
'milliseconds': 'millisecond'
})
def get_select_precolumns(self, select):
s = select._distinct and "DISTINCT " or ""
# TODO: don't think Sybase supports
# bind params for FIRST / TOP
if select._limit:
#if select._limit == 1:
#s += "FIRST "
#else:
#s += "TOP %s " % (select._limit,)
s += "TOP %s " % (select._limit,)
if select._offset:
if not select._limit:
# FIXME: sybase doesn't allow an offset without a limit
# so use a huge value for TOP here
s += "TOP 1000000 "
s += "START AT %s " % (select._offset+1,)
return s
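    # For example, a select with limit=5 and offset=10 renders the prefix
    # "TOP 5 START AT 11 " (START AT positions count from 1).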
def get_from_hint_text(self, table, text):
return text
def limit_clause(self, select):
# Limit in sybase is after the select keyword
return ""
def visit_extract(self, extract, **kw):
field = self.extract_map.get(extract.field, extract.field)
return 'DATEPART("%s", %s)' % (
field, self.process(extract.expr, **kw))
def for_update_clause(self, select):
# "FOR UPDATE" is only allowed on "DECLARE CURSOR"
# which SQLAlchemy doesn't use
return ''
def order_by_clause(self, select, **kw):
kw['literal_binds'] = True
order_by = self.process(select._order_by_clause, **kw)
# SybaseSQL only allows ORDER BY in subqueries if there is a LIMIT
if order_by and (not self.is_subquery() or select._limit):
return " ORDER BY " + order_by
else:
return ""
class SybaseDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kwargs):
colspec = self.preparer.format_column(column) + " " + \
self.dialect.type_compiler.process(column.type)
if column.table is None:
raise exc.InvalidRequestError(
"The Sybase dialect requires Table-bound "
"columns in order to generate DDL")
seq_col = column.table._autoincrement_column
# install a IDENTITY Sequence if we have an implicit IDENTITY column
if seq_col is column:
sequence = isinstance(column.default, sa_schema.Sequence) \
and column.default
if sequence:
start, increment = sequence.start or 1, \
sequence.increment or 1
else:
start, increment = 1, 1
if (start, increment) == (1, 1):
colspec += " IDENTITY"
else:
# TODO: need correct syntax for this
colspec += " IDENTITY(%s,%s)" % (start, increment)
else:
if column.nullable is not None:
if not column.nullable or column.primary_key:
colspec += " NOT NULL"
else:
colspec += " NULL"
default = self.get_column_default_string(column)
if default is not None:
colspec += " DEFAULT " + default
return colspec
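    # Illustrative sketch (not part of the original source): for a table whose
    # implicit autoincrement column is Column('id', Integer, primary_key=True)
    # plus Column('name', String(30), nullable=False), the rules above emit
    # specs along the lines of "id INTEGER IDENTITY" and
    # "name VARCHAR(30) NOT NULL".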
def visit_drop_index(self, drop):
index = drop.element
return "\nDROP INDEX %s.%s" % (
self.preparer.quote_identifier(index.table.name),
self.preparer.quote(
self._index_identifier(index.name), index.quote)
)
class SybaseIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = RESERVED_WORDS
class SybaseDialect(default.DefaultDialect):
name = 'sybase'
supports_unicode_statements = False
supports_sane_rowcount = False
supports_sane_multi_rowcount = False
supports_native_boolean = False
supports_unicode_binds = False
postfetch_lastrowid = True
colspecs = {}
ischema_names = ischema_names
type_compiler = SybaseTypeCompiler
statement_compiler = SybaseSQLCompiler
ddl_compiler = SybaseDDLCompiler
preparer = SybaseIdentifierPreparer
def _get_default_schema_name(self, connection):
return connection.scalar(
text("SELECT user_name() as user_name",
typemap={'user_name':Unicode})
)
def initialize(self, connection):
super(SybaseDialect, self).initialize(connection)
if self.server_version_info is not None and\
self.server_version_info < (15, ):
self.max_identifier_length = 30
else:
self.max_identifier_length = 255
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
if schema is None:
schema = self.default_schema_name
result = connection.execute(
text("select sysobjects.name from sysobjects, sysusers "
"where sysobjects.uid=sysusers.uid and "
"sysusers.name=:schemaname and "
"sysobjects.type='U'",
bindparams=[
bindparam('schemaname', schema)
])
)
return [r[0] for r in result]
def has_table(self, connection, tablename, schema=None):
if schema is None:
schema = self.default_schema_name
result = connection.execute(
text("select sysobjects.name from sysobjects, sysusers "
"where sysobjects.uid=sysusers.uid and "
"sysobjects.name=:tablename and "
"sysusers.name=:schemaname and "
"sysobjects.type='U'",
bindparams=[
bindparam('tablename', tablename),
bindparam('schemaname', schema)
])
)
return result.scalar() is not None
def reflecttable(self, connection, table, include_columns):
raise NotImplementedError()
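# --- illustrative usage sketch, not part of the original module ---
# The reflection helpers can be exercised directly; `conn` below is assumed
# to be a live SQLAlchemy connection to a Sybase database:
#   dialect = SybaseDialect()
#   dialect.get_table_names(conn, schema='dbo')  # joins sysobjects/sysusers
#   dialect.has_table(conn, 'my_table')          # same join, scalar check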
| mit | -7,744,394,957,950,036,000 | 33.9447 | 84 | 0.57497 | false |
raghavs1108/DataPlotter | examples/GLVolumeItem.py | 28 | 1968 | # -*- coding: utf-8 -*-
"""
Demonstrates GLVolumeItem for displaying volumetric data.
"""
## Add path to library (just for examples; you do not need this)
import initExample
from pyqtgraph.Qt import QtCore, QtGui
import pyqtgraph.opengl as gl
app = QtGui.QApplication([])
w = gl.GLViewWidget()
w.opts['distance'] = 200
w.show()
w.setWindowTitle('pyqtgraph example: GLVolumeItem')
#b = gl.GLBoxItem()
#w.addItem(b)
g = gl.GLGridItem()
g.scale(10, 10, 1)
w.addItem(g)
import numpy as np
## Hydrogen electron probability density
def psi(i, j, k, offset=(50,50,100)):
x = i-offset[0]
y = j-offset[1]
z = k-offset[2]
th = np.arctan2(z, (x**2+y**2)**0.5)
phi = np.arctan2(y, x)
r = (x**2 + y**2 + z **2)**0.5
a0 = 2
#ps = (1./81.) * (2./np.pi)**0.5 * (1./a0)**(3/2) * (6 - r/a0) * (r/a0) * np.exp(-r/(3*a0)) * np.cos(th)
ps = (1./81.) * 1./(6.*np.pi)**0.5 * (1./a0)**(3/2) * (r/a0)**2 * np.exp(-r/(3*a0)) * (3 * np.cos(th)**2 - 1)
return ps
#return ((1./81.) * (1./np.pi)**0.5 * (1./a0)**(3/2) * (r/a0)**2 * (r/a0) * np.exp(-r/(3*a0)) * np.sin(th) * np.cos(th) * np.exp(2 * 1j * phi))**2
data = np.fromfunction(psi, (100,100,200))
positive = np.log(np.clip(data, 0, data.max())**2)
negative = np.log(np.clip(-data, 0, -data.min())**2)
d2 = np.empty(data.shape + (4,), dtype=np.ubyte)
d2[..., 0] = positive * (255./positive.max())
d2[..., 1] = negative * (255./negative.max())
d2[..., 2] = d2[...,1]
d2[..., 3] = d2[..., 0]*0.3 + d2[..., 1]*0.3
d2[..., 3] = (d2[..., 3].astype(float) / 255.) **2 * 255
d2[:, 0, 0] = [255,0,0,100]
d2[0, :, 0] = [0,255,0,100]
d2[0, 0, :] = [0,0,255,100]
v = gl.GLVolumeItem(d2)
v.translate(-50,-50,-100)
w.addItem(v)
ax = gl.GLAxisItem()
w.addItem(ax)
## Start Qt event loop unless running in interactive mode.
if __name__ == '__main__':
import sys
if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
QtGui.QApplication.instance().exec_()
| mit | -246,080,326,094,674,560 | 27.521739 | 151 | 0.556402 | false |
ktan2020/legacy-automation | win/Lib/hotshot/log.py | 20 | 6433 | import _hotshot
import os.path
import parser
import symbol
from _hotshot import \
WHAT_ENTER, \
WHAT_EXIT, \
WHAT_LINENO, \
WHAT_DEFINE_FILE, \
WHAT_DEFINE_FUNC, \
WHAT_ADD_INFO
__all__ = ["LogReader", "ENTER", "EXIT", "LINE"]
ENTER = WHAT_ENTER
EXIT = WHAT_EXIT
LINE = WHAT_LINENO
class LogReader:
def __init__(self, logfn):
# fileno -> filename
self._filemap = {}
# (fileno, lineno) -> filename, funcname
self._funcmap = {}
self._reader = _hotshot.logreader(logfn)
self._nextitem = self._reader.next
self._info = self._reader.info
if 'current-directory' in self._info:
self.cwd = self._info['current-directory']
else:
self.cwd = None
# This mirrors the call stack of the profiled code as the log
# is read back in. It contains tuples of the form:
#
# (file name, line number of function def, function name)
#
self._stack = []
self._append = self._stack.append
self._pop = self._stack.pop
def close(self):
self._reader.close()
def fileno(self):
"""Return the file descriptor of the log reader's log file."""
return self._reader.fileno()
def addinfo(self, key, value):
"""This method is called for each additional ADD_INFO record.
This can be overridden by applications that want to receive
these events. The default implementation does not need to be
called by alternate implementations.
The initial set of ADD_INFO records do not pass through this
mechanism; this is only needed to receive notification when
new values are added. Subclasses can inspect self._info after
calling LogReader.__init__().
"""
pass
def get_filename(self, fileno):
try:
return self._filemap[fileno]
except KeyError:
raise ValueError, "unknown fileno"
def get_filenames(self):
return self._filemap.values()
def get_fileno(self, filename):
filename = os.path.normcase(os.path.normpath(filename))
for fileno, name in self._filemap.items():
if name == filename:
return fileno
raise ValueError, "unknown filename"
def get_funcname(self, fileno, lineno):
try:
return self._funcmap[(fileno, lineno)]
except KeyError:
raise ValueError, "unknown function location"
# Iteration support:
# This adds an optional (& ignored) parameter to next() so that the
# same bound method can be used as the __getitem__() method -- this
# avoids using an additional method call which kills the performance.
def next(self, index=0):
while 1:
# This call may raise StopIteration:
what, tdelta, fileno, lineno = self._nextitem()
# handle the most common cases first
if what == WHAT_ENTER:
filename, funcname = self._decode_location(fileno, lineno)
t = (filename, lineno, funcname)
self._append(t)
return what, t, tdelta
if what == WHAT_EXIT:
try:
return what, self._pop(), tdelta
except IndexError:
raise StopIteration
if what == WHAT_LINENO:
filename, firstlineno, funcname = self._stack[-1]
return what, (filename, lineno, funcname), tdelta
if what == WHAT_DEFINE_FILE:
filename = os.path.normcase(os.path.normpath(tdelta))
self._filemap[fileno] = filename
elif what == WHAT_DEFINE_FUNC:
filename = self._filemap[fileno]
self._funcmap[(fileno, lineno)] = (filename, tdelta)
elif what == WHAT_ADD_INFO:
# value already loaded into self.info; call the
# overridable addinfo() handler so higher-level code
# can pick up the new value
if tdelta == 'current-directory':
self.cwd = lineno
self.addinfo(tdelta, lineno)
else:
raise ValueError, "unknown event type"
def __iter__(self):
return self
#
# helpers
#
def _decode_location(self, fileno, lineno):
try:
return self._funcmap[(fileno, lineno)]
except KeyError:
#
# This should only be needed when the log file does not
# contain all the DEFINE_FUNC records needed to allow the
# function name to be retrieved from the log file.
#
if self._loadfile(fileno):
filename = funcname = None
try:
filename, funcname = self._funcmap[(fileno, lineno)]
except KeyError:
filename = self._filemap.get(fileno)
funcname = None
self._funcmap[(fileno, lineno)] = (filename, funcname)
return filename, funcname
def _loadfile(self, fileno):
try:
filename = self._filemap[fileno]
except KeyError:
print "Could not identify fileId", fileno
return 1
if filename is None:
return 1
absname = os.path.normcase(os.path.join(self.cwd, filename))
try:
fp = open(absname)
except IOError:
return
st = parser.suite(fp.read())
fp.close()
# Scan the tree looking for def and lambda nodes, filling in
# self._funcmap with all the available information.
funcdef = symbol.funcdef
lambdef = symbol.lambdef
stack = [st.totuple(1)]
while stack:
tree = stack.pop()
try:
sym = tree[0]
except (IndexError, TypeError):
continue
if sym == funcdef:
self._funcmap[(fileno, tree[2][2])] = filename, tree[2][1]
elif sym == lambdef:
self._funcmap[(fileno, tree[1][2])] = filename, "<lambda>"
stack.extend(list(tree[1:]))
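# --- illustrative usage sketch, not part of the original module ---
# 'prof.hotshot' is a hypothetical log file produced by hotshot.Profile:
#   log = LogReader('prof.hotshot')
#   for what, (filename, lineno, funcname), tdelta in log:
#       if what == ENTER:
#           print "enter %s:%s %s" % (filename, lineno, funcname)
#   log.close()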
| mit | -1,654,632,130,999,827,000 | 31.159794 | 74 | 0.535986 | false |
e-dorigatti/pyspider | pyspider/libs/multiprocessing_queue.py | 14 | 2808 | import six
import platform
import multiprocessing
from multiprocessing.queues import Queue as BaseQueue
# The SharedCounter and Queue classes come from:
# https://github.com/vterron/lemon/commit/9ca6b4b
class SharedCounter(object):
""" A synchronized shared counter.
The locking done by multiprocessing.Value ensures that only a single
process or thread may read or write the in-memory ctypes object. However,
in order to do n += 1, Python performs a read followed by a write, so a
second process may read the old value before the new one is written by the
first process. The solution is to use a multiprocessing.Lock to guarantee
the atomicity of the modifications to Value.
This class comes almost entirely from Eli Bendersky's blog:
http://eli.thegreenplace.net/2012/01/04/shared-counter-with-pythons-multiprocessing/
"""
def __init__(self, n=0):
self.count = multiprocessing.Value('i', n)
def increment(self, n=1):
""" Increment the counter by n (default = 1) """
with self.count.get_lock():
self.count.value += n
@property
def value(self):
""" Return the value of the counter """
return self.count.value
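# A quick sanity sketch (not part of the original module): increments are
# atomic because they happen under the Value's lock:
#   c = SharedCounter()
#   c.increment()
#   c.increment(2)
#   assert c.value == 3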
class MultiProcessingQueue(BaseQueue):
""" A portable implementation of multiprocessing.Queue.
Because of multithreading / multiprocessing semantics, Queue.qsize() may
raise the NotImplementedError exception on Unix platforms like Mac OS X
where sem_getvalue() is not implemented. This subclass addresses this
problem by using a synchronized shared counter (initialized to zero) and
increasing / decreasing its value every time the put() and get() methods
are called, respectively. This not only prevents NotImplementedError from
being raised, but also allows us to implement a reliable version of both
qsize() and empty().
"""
def __init__(self, *args, **kwargs):
super(MultiProcessingQueue, self).__init__(*args, **kwargs)
self.size = SharedCounter(0)
def put(self, *args, **kwargs):
self.size.increment(1)
super(MultiProcessingQueue, self).put(*args, **kwargs)
def get(self, *args, **kwargs):
v = super(MultiProcessingQueue, self).get(*args, **kwargs)
self.size.increment(-1)
return v
def qsize(self):
""" Reliable implementation of multiprocessing.Queue.qsize() """
return self.size.value
if platform.system() == 'Darwin':
if hasattr(multiprocessing, 'get_context'): # for py34
def Queue(maxsize=0):
return MultiProcessingQueue(maxsize, ctx=multiprocessing.get_context())
else:
def Queue(maxsize=0):
return MultiProcessingQueue(maxsize)
else:
from multiprocessing import Queue # flake8: noqa
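# --- illustrative usage sketch, not part of the original module ---
if __name__ == '__main__':
    # qsize() stays reliable even where sem_getvalue() is unimplemented
    # (e.g. Mac OS X), thanks to the shared counter maintained above.
    q = Queue()
    q.put('task')
    assert q.qsize() == 1
    q.get()
    assert q.qsize() == 0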
| apache-2.0 | 761,477,861,795,602,600 | 37.465753 | 88 | 0.689459 | false |
zoeyangyy/event-extraction | tf_test/lstm-pos.py | 1 | 7622 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import gensim
import tensorflow as tf
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
from tensorflow.contrib import rnn
import numpy as np
'''
For Chinese word segmentation.
https://github.com/yongyehuang/Tensorflow-Tutorial/blob/master/Tutorial_6%20-%20Bi-directional%20LSTM%20for%20sequence%20labeling%20(Chinese%20segmentation).ipynb
'''
# ##################### config ######################
decay = 0.85
max_epoch = 5
max_max_epoch = 10
timestep_size = max_len = 32  # sentence length
vocab_size = 5159  # number of distinct characters in the corpus, obtained during data preprocessing
input_size = embedding_size = 100  # character embedding dimension
class_num = 5
hidden_size = 128  # number of hidden units
layer_num = 2  # number of bi-LSTM layers
max_grad_norm = 5.0  # maximum gradient norm (larger gradients are clipped)
lr = tf.placeholder(tf.float32)
keep_prob = tf.placeholder(tf.float32)
batch_size = 128  # note: the type must be tf.int32
model_save_path = 'my_net/bi-lstm.ckpt'  # where the model checkpoint is saved
def weight_variable(shape):
"""Create a weight variable with appropriate initialization."""
initial = tf.truncated_normal(shape, stddev=0.1)
return tf.Variable(initial)
def bias_variable(shape):
"""Create a bias variable with appropriate initialization."""
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
X_inputs = tf.placeholder(tf.int32, [None, timestep_size], name='X_input')
y_inputs = tf.placeholder(tf.int32, [None, timestep_size], name='y_input')
model = gensim.models.Word2Vec.load("../raw_file/text100.model")
def bi_lstm(X_inputs):
"""build the bi-LSTMs network. Return the y_pred"""
    # ** 0. char embedding: make sure you understand how embeddings work; it is essential for NLP
embedding = tf.get_variable("embedding", [vocab_size, embedding_size], dtype=tf.float32)
# X_inputs.shape = [batchsize, timestep_size] -> inputs.shape = [batchsize, timestep_size, embedding_size]
inputs = tf.nn.embedding_lookup(embedding, X_inputs)
    # ** 1. LSTM layer
# lstm_fw_cell = rnn.BasicLSTMCell(hidden_size, forget_bias=1.0, state_is_tuple=True)
# lstm_bw_cell = rnn.BasicLSTMCell(hidden_size, forget_bias=1.0, state_is_tuple=True)
# # ** 2.dropout
# lstm_fw_cell = rnn.DropoutWrapper(cell=lstm_fw_cell, input_keep_prob=1.0, output_keep_prob=keep_prob)
# lstm_bw_cell = rnn.DropoutWrapper(cell=lstm_bw_cell, input_keep_prob=1.0, output_keep_prob=keep_prob)
    # ** 3. stacked LSTM layers
stacked_fw = []
for i in range(layer_num):
lstm_fw_cell = rnn.BasicLSTMCell(num_units=hidden_size, forget_bias=1.0, state_is_tuple=True)
stacked_fw.append(rnn.DropoutWrapper(cell=lstm_fw_cell, input_keep_prob=1.0, output_keep_prob=keep_prob))
stacked_bw = []
for i in range(layer_num):
lstm_bw_cell = rnn.BasicLSTMCell(num_units=hidden_size, forget_bias=1.0, state_is_tuple=True)
stacked_bw.append(rnn.DropoutWrapper(cell=lstm_bw_cell, input_keep_prob=1.0, output_keep_prob=keep_prob))
cell_fw = rnn.MultiRNNCell(cells=stacked_fw, state_is_tuple=True)
cell_bw = rnn.MultiRNNCell(cells=stacked_bw, state_is_tuple=True)
    # ** 4. initial states
initial_state_fw = cell_fw.zero_state(batch_size, tf.float32)
initial_state_bw = cell_bw.zero_state(batch_size, tf.float32)
    # the two code paths below are equivalent
# **************************************************************
    # ** reshape inputs into the form expected by rnn.static_bidirectional_rnn
    # ** from the documentation:
# inputs: A length T list of inputs, each a tensor of shape
# [batch_size, input_size], or a nested tuple of such elements.
# *************************************************************
# Unstack to get a list of 'n_steps' tensors of shape (batch_size, n_input)
# inputs.shape = [batchsize, timestep_size, embedding_size] -> timestep_size tensor, each_tensor.shape = [batchsize, embedding_size]
# inputs = tf.unstack(inputs, timestep_size, 1)
    # ** 5. bi-lstm computation (TF wrapper): normally one would just call the
    # static_bidirectional_rnn function below; to expose the details of the
    # computation, it is also unrolled by hand further down.
# try:
# outputs, _, _ = rnn.static_bidirectional_rnn(cell_fw, cell_bw, inputs,
# initial_state_fw = initial_state_fw, initial_state_bw = initial_state_bw, dtype=tf.float32)
# except Exception: # Old TensorFlow version only returns outputs not states
# outputs = rnn.static_bidirectional_rnn(cell_fw, cell_bw, inputs,
# initial_state_fw = initial_state_fw, initial_state_bw = initial_state_bw, dtype=tf.float32)
# output = tf.reshape(tf.concat(outputs, 1), [-1, hidden_size * 2])
# ***********************************************************
# ***********************************************************
    # ** 5. bi-lstm computation (unrolled)
with tf.variable_scope('bidirectional_rnn'):
        # *** below, the two networks compute their outputs and states separately
# Forward direction
outputs_fw = list()
state_fw = initial_state_fw
with tf.variable_scope('fw'):
for timestep in range(timestep_size):
if timestep > 0:
tf.get_variable_scope().reuse_variables()
(output_fw, state_fw) = cell_fw(inputs[:, timestep, :], state_fw)
outputs_fw.append(output_fw)
# backward direction
outputs_bw = list()
state_bw = initial_state_bw
with tf.variable_scope('bw') as bw_scope:
inputs = tf.reverse(inputs, [1])
for timestep in range(timestep_size):
if timestep > 0:
tf.get_variable_scope().reuse_variables()
(output_bw, state_bw) = cell_bw(inputs[:, timestep, :], state_bw)
outputs_bw.append(output_bw)
        # *** then reverse output_bw along the timestep dimension
# outputs_bw.shape = [timestep_size, batch_size, hidden_size]
outputs_bw = tf.reverse(outputs_bw, [0])
        # concatenate the two outputs into [timestep_size, batch_size, hidden_size*2]
output = tf.concat([outputs_fw, outputs_bw], 2)
    # output.shape must align with y_input.shape = [batch_size, timestep_size]
output = tf.transpose(output, perm=[1,0,2])
output = tf.reshape(output, [-1, hidden_size*2])
# ***********************************************************
softmax_w = weight_variable([hidden_size * 2, class_num])
softmax_b = bias_variable([class_num])
logits = tf.matmul(output, softmax_w) + softmax_b
return logits
y_pred = bi_lstm(X_inputs)
# adding extra statistics to monitor
# y_inputs.shape = [batch_size, timestep_size]
correct_prediction = tf.equal(tf.cast(tf.argmax(y_pred, 1), tf.int32), tf.reshape(y_inputs, [-1]))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
cost = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels = tf.reshape(y_inputs, [-1]), logits = y_pred))
# ***** optimization *****
# collect all trainable parameters of the model
tvars = tf.trainable_variables()
# gradients of the loss with respect to each parameter, clipped by global norm
grads, _ = tf.clip_by_global_norm(tf.gradients(cost, tvars), max_grad_norm)
# optimizer
optimizer = tf.train.AdamOptimizer(learning_rate=lr)
# apply the gradient-descent update
train_op = optimizer.apply_gradients( zip(grads, tvars),
global_step=tf.contrib.framework.get_or_create_global_step())
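# --- illustrative training-step sketch, not part of the original script ---
# X_batch / y_batch are assumed int32 arrays of shape [batch_size, timestep_size]:
#   sess.run(tf.global_variables_initializer())
#   feed = {X_inputs: X_batch, y_inputs: y_batch, lr: 1e-3, keep_prob: 0.5}
#   _, batch_acc = sess.run([train_op, accuracy], feed_dict=feed)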
print('Finished creating the bi-lstm model.')
| mit | -2,135,096,933,725,471,700 | 44.14557 | 162 | 0.626192 | false |
longman694/youtube-dl | youtube_dl/extractor/tass.py | 64 | 2016 | # coding: utf-8
from __future__ import unicode_literals
import json
from .common import InfoExtractor
from ..utils import (
js_to_json,
qualities,
)
class TassIE(InfoExtractor):
_VALID_URL = r'https?://(?:tass\.ru|itar-tass\.com)/[^/]+/(?P<id>\d+)'
_TESTS = [
{
'url': 'http://tass.ru/obschestvo/1586870',
'md5': '3b4cdd011bc59174596b6145cda474a4',
'info_dict': {
'id': '1586870',
'ext': 'mp4',
'title': 'Посетителям московского зоопарка показали красную панду',
'description': 'Приехавшую из Дублина Зейну можно увидеть в павильоне "Кошки тропиков"',
'thumbnail': r're:^https?://.*\.jpg$',
},
},
{
'url': 'http://itar-tass.com/obschestvo/1600009',
'only_matching': True,
},
]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
sources = json.loads(js_to_json(self._search_regex(
r'(?s)sources\s*:\s*(\[.+?\])', webpage, 'sources')))
quality = qualities(['sd', 'hd'])
formats = []
for source in sources:
video_url = source.get('file')
if not video_url or not video_url.startswith('http') or not video_url.endswith('.mp4'):
continue
label = source.get('label')
formats.append({
'url': video_url,
'format_id': label,
'quality': quality(label),
})
self._sort_formats(formats)
return {
'id': video_id,
'title': self._og_search_title(webpage),
'description': self._og_search_description(webpage),
'thumbnail': self._og_search_thumbnail(webpage),
'formats': formats,
}
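# --- illustrative note, not part of the original module ---
# The extractor is exercised through the normal youtube-dl entry points, e.g.
#   youtube-dl http://tass.ru/obschestvo/1586870
# or through the generated download tests (assumed invocation):
#   python test/test_download.py TestDownload.test_Tass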
| unlicense | 3,388,902,929,332,979,000 | 29.758065 | 104 | 0.510225 | false |
ryfeus/lambda-packs | Tensorflow_OpenCV_Nightly/source/tensorflow/contrib/keras/python/keras/applications/vgg16.py | 30 | 9077 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=invalid-name
"""VGG16 model for Keras.
# Reference
- [Very Deep Convolutional Networks for Large-Scale Image
Recognition](https://arxiv.org/abs/1409.1556)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import warnings
from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import decode_predictions # pylint: disable=unused-import
from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import preprocess_input # pylint: disable=unused-import
from tensorflow.contrib.keras.python.keras.engine.topology import get_source_inputs
from tensorflow.contrib.keras.python.keras.layers import Conv2D
from tensorflow.contrib.keras.python.keras.layers import Dense
from tensorflow.contrib.keras.python.keras.layers import Flatten
from tensorflow.contrib.keras.python.keras.layers import GlobalAveragePooling2D
from tensorflow.contrib.keras.python.keras.layers import GlobalMaxPooling2D
from tensorflow.contrib.keras.python.keras.layers import Input
from tensorflow.contrib.keras.python.keras.layers import MaxPooling2D
from tensorflow.contrib.keras.python.keras.models import Model
from tensorflow.contrib.keras.python.keras.utils import layer_utils
from tensorflow.contrib.keras.python.keras.utils.data_utils import get_file
WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels.h5'
WEIGHTS_PATH_NO_TOP = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5'
def VGG16(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000):
"""Instantiates the VGG16 architecture.
Optionally loads weights pre-trained
on ImageNet. Note that when using TensorFlow,
for best performance you should set
`image_data_format="channels_last"` in your Keras config
at ~/.keras/keras.json.
The model and the weights are compatible with both
TensorFlow and Theano. The data format
convention used by the model is the one
specified in your Keras config file.
Arguments:
include_top: whether to include the 3 fully-connected
layers at the top of the network.
weights: one of `None` (random initialization)
or "imagenet" (pre-training on ImageNet).
input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
to use as image input for the model.
input_shape: optional shape tuple, only to be specified
if `include_top` is False (otherwise the input shape
has to be `(224, 224, 3)` (with `channels_last` data format)
        or `(3, 224, 224)` (with `channels_first` data format).
It should have exactly 3 inputs channels,
and width and height should be no smaller than 48.
E.g. `(200, 200, 3)` would be one valid value.
pooling: Optional pooling mode for feature extraction
when `include_top` is `False`.
- `None` means that the output of the model will be
the 4D tensor output of the
last convolutional layer.
- `avg` means that global average pooling
will be applied to the output of the
last convolutional layer, and thus
the output of the model will be a 2D tensor.
- `max` means that global max pooling will
be applied.
classes: optional number of classes to classify images
into, only to be specified if `include_top` is True, and
if no `weights` argument is specified.
Returns:
A Keras model instance.
Raises:
ValueError: in case of invalid argument for `weights`,
or invalid input shape.
"""
if weights not in {'imagenet', None}:
raise ValueError('The `weights` argument should be either '
'`None` (random initialization) or `imagenet` '
'(pre-training on ImageNet).')
if weights == 'imagenet' and include_top and classes != 1000:
raise ValueError('If using `weights` as imagenet with `include_top`'
' as true, `classes` should be 1000')
# Determine proper input shape
input_shape = _obtain_input_shape(
input_shape,
default_size=224,
min_size=48,
data_format=K.image_data_format(),
include_top=include_top)
if input_tensor is None:
img_input = Input(shape=input_shape)
else:
img_input = Input(tensor=input_tensor, shape=input_shape)
# Block 1
x = Conv2D(
64, (3, 3), activation='relu', padding='same',
name='block1_conv1')(img_input)
x = Conv2D(
64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x)
x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)
# Block 2
x = Conv2D(
128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x)
x = Conv2D(
128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x)
x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)
# Block 3
x = Conv2D(
256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x)
x = Conv2D(
256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x)
x = Conv2D(
256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x)
x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)
# Block 4
x = Conv2D(
512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x)
x = Conv2D(
512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x)
x = Conv2D(
512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x)
x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x)
# Block 5
x = Conv2D(
512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x)
x = Conv2D(
512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x)
x = Conv2D(
512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x)
x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x)
if include_top:
# Classification block
x = Flatten(name='flatten')(x)
x = Dense(4096, activation='relu', name='fc1')(x)
x = Dense(4096, activation='relu', name='fc2')(x)
x = Dense(classes, activation='softmax', name='predictions')(x)
else:
if pooling == 'avg':
x = GlobalAveragePooling2D()(x)
elif pooling == 'max':
x = GlobalMaxPooling2D()(x)
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
inputs = get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
model = Model(inputs, x, name='vgg16')
# load weights
if weights == 'imagenet':
if include_top:
weights_path = get_file(
'vgg16_weights_tf_dim_ordering_tf_kernels.h5',
WEIGHTS_PATH,
cache_subdir='models')
else:
weights_path = get_file(
'vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5',
WEIGHTS_PATH_NO_TOP,
cache_subdir='models')
model.load_weights(weights_path)
if K.backend() == 'theano':
layer_utils.convert_all_kernels_in_model(model)
if K.image_data_format() == 'channels_first':
if include_top:
maxpool = model.get_layer(name='block5_pool')
shape = maxpool.output_shape[1:]
dense = model.get_layer(name='fc1')
layer_utils.convert_dense_weights_data_format(dense, shape,
'channels_first')
if K.backend() == 'tensorflow':
warnings.warn('You are using the TensorFlow backend, yet you '
'are using the Theano '
'image data format convention '
'(`image_data_format="channels_first"`). '
'For best performance, set '
'`image_data_format="channels_last"` in '
'your Keras config '
'at ~/.keras/keras.json.')
return model
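# --- illustrative usage sketch, not part of the original module ---
# Assuming the ImageNet weights can be downloaded, classification looks like:
#   model = VGG16(weights='imagenet', include_top=True)
#   preds = model.predict(x)  # x: preprocessed batch of shape (1, 224, 224, 3)
#   print(decode_predictions(preds, top=3))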
| mit | 4,905,423,992,466,024,000 | 39.704036 | 145 | 0.649884 | false |
iivic/BoiseStateX | common/test/acceptance/tests/studio/test_studio_general.py | 105 | 5669 | """
Acceptance tests for Studio.
"""
from unittest import skip
from bok_choy.web_app_test import WebAppTest
from ...pages.studio.asset_index import AssetIndexPage
from ...pages.studio.auto_auth import AutoAuthPage
from ...pages.studio.checklists import ChecklistsPage
from ...pages.studio.course_info import CourseUpdatesPage
from ...pages.studio.edit_tabs import PagesPage
from ...pages.studio.import_export import ExportCoursePage, ImportCoursePage
from ...pages.studio.howitworks import HowitworksPage
from ...pages.studio.index import DashboardPage
from ...pages.studio.login import LoginPage
from ...pages.studio.users import CourseTeamPage
from ...pages.studio.overview import CourseOutlinePage
from ...pages.studio.settings import SettingsPage
from ...pages.studio.settings_advanced import AdvancedSettingsPage
from ...pages.studio.settings_graders import GradingPage
from ...pages.studio.signup import SignupPage
from ...pages.studio.textbooks import TextbooksPage
from ...fixtures.course import XBlockFixtureDesc
from base_studio_test import StudioCourseTest
class LoggedOutTest(WebAppTest):
"""
Smoke test for pages in Studio that are visible when logged out.
"""
def setUp(self):
super(LoggedOutTest, self).setUp()
self.pages = [LoginPage(self.browser), HowitworksPage(self.browser), SignupPage(self.browser)]
def test_page_existence(self):
"""
Make sure that all the pages are accessible.
Rather than fire up the browser just to check each url,
do them all sequentially in this testcase.
"""
for page in self.pages:
page.visit()
class LoggedInPagesTest(WebAppTest):
"""
Tests that verify the pages in Studio that you can get to when logged
in and do not have a course yet.
"""
def setUp(self):
super(LoggedInPagesTest, self).setUp()
self.auth_page = AutoAuthPage(self.browser, staff=True)
self.dashboard_page = DashboardPage(self.browser)
def test_dashboard_no_courses(self):
"""
Make sure that you can get to the dashboard page without a course.
"""
self.auth_page.visit()
self.dashboard_page.visit()
class CoursePagesTest(StudioCourseTest):
"""
Tests that verify the pages in Studio that you can get to when logged
in and have a course.
"""
COURSE_ID_SEPARATOR = "."
def setUp(self):
"""
Install a course with no content using a fixture.
"""
super(CoursePagesTest, self).setUp()
self.pages = [
clz(self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run'])
for clz in [
AssetIndexPage, ChecklistsPage, CourseUpdatesPage,
PagesPage, ExportCoursePage, ImportCoursePage, CourseTeamPage, CourseOutlinePage, SettingsPage,
AdvancedSettingsPage, GradingPage, TextbooksPage
]
]
def test_page_redirect(self):
"""
/course/ is the base URL for all courses, but by itself, it should
redirect to /home/.
"""
self.dashboard_page = DashboardPage(self.browser) # pylint: disable=attribute-defined-outside-init
self.dashboard_page.visit()
self.assertEqual(self.browser.current_url.strip('/').rsplit('/')[-1], 'home')
@skip('Intermittently failing with Page not found error for Assets. TE-418')
def test_page_existence(self):
"""
Make sure that all these pages are accessible once you have a course.
Rather than fire up the browser just to check each url,
do them all sequentially in this testcase.
"""
# In the real workflow you will be at the dashboard page
# after you log in. This test was intermittently failing on the
# first (asset) page load with a 404.
# Not exactly sure why, so adding in a visit
# to the dashboard page here to replicate the usual flow.
self.dashboard_page = DashboardPage(self.browser)
self.dashboard_page.visit()
# Verify that each page is available
for page in self.pages:
page.visit()
class DiscussionPreviewTest(StudioCourseTest):
"""
Tests that Inline Discussions are rendered with a custom preview in Studio
"""
def setUp(self):
super(DiscussionPreviewTest, self).setUp()
cop = CourseOutlinePage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
cop.visit()
self.unit = cop.section('Test Section').subsection('Test Subsection').expand_subsection().unit('Test Unit')
self.unit.go_to()
def populate_course_fixture(self, course_fixture):
"""
Return a test course fixture containing a discussion component.
"""
course_fixture.add_children(
XBlockFixtureDesc("chapter", "Test Section").add_children(
XBlockFixtureDesc("sequential", "Test Subsection").add_children(
XBlockFixtureDesc("vertical", "Test Unit").add_children(
XBlockFixtureDesc(
"discussion",
"Test Discussion",
)
)
)
)
)
def test_is_preview(self):
"""
Ensure that the preview version of the discussion is rendered.
"""
self.assertTrue(self.unit.q(css=".discussion-preview").present)
self.assertFalse(self.unit.q(css=".discussion-show").present)
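# --- illustrative note, not part of the original module ---
# In edx-platform these bok-choy suites are normally driven through paver
# (assumed invocation): paver test_bokchoy -t test_studio_general.py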
| agpl-3.0 | -7,413,236,907,907,949,000 | 34.654088 | 115 | 0.643676 | false |
cloudfoundry/php-buildpack-legacy | builds/runtimes/python-2.7.6/lib/python2.7/test/test_posixpath.py | 71 | 17716 | import unittest
from test import test_support, test_genericpath
import posixpath, os
from posixpath import realpath, abspath, dirname, basename
# An absolute path to a temporary filename for testing. We can't rely on TESTFN
# being an absolute path, so we need this.
ABSTFN = abspath(test_support.TESTFN)
def skip_if_ABSTFN_contains_backslash(test):
"""
On Windows, posixpath.abspath still returns paths with backslashes
instead of posix forward slashes. If this is the case, several tests
fail, so skip them.
"""
found_backslash = '\\' in ABSTFN
msg = "ABSTFN is not a posix path - tests fail"
return [test, unittest.skip(msg)(test)][found_backslash]
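# e.g. (illustrative, not in the original): since bool subclasses int,
# [plain_test, skipped_test][False] selects the undecorated test and
# [plain_test, skipped_test][True] selects the skip-wrapped one.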
def safe_rmdir(dirname):
try:
os.rmdir(dirname)
except OSError:
pass
class PosixPathTest(unittest.TestCase):
def setUp(self):
self.tearDown()
def tearDown(self):
for suffix in ["", "1", "2"]:
test_support.unlink(test_support.TESTFN + suffix)
safe_rmdir(test_support.TESTFN + suffix)
def test_join(self):
self.assertEqual(posixpath.join("/foo", "bar", "/bar", "baz"), "/bar/baz")
self.assertEqual(posixpath.join("/foo", "bar", "baz"), "/foo/bar/baz")
self.assertEqual(posixpath.join("/foo/", "bar/", "baz/"), "/foo/bar/baz/")
def test_split(self):
self.assertEqual(posixpath.split("/foo/bar"), ("/foo", "bar"))
self.assertEqual(posixpath.split("/"), ("/", ""))
self.assertEqual(posixpath.split("foo"), ("", "foo"))
self.assertEqual(posixpath.split("////foo"), ("////", "foo"))
self.assertEqual(posixpath.split("//foo//bar"), ("//foo", "bar"))
def splitextTest(self, path, filename, ext):
self.assertEqual(posixpath.splitext(path), (filename, ext))
self.assertEqual(posixpath.splitext("/" + path), ("/" + filename, ext))
self.assertEqual(posixpath.splitext("abc/" + path), ("abc/" + filename, ext))
self.assertEqual(posixpath.splitext("abc.def/" + path), ("abc.def/" + filename, ext))
self.assertEqual(posixpath.splitext("/abc.def/" + path), ("/abc.def/" + filename, ext))
self.assertEqual(posixpath.splitext(path + "/"), (filename + ext + "/", ""))
def test_splitext(self):
self.splitextTest("foo.bar", "foo", ".bar")
self.splitextTest("foo.boo.bar", "foo.boo", ".bar")
self.splitextTest("foo.boo.biff.bar", "foo.boo.biff", ".bar")
self.splitextTest(".csh.rc", ".csh", ".rc")
self.splitextTest("nodots", "nodots", "")
self.splitextTest(".cshrc", ".cshrc", "")
self.splitextTest("...manydots", "...manydots", "")
self.splitextTest("...manydots.ext", "...manydots", ".ext")
self.splitextTest(".", ".", "")
self.splitextTest("..", "..", "")
self.splitextTest("........", "........", "")
self.splitextTest("", "", "")
def test_isabs(self):
self.assertIs(posixpath.isabs(""), False)
self.assertIs(posixpath.isabs("/"), True)
self.assertIs(posixpath.isabs("/foo"), True)
self.assertIs(posixpath.isabs("/foo/bar"), True)
self.assertIs(posixpath.isabs("foo/bar"), False)
def test_basename(self):
self.assertEqual(posixpath.basename("/foo/bar"), "bar")
self.assertEqual(posixpath.basename("/"), "")
self.assertEqual(posixpath.basename("foo"), "foo")
self.assertEqual(posixpath.basename("////foo"), "foo")
self.assertEqual(posixpath.basename("//foo//bar"), "bar")
def test_dirname(self):
self.assertEqual(posixpath.dirname("/foo/bar"), "/foo")
self.assertEqual(posixpath.dirname("/"), "/")
self.assertEqual(posixpath.dirname("foo"), "")
self.assertEqual(posixpath.dirname("////foo"), "////")
self.assertEqual(posixpath.dirname("//foo//bar"), "//foo")
def test_islink(self):
self.assertIs(posixpath.islink(test_support.TESTFN + "1"), False)
f = open(test_support.TESTFN + "1", "wb")
try:
f.write("foo")
f.close()
self.assertIs(posixpath.islink(test_support.TESTFN + "1"), False)
if hasattr(os, "symlink"):
os.symlink(test_support.TESTFN + "1", test_support.TESTFN + "2")
self.assertIs(posixpath.islink(test_support.TESTFN + "2"), True)
os.remove(test_support.TESTFN + "1")
self.assertIs(posixpath.islink(test_support.TESTFN + "2"), True)
self.assertIs(posixpath.exists(test_support.TESTFN + "2"), False)
self.assertIs(posixpath.lexists(test_support.TESTFN + "2"), True)
finally:
if not f.close():
f.close()
def test_samefile(self):
f = open(test_support.TESTFN + "1", "wb")
try:
f.write("foo")
f.close()
self.assertIs(
posixpath.samefile(
test_support.TESTFN + "1",
test_support.TESTFN + "1"
),
True
)
# If we don't have links, assume that os.stat doesn't return
# reasonable inode information and thus, that samefile() doesn't
# work.
if hasattr(os, "symlink"):
os.symlink(
test_support.TESTFN + "1",
test_support.TESTFN + "2"
)
self.assertIs(
posixpath.samefile(
test_support.TESTFN + "1",
test_support.TESTFN + "2"
),
True
)
os.remove(test_support.TESTFN + "2")
f = open(test_support.TESTFN + "2", "wb")
f.write("bar")
f.close()
self.assertIs(
posixpath.samefile(
test_support.TESTFN + "1",
test_support.TESTFN + "2"
),
False
)
finally:
if not f.close():
f.close()
def test_samestat(self):
f = open(test_support.TESTFN + "1", "wb")
try:
f.write("foo")
f.close()
self.assertIs(
posixpath.samestat(
os.stat(test_support.TESTFN + "1"),
os.stat(test_support.TESTFN + "1")
),
True
)
# If we don't have links, assume that os.stat() doesn't return
# reasonable inode information and thus, that samestat() doesn't
# work.
if hasattr(os, "symlink"):
os.symlink(test_support.TESTFN + "1", test_support.TESTFN + "2")
self.assertIs(
posixpath.samestat(
os.stat(test_support.TESTFN + "1"),
os.stat(test_support.TESTFN + "2")
),
True
)
os.remove(test_support.TESTFN + "2")
f = open(test_support.TESTFN + "2", "wb")
f.write("bar")
f.close()
self.assertIs(
posixpath.samestat(
os.stat(test_support.TESTFN + "1"),
os.stat(test_support.TESTFN + "2")
),
False
)
finally:
if not f.close():
f.close()
def test_ismount(self):
self.assertIs(posixpath.ismount("/"), True)
def test_expanduser(self):
self.assertEqual(posixpath.expanduser("foo"), "foo")
try:
import pwd
except ImportError:
pass
else:
self.assertIsInstance(posixpath.expanduser("~/"), basestring)
# if home directory == root directory, this test makes no sense
if posixpath.expanduser("~") != '/':
self.assertEqual(
posixpath.expanduser("~") + "/",
posixpath.expanduser("~/")
)
self.assertIsInstance(posixpath.expanduser("~root/"), basestring)
self.assertIsInstance(posixpath.expanduser("~foo/"), basestring)
with test_support.EnvironmentVarGuard() as env:
env['HOME'] = '/'
self.assertEqual(posixpath.expanduser("~"), "/")
self.assertEqual(posixpath.expanduser("~/foo"), "/foo")
def test_normpath(self):
self.assertEqual(posixpath.normpath(""), ".")
self.assertEqual(posixpath.normpath("/"), "/")
self.assertEqual(posixpath.normpath("//"), "//")
self.assertEqual(posixpath.normpath("///"), "/")
self.assertEqual(posixpath.normpath("///foo/.//bar//"), "/foo/bar")
self.assertEqual(posixpath.normpath("///foo/.//bar//.//..//.//baz"), "/foo/baz")
self.assertEqual(posixpath.normpath("///..//./foo/.//bar"), "/foo/bar")
@skip_if_ABSTFN_contains_backslash
def test_realpath_curdir(self):
self.assertEqual(realpath('.'), os.getcwd())
self.assertEqual(realpath('./.'), os.getcwd())
self.assertEqual(realpath('/'.join(['.'] * 100)), os.getcwd())
@skip_if_ABSTFN_contains_backslash
def test_realpath_pardir(self):
self.assertEqual(realpath('..'), dirname(os.getcwd()))
self.assertEqual(realpath('../..'), dirname(dirname(os.getcwd())))
self.assertEqual(realpath('/'.join(['..'] * 100)), '/')
if hasattr(os, "symlink"):
def test_realpath_basic(self):
# Basic operation.
try:
os.symlink(ABSTFN+"1", ABSTFN)
self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
finally:
test_support.unlink(ABSTFN)
def test_realpath_symlink_loops(self):
# Bug #930024, return the path unchanged if we get into an infinite
# symlink loop.
try:
old_path = abspath('.')
os.symlink(ABSTFN, ABSTFN)
self.assertEqual(realpath(ABSTFN), ABSTFN)
os.symlink(ABSTFN+"1", ABSTFN+"2")
os.symlink(ABSTFN+"2", ABSTFN+"1")
self.assertEqual(realpath(ABSTFN+"1"), ABSTFN+"1")
self.assertEqual(realpath(ABSTFN+"2"), ABSTFN+"2")
self.assertEqual(realpath(ABSTFN+"1/x"), ABSTFN+"1/x")
self.assertEqual(realpath(ABSTFN+"1/.."), dirname(ABSTFN))
self.assertEqual(realpath(ABSTFN+"1/../x"), dirname(ABSTFN) + "/x")
os.symlink(ABSTFN+"x", ABSTFN+"y")
self.assertEqual(realpath(ABSTFN+"1/../" + basename(ABSTFN) + "y"),
ABSTFN + "y")
self.assertEqual(realpath(ABSTFN+"1/../" + basename(ABSTFN) + "1"),
ABSTFN + "1")
os.symlink(basename(ABSTFN) + "a/b", ABSTFN+"a")
self.assertEqual(realpath(ABSTFN+"a"), ABSTFN+"a/b")
os.symlink("../" + basename(dirname(ABSTFN)) + "/" +
basename(ABSTFN) + "c", ABSTFN+"c")
self.assertEqual(realpath(ABSTFN+"c"), ABSTFN+"c")
# Test using relative path as well.
os.chdir(dirname(ABSTFN))
self.assertEqual(realpath(basename(ABSTFN)), ABSTFN)
finally:
os.chdir(old_path)
test_support.unlink(ABSTFN)
test_support.unlink(ABSTFN+"1")
test_support.unlink(ABSTFN+"2")
test_support.unlink(ABSTFN+"y")
test_support.unlink(ABSTFN+"c")
test_support.unlink(ABSTFN+"a")
def test_realpath_repeated_indirect_symlinks(self):
# Issue #6975.
try:
os.mkdir(ABSTFN)
os.symlink('../' + basename(ABSTFN), ABSTFN + '/self')
os.symlink('self/self/self', ABSTFN + '/link')
self.assertEqual(realpath(ABSTFN + '/link'), ABSTFN)
finally:
test_support.unlink(ABSTFN + '/self')
test_support.unlink(ABSTFN + '/link')
safe_rmdir(ABSTFN)
def test_realpath_deep_recursion(self):
depth = 10
old_path = abspath('.')
try:
os.mkdir(ABSTFN)
for i in range(depth):
os.symlink('/'.join(['%d' % i] * 10), ABSTFN + '/%d' % (i + 1))
os.symlink('.', ABSTFN + '/0')
self.assertEqual(realpath(ABSTFN + '/%d' % depth), ABSTFN)
# Test using relative path as well.
os.chdir(ABSTFN)
self.assertEqual(realpath('%d' % depth), ABSTFN)
finally:
os.chdir(old_path)
for i in range(depth + 1):
test_support.unlink(ABSTFN + '/%d' % i)
safe_rmdir(ABSTFN)
def test_realpath_resolve_parents(self):
# We also need to resolve any symlinks in the parents of a relative
# path passed to realpath. E.g.: current working directory is
# /usr/doc with 'doc' being a symlink to /usr/share/doc. We call
# realpath("a"). This should return /usr/share/doc/a/.
try:
old_path = abspath('.')
os.mkdir(ABSTFN)
os.mkdir(ABSTFN + "/y")
os.symlink(ABSTFN + "/y", ABSTFN + "/k")
os.chdir(ABSTFN + "/k")
self.assertEqual(realpath("a"), ABSTFN + "/y/a")
finally:
os.chdir(old_path)
test_support.unlink(ABSTFN + "/k")
safe_rmdir(ABSTFN + "/y")
safe_rmdir(ABSTFN)
def test_realpath_resolve_before_normalizing(self):
# Bug #990669: Symbolic links should be resolved before we
# normalize the path. E.g.: if we have directories 'a', 'k' and 'y'
# in the following hierarchy:
# a/k/y
#
# and a symbolic link 'link-y' pointing to 'y' in directory 'a',
# then realpath("link-y/..") should return 'k', not 'a'.
try:
old_path = abspath('.')
os.mkdir(ABSTFN)
os.mkdir(ABSTFN + "/k")
os.mkdir(ABSTFN + "/k/y")
os.symlink(ABSTFN + "/k/y", ABSTFN + "/link-y")
# Absolute path.
self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k")
# Relative path.
os.chdir(dirname(ABSTFN))
self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."),
ABSTFN + "/k")
finally:
os.chdir(old_path)
test_support.unlink(ABSTFN + "/link-y")
safe_rmdir(ABSTFN + "/k/y")
safe_rmdir(ABSTFN + "/k")
safe_rmdir(ABSTFN)
def test_realpath_resolve_first(self):
# Bug #1213894: The first component of the path, if not absolute,
# must be resolved too.
try:
old_path = abspath('.')
os.mkdir(ABSTFN)
os.mkdir(ABSTFN + "/k")
os.symlink(ABSTFN, ABSTFN + "link")
os.chdir(dirname(ABSTFN))
base = basename(ABSTFN)
self.assertEqual(realpath(base + "link"), ABSTFN)
self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k")
finally:
os.chdir(old_path)
test_support.unlink(ABSTFN + "link")
safe_rmdir(ABSTFN + "/k")
safe_rmdir(ABSTFN)
def test_relpath(self):
(real_getcwd, os.getcwd) = (os.getcwd, lambda: r"/home/user/bar")
try:
curdir = os.path.split(os.getcwd())[-1]
self.assertRaises(ValueError, posixpath.relpath, "")
self.assertEqual(posixpath.relpath("a"), "a")
self.assertEqual(posixpath.relpath(posixpath.abspath("a")), "a")
self.assertEqual(posixpath.relpath("a/b"), "a/b")
self.assertEqual(posixpath.relpath("../a/b"), "../a/b")
self.assertEqual(posixpath.relpath("a", "../b"), "../"+curdir+"/a")
self.assertEqual(posixpath.relpath("a/b", "../c"), "../"+curdir+"/a/b")
self.assertEqual(posixpath.relpath("a", "b/c"), "../../a")
self.assertEqual(posixpath.relpath("a", "a"), ".")
self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x/y/z"), '../../../foo/bar/bat')
self.assertEqual(posixpath.relpath("/foo/bar/bat", "/foo/bar"), 'bat')
self.assertEqual(posixpath.relpath("/foo/bar/bat", "/"), 'foo/bar/bat')
self.assertEqual(posixpath.relpath("/", "/foo/bar/bat"), '../../..')
self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x"), '../foo/bar/bat')
self.assertEqual(posixpath.relpath("/x", "/foo/bar/bat"), '../../../x')
self.assertEqual(posixpath.relpath("/", "/"), '.')
self.assertEqual(posixpath.relpath("/a", "/a"), '.')
self.assertEqual(posixpath.relpath("/a/b", "/a/b"), '.')
finally:
os.getcwd = real_getcwd
class PosixCommonTest(test_genericpath.CommonTest):
pathmodule = posixpath
attributes = ['relpath', 'samefile', 'sameopenfile', 'samestat']
def test_main():
test_support.run_unittest(PosixPathTest, PosixCommonTest)
if __name__=="__main__":
test_main()
| mit | 5,584,871,833,094,406,000 | 40.881797 | 97 | 0.510894 | false |
cloudfoundry/php-buildpack-legacy | builds/runtimes/python-2.7.6/lib/python2.7/test/test_cfgparser.py | 71 | 27744 | import ConfigParser
import StringIO
import os
import unittest
import UserDict
from test import test_support
class SortedDict(UserDict.UserDict):
def items(self):
result = self.data.items()
result.sort()
return result
def keys(self):
result = self.data.keys()
result.sort()
return result
def values(self):
# XXX never used?
result = self.items()
return [i[1] for i in result]
def iteritems(self): return iter(self.items())
def iterkeys(self): return iter(self.keys())
__iter__ = iterkeys
def itervalues(self): return iter(self.values())
class TestCaseBase(unittest.TestCase):
allow_no_value = False
def newconfig(self, defaults=None):
if defaults is None:
self.cf = self.config_class(allow_no_value=self.allow_no_value)
else:
self.cf = self.config_class(defaults,
allow_no_value=self.allow_no_value)
return self.cf
def fromstring(self, string, defaults=None):
cf = self.newconfig(defaults)
sio = StringIO.StringIO(string)
cf.readfp(sio)
return cf
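    # Illustrative (not in the original): subclasses drive most checks through
    # this helper, e.g. self.fromstring("[s]\nk = v\n").get("s", "k") == "v".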
def test_basic(self):
config_string = (
"[Foo Bar]\n"
"foo=bar\n"
"[Spacey Bar]\n"
"foo = bar\n"
"[Commented Bar]\n"
"foo: bar ; comment\n"
"[Long Line]\n"
"foo: this line is much, much longer than my editor\n"
" likes it.\n"
"[Section\\with$weird%characters[\t]\n"
"[Internationalized Stuff]\n"
"foo[bg]: Bulgarian\n"
"foo=Default\n"
"foo[en]=English\n"
"foo[de]=Deutsch\n"
"[Spaces]\n"
"key with spaces : value\n"
"another with spaces = splat!\n"
)
if self.allow_no_value:
config_string += (
"[NoValue]\n"
"option-without-value\n"
)
cf = self.fromstring(config_string)
L = cf.sections()
L.sort()
E = [r'Commented Bar',
r'Foo Bar',
r'Internationalized Stuff',
r'Long Line',
r'Section\with$weird%characters[' '\t',
r'Spaces',
r'Spacey Bar',
]
if self.allow_no_value:
E.append(r'NoValue')
E.sort()
eq = self.assertEqual
eq(L, E)
# The use of spaces in the section names serves as a
# regression test for SourceForge bug #583248:
# http://www.python.org/sf/583248
eq(cf.get('Foo Bar', 'foo'), 'bar')
eq(cf.get('Spacey Bar', 'foo'), 'bar')
eq(cf.get('Commented Bar', 'foo'), 'bar')
eq(cf.get('Spaces', 'key with spaces'), 'value')
eq(cf.get('Spaces', 'another with spaces'), 'splat!')
if self.allow_no_value:
eq(cf.get('NoValue', 'option-without-value'), None)
self.assertNotIn('__name__', cf.options("Foo Bar"),
'__name__ "option" should not be exposed by the API!')
# Make sure the right things happen for remove_option();
# added to include check for SourceForge bug #123324:
self.assertTrue(cf.remove_option('Foo Bar', 'foo'),
"remove_option() failed to report existence of option")
self.assertFalse(cf.has_option('Foo Bar', 'foo'),
"remove_option() failed to remove option")
self.assertFalse(cf.remove_option('Foo Bar', 'foo'),
"remove_option() failed to report non-existence of option"
" that was removed")
self.assertRaises(ConfigParser.NoSectionError,
cf.remove_option, 'No Such Section', 'foo')
eq(cf.get('Long Line', 'foo'),
'this line is much, much longer than my editor\nlikes it.')
def test_case_sensitivity(self):
cf = self.newconfig()
cf.add_section("A")
cf.add_section("a")
L = cf.sections()
L.sort()
eq = self.assertEqual
eq(L, ["A", "a"])
cf.set("a", "B", "value")
eq(cf.options("a"), ["b"])
eq(cf.get("a", "b"), "value",
"could not locate option, expecting case-insensitive option names")
self.assertTrue(cf.has_option("a", "b"))
cf.set("A", "A-B", "A-B value")
for opt in ("a-b", "A-b", "a-B", "A-B"):
self.assertTrue(
cf.has_option("A", opt),
"has_option() returned false for option which should exist")
eq(cf.options("A"), ["a-b"])
eq(cf.options("a"), ["b"])
cf.remove_option("a", "B")
eq(cf.options("a"), [])
# SF bug #432369:
cf = self.fromstring(
"[MySection]\nOption: first line\n\tsecond line\n")
eq(cf.options("MySection"), ["option"])
eq(cf.get("MySection", "Option"), "first line\nsecond line")
# SF bug #561822:
cf = self.fromstring("[section]\nnekey=nevalue\n",
defaults={"key":"value"})
self.assertTrue(cf.has_option("section", "Key"))
def test_default_case_sensitivity(self):
cf = self.newconfig({"foo": "Bar"})
self.assertEqual(
cf.get("DEFAULT", "Foo"), "Bar",
"could not locate option, expecting case-insensitive option names")
cf = self.newconfig({"Foo": "Bar"})
self.assertEqual(
cf.get("DEFAULT", "Foo"), "Bar",
"could not locate option, expecting case-insensitive defaults")
def test_parse_errors(self):
self.newconfig()
self.parse_error(ConfigParser.ParsingError,
"[Foo]\n extra-spaces: splat\n")
self.parse_error(ConfigParser.ParsingError,
"[Foo]\n extra-spaces= splat\n")
self.parse_error(ConfigParser.ParsingError,
"[Foo]\n:value-without-option-name\n")
self.parse_error(ConfigParser.ParsingError,
"[Foo]\n=value-without-option-name\n")
self.parse_error(ConfigParser.MissingSectionHeaderError,
"No Section!\n")
def parse_error(self, exc, src):
sio = StringIO.StringIO(src)
self.assertRaises(exc, self.cf.readfp, sio)
def test_query_errors(self):
cf = self.newconfig()
self.assertEqual(cf.sections(), [],
"new ConfigParser should have no defined sections")
self.assertFalse(cf.has_section("Foo"),
"new ConfigParser should have no acknowledged "
"sections")
self.assertRaises(ConfigParser.NoSectionError,
cf.options, "Foo")
self.assertRaises(ConfigParser.NoSectionError,
cf.set, "foo", "bar", "value")
self.get_error(ConfigParser.NoSectionError, "foo", "bar")
cf.add_section("foo")
self.get_error(ConfigParser.NoOptionError, "foo", "bar")
def get_error(self, exc, section, option):
try:
self.cf.get(section, option)
except exc, e:
return e
else:
self.fail("expected exception type %s.%s"
% (exc.__module__, exc.__name__))
def test_boolean(self):
cf = self.fromstring(
"[BOOLTEST]\n"
"T1=1\n"
"T2=TRUE\n"
"T3=True\n"
"T4=oN\n"
"T5=yes\n"
"F1=0\n"
"F2=FALSE\n"
"F3=False\n"
"F4=oFF\n"
"F5=nO\n"
"E1=2\n"
"E2=foo\n"
"E3=-1\n"
"E4=0.1\n"
"E5=FALSE AND MORE"
)
for x in range(1, 5):
self.assertTrue(cf.getboolean('BOOLTEST', 't%d' % x))
self.assertFalse(cf.getboolean('BOOLTEST', 'f%d' % x))
self.assertRaises(ValueError,
cf.getboolean, 'BOOLTEST', 'e%d' % x)
def test_weird_errors(self):
cf = self.newconfig()
cf.add_section("Foo")
self.assertRaises(ConfigParser.DuplicateSectionError,
cf.add_section, "Foo")
def test_write(self):
config_string = (
"[Long Line]\n"
"foo: this line is much, much longer than my editor\n"
" likes it.\n"
"[DEFAULT]\n"
"foo: another very\n"
" long line\n"
)
if self.allow_no_value:
config_string += (
"[Valueless]\n"
"option-without-value\n"
)
cf = self.fromstring(config_string)
output = StringIO.StringIO()
cf.write(output)
expect_string = (
"[DEFAULT]\n"
"foo = another very\n"
"\tlong line\n"
"\n"
"[Long Line]\n"
"foo = this line is much, much longer than my editor\n"
"\tlikes it.\n"
"\n"
)
if self.allow_no_value:
expect_string += (
"[Valueless]\n"
"option-without-value\n"
"\n"
)
self.assertEqual(output.getvalue(), expect_string)
def test_set_string_types(self):
cf = self.fromstring("[sect]\n"
"option1=foo\n")
# Check that we don't get an exception when setting values in
# an existing section using strings:
class mystr(str):
pass
cf.set("sect", "option1", "splat")
cf.set("sect", "option1", mystr("splat"))
cf.set("sect", "option2", "splat")
cf.set("sect", "option2", mystr("splat"))
try:
unicode
except NameError:
pass
else:
cf.set("sect", "option1", unicode("splat"))
cf.set("sect", "option2", unicode("splat"))
def test_read_returns_file_list(self):
file1 = test_support.findfile("cfgparser.1")
# check when we pass a mix of readable and non-readable files:
cf = self.newconfig()
parsed_files = cf.read([file1, "nonexistent-file"])
self.assertEqual(parsed_files, [file1])
self.assertEqual(cf.get("Foo Bar", "foo"), "newbar")
# check when we pass only a filename:
cf = self.newconfig()
parsed_files = cf.read(file1)
self.assertEqual(parsed_files, [file1])
self.assertEqual(cf.get("Foo Bar", "foo"), "newbar")
# check when we pass only missing files:
cf = self.newconfig()
parsed_files = cf.read(["nonexistent-file"])
self.assertEqual(parsed_files, [])
# check when we pass no files:
cf = self.newconfig()
parsed_files = cf.read([])
self.assertEqual(parsed_files, [])
# shared by subclasses
def get_interpolation_config(self):
return self.fromstring(
"[Foo]\n"
"bar=something %(with1)s interpolation (1 step)\n"
"bar9=something %(with9)s lots of interpolation (9 steps)\n"
"bar10=something %(with10)s lots of interpolation (10 steps)\n"
"bar11=something %(with11)s lots of interpolation (11 steps)\n"
"with11=%(with10)s\n"
"with10=%(with9)s\n"
"with9=%(with8)s\n"
"with8=%(With7)s\n"
"with7=%(WITH6)s\n"
"with6=%(with5)s\n"
"With5=%(with4)s\n"
"WITH4=%(with3)s\n"
"with3=%(with2)s\n"
"with2=%(with1)s\n"
"with1=with\n"
"\n"
"[Mutual Recursion]\n"
"foo=%(bar)s\n"
"bar=%(foo)s\n"
"\n"
"[Interpolation Error]\n"
"name=%(reference)s\n",
# no definition for 'reference'
defaults={"getname": "%(__name__)s"})
def check_items_config(self, expected):
cf = self.fromstring(
"[section]\n"
"name = value\n"
"key: |%(name)s| \n"
"getdefault: |%(default)s|\n"
"getname: |%(__name__)s|",
defaults={"default": "<default>"})
L = list(cf.items("section"))
L.sort()
self.assertEqual(L, expected)
class ConfigParserTestCase(TestCaseBase):
config_class = ConfigParser.ConfigParser
allow_no_value = True
def test_interpolation(self):
rawval = {
ConfigParser.ConfigParser: ("something %(with11)s "
"lots of interpolation (11 steps)"),
ConfigParser.SafeConfigParser: "%(with1)s",
}
cf = self.get_interpolation_config()
eq = self.assertEqual
eq(cf.get("Foo", "getname"), "Foo")
eq(cf.get("Foo", "bar"), "something with interpolation (1 step)")
eq(cf.get("Foo", "bar9"),
"something with lots of interpolation (9 steps)")
eq(cf.get("Foo", "bar10"),
"something with lots of interpolation (10 steps)")
self.get_error(ConfigParser.InterpolationDepthError, "Foo", "bar11")
def test_interpolation_missing_value(self):
self.get_interpolation_config()
e = self.get_error(ConfigParser.InterpolationError,
"Interpolation Error", "name")
self.assertEqual(e.reference, "reference")
self.assertEqual(e.section, "Interpolation Error")
self.assertEqual(e.option, "name")
def test_items(self):
self.check_items_config([('default', '<default>'),
('getdefault', '|<default>|'),
('getname', '|section|'),
('key', '|value|'),
('name', 'value')])
def test_set_nonstring_types(self):
cf = self.newconfig()
cf.add_section('non-string')
cf.set('non-string', 'int', 1)
cf.set('non-string', 'list', [0, 1, 1, 2, 3, 5, 8, 13, '%('])
cf.set('non-string', 'dict', {'pi': 3.14159, '%(': 1,
'%(list)': '%(list)'})
cf.set('non-string', 'string_with_interpolation', '%(list)s')
cf.set('non-string', 'no-value')
self.assertEqual(cf.get('non-string', 'int', raw=True), 1)
self.assertRaises(TypeError, cf.get, 'non-string', 'int')
self.assertEqual(cf.get('non-string', 'list', raw=True),
[0, 1, 1, 2, 3, 5, 8, 13, '%('])
self.assertRaises(TypeError, cf.get, 'non-string', 'list')
self.assertEqual(cf.get('non-string', 'dict', raw=True),
{'pi': 3.14159, '%(': 1, '%(list)': '%(list)'})
self.assertRaises(TypeError, cf.get, 'non-string', 'dict')
self.assertEqual(cf.get('non-string', 'string_with_interpolation',
raw=True), '%(list)s')
self.assertRaises(ValueError, cf.get, 'non-string',
'string_with_interpolation', raw=False)
self.assertEqual(cf.get('non-string', 'no-value'), None)
class MultilineValuesTestCase(TestCaseBase):
config_class = ConfigParser.ConfigParser
wonderful_spam = ("I'm having spam spam spam spam "
"spam spam spam beaked beans spam "
"spam spam and spam!").replace(' ', '\t\n')
def setUp(self):
cf = self.newconfig()
for i in range(100):
s = 'section{}'.format(i)
cf.add_section(s)
for j in range(10):
cf.set(s, 'lovely_spam{}'.format(j), self.wonderful_spam)
with open(test_support.TESTFN, 'w') as f:
cf.write(f)
def tearDown(self):
os.unlink(test_support.TESTFN)
def test_dominating_multiline_values(self):
# we're reading from file because this is where the code changed
# during performance updates in Python 3.2
cf_from_file = self.newconfig()
with open(test_support.TESTFN) as f:
cf_from_file.readfp(f)
self.assertEqual(cf_from_file.get('section8', 'lovely_spam4'),
self.wonderful_spam.replace('\t\n', '\n'))
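    # Note on the assertion above: wonderful_spam stores every space as
    # "\t\n", i.e. the value is written out as tab-indented continuation
    # lines; the parser joins continuations back with "\n" when reading,
    # hence the replace('\t\n', '\n') in the expected value.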
class RawConfigParserTestCase(TestCaseBase):
config_class = ConfigParser.RawConfigParser
def test_interpolation(self):
cf = self.get_interpolation_config()
eq = self.assertEqual
eq(cf.get("Foo", "getname"), "%(__name__)s")
eq(cf.get("Foo", "bar"),
"something %(with1)s interpolation (1 step)")
eq(cf.get("Foo", "bar9"),
"something %(with9)s lots of interpolation (9 steps)")
eq(cf.get("Foo", "bar10"),
"something %(with10)s lots of interpolation (10 steps)")
eq(cf.get("Foo", "bar11"),
"something %(with11)s lots of interpolation (11 steps)")
def test_items(self):
self.check_items_config([('default', '<default>'),
('getdefault', '|%(default)s|'),
('getname', '|%(__name__)s|'),
('key', '|%(name)s|'),
('name', 'value')])
def test_set_nonstring_types(self):
cf = self.newconfig()
cf.add_section('non-string')
cf.set('non-string', 'int', 1)
cf.set('non-string', 'list', [0, 1, 1, 2, 3, 5, 8, 13])
cf.set('non-string', 'dict', {'pi': 3.14159})
self.assertEqual(cf.get('non-string', 'int'), 1)
self.assertEqual(cf.get('non-string', 'list'),
[0, 1, 1, 2, 3, 5, 8, 13])
self.assertEqual(cf.get('non-string', 'dict'), {'pi': 3.14159})
class SafeConfigParserTestCase(ConfigParserTestCase):
config_class = ConfigParser.SafeConfigParser
def test_safe_interpolation(self):
# See http://www.python.org/sf/511737
cf = self.fromstring("[section]\n"
"option1=xxx\n"
"option2=%(option1)s/xxx\n"
"ok=%(option1)s/%%s\n"
"not_ok=%(option2)s/%%s")
self.assertEqual(cf.get("section", "ok"), "xxx/%s")
self.assertEqual(cf.get("section", "not_ok"), "xxx/xxx/%s")
def test_set_malformatted_interpolation(self):
cf = self.fromstring("[sect]\n"
"option1=foo\n")
self.assertEqual(cf.get('sect', "option1"), "foo")
self.assertRaises(ValueError, cf.set, "sect", "option1", "%foo")
self.assertRaises(ValueError, cf.set, "sect", "option1", "foo%")
self.assertRaises(ValueError, cf.set, "sect", "option1", "f%oo")
self.assertEqual(cf.get('sect', "option1"), "foo")
# bug #5741: double percents are *not* malformed
cf.set("sect", "option2", "foo%%bar")
self.assertEqual(cf.get("sect", "option2"), "foo%bar")
def test_set_nonstring_types(self):
cf = self.fromstring("[sect]\n"
"option1=foo\n")
# Check that we get a TypeError when setting non-string values
# in an existing section:
self.assertRaises(TypeError, cf.set, "sect", "option1", 1)
self.assertRaises(TypeError, cf.set, "sect", "option1", 1.0)
self.assertRaises(TypeError, cf.set, "sect", "option1", object())
self.assertRaises(TypeError, cf.set, "sect", "option2", 1)
self.assertRaises(TypeError, cf.set, "sect", "option2", 1.0)
self.assertRaises(TypeError, cf.set, "sect", "option2", object())
def test_add_section_default_1(self):
cf = self.newconfig()
self.assertRaises(ValueError, cf.add_section, "default")
def test_add_section_default_2(self):
cf = self.newconfig()
self.assertRaises(ValueError, cf.add_section, "DEFAULT")
class SafeConfigParserTestCaseNoValue(SafeConfigParserTestCase):
allow_no_value = True
class TestChainMap(unittest.TestCase):
def test_issue_12717(self):
d1 = dict(red=1, green=2)
d2 = dict(green=3, blue=4)
dcomb = d2.copy()
dcomb.update(d1)
cm = ConfigParser._Chainmap(d1, d2)
self.assertIsInstance(cm.keys(), list)
self.assertEqual(set(cm.keys()), set(dcomb.keys())) # keys()
self.assertEqual(set(cm.values()), set(dcomb.values())) # values()
self.assertEqual(set(cm.items()), set(dcomb.items())) # items()
self.assertEqual(set(cm), set(dcomb)) # __iter__ ()
self.assertEqual(cm, dcomb) # __eq__()
self.assertEqual([cm[k] for k in dcomb], dcomb.values()) # __getitem__()
klist = 'red green blue black brown'.split()
self.assertEqual([cm.get(k, 10) for k in klist],
[dcomb.get(k, 10) for k in klist]) # get()
self.assertEqual([k in cm for k in klist],
[k in dcomb for k in klist]) # __contains__()
with test_support.check_py3k_warnings():
self.assertEqual([cm.has_key(k) for k in klist],
[dcomb.has_key(k) for k in klist]) # has_key()
class Issue7005TestCase(unittest.TestCase):
"""Test output when None is set() as a value and allow_no_value == False.
http://bugs.python.org/issue7005
"""
expected_output = "[section]\noption = None\n\n"
def prepare(self, config_class):
# This is the default, but that's the point.
cp = config_class(allow_no_value=False)
cp.add_section("section")
cp.set("section", "option", None)
sio = StringIO.StringIO()
cp.write(sio)
return sio.getvalue()
def test_none_as_value_stringified(self):
output = self.prepare(ConfigParser.ConfigParser)
self.assertEqual(output, self.expected_output)
def test_none_as_value_stringified_raw(self):
output = self.prepare(ConfigParser.RawConfigParser)
self.assertEqual(output, self.expected_output)
class SortedTestCase(RawConfigParserTestCase):
def newconfig(self, defaults=None):
self.cf = self.config_class(defaults=defaults, dict_type=SortedDict)
return self.cf
def test_sorted(self):
self.fromstring("[b]\n"
"o4=1\n"
"o3=2\n"
"o2=3\n"
"o1=4\n"
"[a]\n"
"k=v\n")
output = StringIO.StringIO()
self.cf.write(output)
self.assertEqual(output.getvalue(),
"[a]\n"
"k = v\n\n"
"[b]\n"
"o1 = 4\n"
"o2 = 3\n"
"o3 = 2\n"
"o4 = 1\n\n")
class ExceptionPicklingTestCase(unittest.TestCase):
"""Tests for issue #13760: ConfigParser exceptions are not picklable."""
def test_error(self):
import pickle
e1 = ConfigParser.Error('value')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(repr(e1), repr(e2))
def test_nosectionerror(self):
import pickle
e1 = ConfigParser.NoSectionError('section')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(e1.args, e2.args)
self.assertEqual(e1.section, e2.section)
self.assertEqual(repr(e1), repr(e2))
def test_nooptionerror(self):
import pickle
e1 = ConfigParser.NoOptionError('option', 'section')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(e1.args, e2.args)
self.assertEqual(e1.section, e2.section)
self.assertEqual(e1.option, e2.option)
self.assertEqual(repr(e1), repr(e2))
def test_duplicatesectionerror(self):
import pickle
e1 = ConfigParser.DuplicateSectionError('section')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(e1.args, e2.args)
self.assertEqual(e1.section, e2.section)
self.assertEqual(repr(e1), repr(e2))
def test_interpolationerror(self):
import pickle
e1 = ConfigParser.InterpolationError('option', 'section', 'msg')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(e1.args, e2.args)
self.assertEqual(e1.section, e2.section)
self.assertEqual(e1.option, e2.option)
self.assertEqual(repr(e1), repr(e2))
def test_interpolationmissingoptionerror(self):
import pickle
e1 = ConfigParser.InterpolationMissingOptionError('option', 'section',
'rawval', 'reference')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(e1.args, e2.args)
self.assertEqual(e1.section, e2.section)
self.assertEqual(e1.option, e2.option)
self.assertEqual(e1.reference, e2.reference)
self.assertEqual(repr(e1), repr(e2))
def test_interpolationsyntaxerror(self):
import pickle
e1 = ConfigParser.InterpolationSyntaxError('option', 'section', 'msg')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(e1.args, e2.args)
self.assertEqual(e1.section, e2.section)
self.assertEqual(e1.option, e2.option)
self.assertEqual(repr(e1), repr(e2))
def test_interpolationdeptherror(self):
import pickle
e1 = ConfigParser.InterpolationDepthError('option', 'section',
'rawval')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(e1.args, e2.args)
self.assertEqual(e1.section, e2.section)
self.assertEqual(e1.option, e2.option)
self.assertEqual(repr(e1), repr(e2))
def test_parsingerror(self):
import pickle
e1 = ConfigParser.ParsingError('source')
e1.append(1, 'line1')
e1.append(2, 'line2')
e1.append(3, 'line3')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(e1.args, e2.args)
self.assertEqual(e1.filename, e2.filename)
self.assertEqual(e1.errors, e2.errors)
self.assertEqual(repr(e1), repr(e2))
def test_missingsectionheadererror(self):
import pickle
e1 = ConfigParser.MissingSectionHeaderError('filename', 123, 'line')
pickled = pickle.dumps(e1)
e2 = pickle.loads(pickled)
self.assertEqual(e1.message, e2.message)
self.assertEqual(e1.args, e2.args)
self.assertEqual(e1.line, e2.line)
self.assertEqual(e1.filename, e2.filename)
self.assertEqual(e1.lineno, e2.lineno)
self.assertEqual(repr(e1), repr(e2))
def test_main():
test_support.run_unittest(
ConfigParserTestCase,
MultilineValuesTestCase,
RawConfigParserTestCase,
SafeConfigParserTestCase,
SafeConfigParserTestCaseNoValue,
SortedTestCase,
Issue7005TestCase,
TestChainMap,
ExceptionPicklingTestCase,
)
if __name__ == "__main__":
test_main()
| mit | 3,451,846,095,849,251,000 | 36.593496 | 81 | 0.540333 | false |
ondrejmular/pcs | pcs/utils.py | 3 | 87422 | # pylint: disable=too-many-lines
import os
import sys
import subprocess
import xml.dom.minidom
from xml.dom.minidom import parseString
import xml.etree.ElementTree as ET
import re
import json
import tempfile
import signal
import time
from io import BytesIO
import tarfile
import getpass
import base64
import threading
import logging
from functools import lru_cache
from urllib.parse import urlencode
from typing import (
Any,
Dict,
Sequence,
Tuple,
)
from pcs import settings, usage
from pcs.common import (
file as pcs_file,
file_type_codes,
pcs_pycurl as pycurl,
)
from pcs.common.host import PcsKnownHost
from pcs.common.reports import ReportProcessor
from pcs.common.reports.item import ReportItemList
from pcs.common.reports.messages import CibUpgradeFailedToMinimalRequiredVersion
from pcs.common.services.interfaces import ServiceManagerInterface
from pcs.common.services.errors import ManageServiceError
from pcs.cli.common import middleware
from pcs.cli.common.env_cli import Env
from pcs.cli.common.errors import CmdLineInputError
from pcs.cli.common.lib_wrapper import Library
from pcs.cli.common.parse_args import InputModifiers
from pcs.cli.reports import (
output as reports_output,
process_library_reports,
ReportProcessorToConsole,
)
import pcs.cli.booth.env
from pcs.cli.file import metadata as cli_file_metadata
import pcs.lib.corosync.config_parser as corosync_conf_parser
from pcs.lib.corosync.config_facade import ConfigFacade as corosync_conf_facade
from pcs.lib.env import LibraryEnvironment
from pcs.lib.errors import LibraryError
from pcs.lib.external import (
CommandRunner,
is_proxy_set,
)
from pcs.lib.file.instance import FileInstance as LibFileInstance
from pcs.lib.interface.config import ParserErrorException
from pcs.lib.pacemaker.live import get_cluster_status_dom
from pcs.lib.pacemaker.state import ClusterState
from pcs.lib.pacemaker.values import (
is_boolean,
is_score as is_score_value,
timeout_to_seconds as get_timeout_seconds,
validate_id,
)
from pcs.lib.services import (
get_service_manager as _get_service_manager,
service_exception_to_report,
)
# pylint: disable=invalid-name
# pylint: disable=too-many-branches
# pylint: disable=too-many-locals
# pylint: disable=too-many-nested-blocks
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# usefile & filename variables are set in pcs module
usefile = False
filename = ""
# Note: not properly typed
pcs_options: Dict[Any, Any] = {}
class UnknownPropertyException(Exception):
pass
def getValidateWithVersion(dom):
"""
Commandline options: no options
"""
cib = dom.getElementsByTagName("cib")
if len(cib) != 1:
err("Bad cib")
cib = cib[0]
version = cib.getAttribute("validate-with")
r = re.compile(r"pacemaker-(\d+)\.(\d+)\.?(\d+)?")
    m = r.match(version)
    if not m:
        err("cannot parse CIB schema version: '%s'" % version)
    major = int(m.group(1))
minor = int(m.group(2))
rev = int(m.group(3) or 0)
return (major, minor, rev)
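# Example (hypothetical CIB attribute): validate-with="pacemaker-2.10" maps to
# (2, 10, 0) and validate-with="pacemaker-3.0.1" maps to (3, 0, 1); the third
# regex group is optional and defaults to 0.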
# Check the current pacemaker version in cib and upgrade it if necessary
# Returns False if not upgraded and True if upgraded
def checkAndUpgradeCIB(major, minor, rev):
"""
Commandline options:
* -f - CIB file
"""
cmajor, cminor, crev = getValidateWithVersion(get_cib_dom())
    # no upgrade needed if the current schema version is already >= required;
    # a plain tuple comparison gives the lexicographic check
    if (cmajor, cminor, crev) >= (major, minor, rev):
        return False
cluster_upgrade()
return True
def cluster_upgrade():
"""
Commandline options:
* -f - CIB file
"""
output, retval = run(["cibadmin", "--upgrade", "--force"])
if retval != 0:
err("unable to upgrade cluster: %s" % output)
if (
output.strip()
== "Upgrade unnecessary: Schema is already the latest available"
):
return
print("Cluster CIB has been upgraded to latest version")
def cluster_upgrade_to_version(required_version):
"""
Commandline options:
* -f - CIB file
"""
checkAndUpgradeCIB(*required_version)
dom = get_cib_dom()
current_version = getValidateWithVersion(dom)
if current_version < required_version:
err(
CibUpgradeFailedToMinimalRequiredVersion(
".".join([str(x) for x in current_version]),
".".join([str(x) for x in required_version]),
).message
)
return dom
# Check status of node
def checkStatus(node):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return sendHTTPRequest(node, "remote/status", None, False, False)
# Check and see if we're authorized (faster than a status check)
def checkAuthorization(node):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return sendHTTPRequest(node, "remote/check_auth", None, False, False)
def get_uid_gid_file_name(uid, gid):
"""
Commandline options: no options
"""
return "pcs-uidgid-%s-%s" % (uid, gid)
# Reads in uid file and returns dict of values {'uid':'theuid', 'gid':'thegid'}
def read_uid_gid_file(uidgid_filename):
"""
Commandline options: no options
"""
uidgid = {}
with open(
os.path.join(settings.corosync_uidgid_dir, uidgid_filename), "r"
) as myfile:
data = myfile.read().split("\n")
in_uidgid = False
for line in data:
line = re.sub(r"#.*", "", line)
if not in_uidgid:
if re.search(r"uidgid.*{", line):
in_uidgid = True
else:
continue
matches = re.search(r"uid:\s*(\S+)", line)
if matches:
uidgid["uid"] = matches.group(1)
matches = re.search(r"gid:\s*(\S+)", line)
if matches:
uidgid["gid"] = matches.group(1)
return uidgid
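# A sketch of the corosync uidgid file format the parser above expects
# (hypothetical values):
#   uidgid {
#     uid: haclient
#     gid: haclient
#   }
# For such a file, read_uid_gid_file() returns
# {'uid': 'haclient', 'gid': 'haclient'}.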
def write_uid_gid_file(uid, gid):
"""
Commandline options: no options
"""
orig_filename = get_uid_gid_file_name(uid, gid)
uidgid_filename = orig_filename
counter = 0
if find_uid_gid_files(uid, gid):
err("uidgid file with uid=%s and gid=%s already exists" % (uid, gid))
while os.path.exists(
os.path.join(settings.corosync_uidgid_dir, uidgid_filename)
):
counter = counter + 1
uidgid_filename = orig_filename + "-" + str(counter)
    data = "uidgid {\n  uid: %s\n  gid: %s\n}\n" % (uid, gid)
with open(
os.path.join(settings.corosync_uidgid_dir, uidgid_filename), "w"
) as uidgid_file:
uidgid_file.write(data)
def find_uid_gid_files(uid, gid):
"""
Commandline options: no options
"""
if uid == "" and gid == "":
return []
found_files = []
uid_gid_files = os.listdir(settings.corosync_uidgid_dir)
for uidgid_file in uid_gid_files:
uid_gid_dict = read_uid_gid_file(uidgid_file)
if ("uid" in uid_gid_dict and uid == "") or (
"uid" not in uid_gid_dict and uid != ""
):
continue
if ("gid" in uid_gid_dict and gid == "") or (
"gid" not in uid_gid_dict and gid != ""
):
continue
if "uid" in uid_gid_dict and uid != uid_gid_dict["uid"]:
continue
if "gid" in uid_gid_dict and gid != uid_gid_dict["gid"]:
continue
found_files.append(uidgid_file)
return found_files
# Remove all uid/gid files with the specified uid/gid; return False if none
# was found
def remove_uid_gid_file(uid, gid):
"""
Commandline options: no options
"""
if uid == "" and gid == "":
return False
file_removed = False
for uidgid_file in find_uid_gid_files(uid, gid):
os.remove(os.path.join(settings.corosync_uidgid_dir, uidgid_file))
file_removed = True
return file_removed
@lru_cache()
def read_known_hosts_file():
"""
Commandline options: no options
"""
data = {}
try:
if os.getuid() != 0:
known_hosts_raw_file = pcs_file.RawFile(
cli_file_metadata.for_file_type(file_type_codes.PCS_KNOWN_HOSTS)
)
# json.loads handles bytes, it expects utf-8, 16 or 32 encoding
known_hosts_struct = json.loads(known_hosts_raw_file.read())
else:
# TODO remove
# This is here to provide known-hosts to functions not yet
# overhauled to pcs.lib. Cli should never read known hosts from
# /var/lib/pcsd/.
known_hosts_instance = LibFileInstance.for_known_hosts()
known_hosts_struct = known_hosts_instance.read_to_structure()
# TODO use known hosts facade for getting info from json struct once the
# facade exists
data = {
name: PcsKnownHost.from_known_host_file_dict(name, host)
for name, host in known_hosts_struct["known_hosts"].items()
}
except LibraryError as e:
# TODO remove
# This is here to provide known-hosts to functions not yet
# overhauled to pcs.lib. Cli should never read known hosts from
# /var/lib/pcsd/.
process_library_reports(e.args)
except ParserErrorException as e:
# TODO remove
# This is here to provide known-hosts to functions not yet
# overhauled to pcs.lib. Cli should never read known hosts from
# /var/lib/pcsd/.
process_library_reports(
known_hosts_instance.parser_exception_to_report_list(e)
)
except pcs_file.RawFileError as e:
reports_output.warn("Unable to read the known-hosts file: " + e.reason)
except json.JSONDecodeError as e:
reports_output.warn(f"Unable to parse the known-hosts file: {e}")
except (TypeError, KeyError):
reports_output.warn("Warning: Unable to parse the known-hosts file.")
return data
def repeat_if_timeout(send_http_request_function, repeat_count=15):
"""
Commandline options: no options
NOTE: callback send_http_request_function may use --request-timeout
"""
def repeater(node, *args, **kwargs):
repeats_left = repeat_count
while True:
retval, output = send_http_request_function(node, *args, **kwargs)
if (
retval != 2
or "Operation timed out" not in output
or repeats_left < 1
):
                # did not time out OR the repeat limit was exceeded
return retval, output
repeats_left = repeats_left - 1
if "--debug" in pcs_options:
print("{0}: {1}, trying again...".format(node, output))
return repeater
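# Usage sketch (hypothetical wrapper name): make a request helper retry on
# "Operation timed out" responses:
#   get_conf_with_retry = repeat_if_timeout(getCorosyncConfig, repeat_count=5)
#   retval, output = get_conf_with_retry("node1")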
# Get the corosync.conf file from the specified node
def getCorosyncConfig(node):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return sendHTTPRequest(node, "remote/get_corosync_conf", None, False, False)
def setCorosyncConfig(node, config):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
data = urlencode({"corosync_conf": config})
(status, data) = sendHTTPRequest(node, "remote/set_corosync_conf", data)
if status != 0:
err("Unable to set corosync config: {0}".format(data))
def getPacemakerNodeStatus(node):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return sendHTTPRequest(
node, "remote/pacemaker_node_status", None, False, False
)
def startCluster(node, quiet=False, timeout=None):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return sendHTTPRequest(
node,
"remote/cluster_start",
printResult=False,
printSuccess=not quiet,
timeout=timeout,
)
def stopPacemaker(node, quiet=False, force=True):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return stopCluster(
node, pacemaker=True, corosync=False, quiet=quiet, force=force
)
def stopCorosync(node, quiet=False, force=True):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return stopCluster(
node, pacemaker=False, corosync=True, quiet=quiet, force=force
)
def stopCluster(node, quiet=False, pacemaker=True, corosync=True, force=True):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
data = dict()
timeout = None
if pacemaker and not corosync:
data["component"] = "pacemaker"
timeout = 2 * 60
elif corosync and not pacemaker:
data["component"] = "corosync"
if force:
data["force"] = 1
data = urlencode(data)
return sendHTTPRequest(
node,
"remote/cluster_stop",
data,
printResult=False,
printSuccess=not quiet,
timeout=timeout,
)
def enableCluster(node):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return sendHTTPRequest(node, "remote/cluster_enable", None, False, True)
def disableCluster(node):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return sendHTTPRequest(node, "remote/cluster_disable", None, False, True)
def destroyCluster(node, quiet=False):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return sendHTTPRequest(
node, "remote/cluster_destroy", None, not quiet, not quiet
)
def restoreConfig(node, tarball_data):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
data = urlencode({"tarball": tarball_data})
return sendHTTPRequest(node, "remote/config_restore", data, False, True)
def pauseConfigSyncing(node, delay_seconds=300):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
data = urlencode({"sync_thread_pause": delay_seconds})
return sendHTTPRequest(node, "remote/set_sync_options", data, False, False)
def resumeConfigSyncing(node):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
data = urlencode({"sync_thread_resume": 1})
return sendHTTPRequest(node, "remote/set_sync_options", data, False, False)
# Send an HTTP request to a node and return a tuple (status, data).
# If status is 0, data contains the server response;
# otherwise data contains an error message.
# Status codes:
# 0 = Success
# 1 = HTTP Error
# 2 = No response
# 3 = Auth Error
# 4 = Permission denied
def sendHTTPRequest(
host, request, data=None, printResult=True, printSuccess=True, timeout=None
):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
* --debug
"""
port = None
addr = host
token = None
known_host = read_known_hosts_file().get(host, None)
# TODO: do not allow communication with unknown host
if known_host:
port = known_host.dest.port
addr = known_host.dest.addr
token = known_host.token
if port is None:
port = settings.pcsd_default_port
url = "https://{host}:{port}/{request}".format(
host="[{0}]".format(addr) if ":" in addr else addr,
request=request,
port=port,
)
if "--debug" in pcs_options:
print("Sending HTTP Request to: " + url)
print("Data: {0}".format(data))
def __debug_callback(data_type, debug_data):
prefixes = {
# pylint: disable=no-member
pycurl.DEBUG_TEXT: b"* ",
pycurl.DEBUG_HEADER_IN: b"< ",
pycurl.DEBUG_HEADER_OUT: b"> ",
pycurl.DEBUG_DATA_IN: b"<< ",
pycurl.DEBUG_DATA_OUT: b">> ",
}
if data_type in prefixes:
debug_output.write(prefixes[data_type])
debug_output.write(debug_data)
if not debug_data.endswith(b"\n"):
debug_output.write(b"\n")
output = BytesIO()
debug_output = BytesIO()
cookies = __get_cookie_list(token)
if not timeout:
timeout = settings.default_request_timeout
timeout = pcs_options.get("--request-timeout", timeout)
handler = pycurl.Curl()
handler.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTPS)
handler.setopt(pycurl.URL, url.encode("utf-8"))
handler.setopt(pycurl.WRITEFUNCTION, output.write)
handler.setopt(pycurl.VERBOSE, 1)
handler.setopt(pycurl.NOSIGNAL, 1) # required for multi-threading
handler.setopt(pycurl.DEBUGFUNCTION, __debug_callback)
handler.setopt(pycurl.TIMEOUT_MS, int(timeout * 1000))
handler.setopt(pycurl.SSL_VERIFYHOST, 0)
handler.setopt(pycurl.SSL_VERIFYPEER, 0)
handler.setopt(pycurl.HTTPHEADER, ["Expect: "])
if cookies:
handler.setopt(pycurl.COOKIE, ";".join(cookies).encode("utf-8"))
if data:
handler.setopt(pycurl.COPYPOSTFIELDS, data.encode("utf-8"))
try:
handler.perform()
response_data = output.getvalue().decode("utf-8")
response_code = handler.getinfo(pycurl.RESPONSE_CODE)
if printResult or printSuccess:
print(host + ": " + response_data.strip())
if "--debug" in pcs_options:
print("Response Code: {0}".format(response_code))
print("--Debug Response Start--\n{0}".format(response_data))
print("--Debug Response End--")
print("Communication debug info for calling: {0}".format(url))
print("--Debug Communication Output Start--")
print(debug_output.getvalue().decode("utf-8", "ignore"))
print("--Debug Communication Output End--")
print()
if response_code == 401:
output = (
3,
(
"Unable to authenticate to {node} - (HTTP error: {code}), "
"try running 'pcs host auth {node}'"
).format(node=host, code=response_code),
)
elif response_code == 403:
output = (
4,
"{node}: Permission denied - (HTTP error: {code})".format(
node=host, code=response_code
),
)
elif response_code >= 400:
output = (
1,
"Error connecting to {node} - (HTTP error: {code})".format(
node=host, code=response_code
),
)
else:
output = (0, response_data)
if printResult and output[0] != 0:
print(output[1])
return output
except pycurl.error as e:
if is_proxy_set(os.environ):
print(
"Warning: Proxy is set in environment variables, try "
"disabling it"
)
# pylint: disable=unbalanced-tuple-unpacking
dummy_errno, reason = e.args
if "--debug" in pcs_options:
print("Response Reason: {0}".format(reason))
msg = (
"Unable to connect to {host}, check if pcsd is running there or try "
"setting higher timeout with --request-timeout option ({reason})"
).format(host=host, reason=reason)
if printResult:
print(msg)
return (2, msg)
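# Usage sketch (hypothetical node name): a data-less request; the first item
# of the result tells how to interpret the second (see the status codes above):
#   retval, data = sendHTTPRequest("node1", "remote/status", None, False, False)
#   if retval == 0:
#       ...  # data is the server response body
#   else:
#       ...  # data is an error message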
def __get_cookie_list(token):
"""
Commandline options: no options
"""
cookies = []
if token:
cookies.append("token=" + token)
if os.geteuid() == 0:
for name in ("CIB_user", "CIB_user_groups"):
if name in os.environ and os.environ[name].strip():
value = os.environ[name].strip()
                # Be safe about characters in env variables and base64-encode
                # them. We cannot do that for CIB_user (to stay backward
                # compatible), so we at least remove disallowed characters.
if name == "CIB_user":
value = re.sub(r"[^!-~]", "", value).replace(";", "")
else:
# python3 requires the value to be bytes not str
value = base64.b64encode(value.encode("utf8")).decode(
"utf-8"
)
cookies.append("{0}={1}".format(name, value))
return cookies
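# Illustration (assumed environment): with CIB_user_groups="haclient admins"
# set for root, the resulting cookie is
# "CIB_user_groups=aGFjbGllbnQgYWRtaW5z" (the value base64-encoded), while a
# CIB_user value is only stripped of ';' and of characters outside '!'..'~'.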
def get_corosync_conf_facade(conf_text=None):
"""
Commandline options:
* --corosync_conf - path to a mocked corosync.conf is set directly to
settings
"""
try:
return corosync_conf_facade(
corosync_conf_parser.Parser.parse(
(getCorosyncConf() if conf_text is None else conf_text).encode(
"utf-8"
)
)
)
except corosync_conf_parser.CorosyncConfParserException as e:
return err("Unable to parse corosync.conf: %s" % e)
def getNodeAttributesFromPacemaker():
"""
Commandline options: no options
"""
try:
return [
node.attrs
for node in ClusterState(
get_cluster_status_dom(cmd_runner())
).node_section.nodes
]
except LibraryError as e:
return process_library_reports(e.args)
def hasCorosyncConf():
"""
Commandline options:
* --corosync_conf - path to a mocked corosync.conf is set directly to
settings
"""
return os.path.isfile(settings.corosync_conf_file)
def getCorosyncConf():
"""
Commandline options:
* --corosync_conf - path to a mocked corosync.conf is set directly to
settings
"""
    try:
        with open(settings.corosync_conf_file, "r", encoding="utf-8") as f:
            out = f.read()
    except IOError as e:
        err("Unable to read %s: %s" % (settings.corosync_conf_file, e.strerror))
return out
def reloadCorosync():
"""
Commandline options: no options
"""
output, retval = run(["corosync-cfgtool", "-R"])
return output, retval
def getCorosyncActiveNodes():
"""
Commandline options: no options
"""
output, retval = run(["corosync-cmapctl"])
if retval != 0:
return []
nodename_re = re.compile(r"^nodelist\.node\.(\d+)\.name .*= (.*)", re.M)
nodestatus_re = re.compile(
r"^runtime\.members\.(\d+).status .*= (.*)", re.M
)
nodenameid_mapping_re = re.compile(
r"nodelist\.node\.(\d+)\.nodeid .*= (\d+)", re.M
)
node_names = nodename_re.findall(output)
index_to_id = dict(nodenameid_mapping_re.findall(output))
id_to_status = dict(nodestatus_re.findall(output))
node_status = {}
for index, node_name in node_names:
if index in index_to_id:
nodeid = index_to_id[index]
if nodeid in id_to_status:
node_status[node_name] = id_to_status[nodeid]
else:
print("Error mapping %s" % node_name)
nodes_active = []
for node, status in node_status.items():
if status == "joined":
nodes_active.append(node)
return nodes_active
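# The regexes above are meant to match corosync-cmapctl output lines shaped
# like this (sketch, values are hypothetical):
#   nodelist.node.0.name (str) = node1
#   nodelist.node.0.nodeid (u32) = 1
#   runtime.members.1.status (str) = joined
# A node is reported as active when its runtime status is "joined".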
# Decide whether the corosync-qdevice service needs to be handled when
# managing cluster services
def need_to_handle_qdevice_service():
    """
    Commandline options: no options
    NOTE: --corosync_conf (path to a mocked corosync.conf) is set directly to
    settings, but it doesn't make sense for the contexts in which this
    function is used
    """
    try:
        with open(settings.corosync_conf_file, "rb") as conf_file:
            cfg = corosync_conf_facade(
                corosync_conf_parser.Parser.parse(conf_file.read())
            )
return cfg.has_quorum_device()
except (EnvironmentError, corosync_conf_parser.CorosyncConfParserException):
# corosync.conf not present or not valid => no qdevice specified
return False
# Restore default behavior before starting subprocesses
def subprocess_setup():
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def touch_cib_file(cib_filename):
if not os.path.isfile(cib_filename):
try:
write_empty_cib(cib_filename)
except EnvironmentError as e:
err(
"Unable to write to file: '{0}': '{1}'".format(
cib_filename, str(e)
)
)
# Run a command with the given environment and return (output, retval)
# DEPRECATED, please use lib.external.CommandRunner via utils.cmd_runner()
def run(
args,
ignore_stderr=False,
string_for_stdin=None,
env_extend=None,
binary_output=False,
):
"""
Commandline options:
* -f - CIB file (effective only for some pacemaker tools)
* --debug
"""
if not env_extend:
env_extend = dict()
env_var = env_extend
env_var.update(dict(os.environ))
env_var["LC_ALL"] = "C"
if usefile:
env_var["CIB_file"] = filename
touch_cib_file(filename)
command = args[0]
if command[0:3] == "crm" or command in [
"cibadmin",
"iso8601",
"stonith_admin",
]:
args[0] = os.path.join(settings.pacemaker_binaries, command)
elif command[0:8] == "corosync":
args[0] = os.path.join(settings.corosync_binaries, command)
try:
if "--debug" in pcs_options:
print("Running: " + " ".join(args))
if string_for_stdin:
print("--Debug Input Start--\n" + string_for_stdin)
print("--Debug Input End--")
# Some commands react differently if you give them anything via stdin
if string_for_stdin is not None:
stdin_pipe = subprocess.PIPE
else:
stdin_pipe = subprocess.DEVNULL
# pylint: disable=subprocess-popen-preexec-fn, consider-using-with
p = subprocess.Popen(
args,
stdin=stdin_pipe,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if ignore_stderr else subprocess.STDOUT),
preexec_fn=subprocess_setup,
close_fds=True,
env=env_var,
# decodes newlines and in python3 also converts bytes to str
universal_newlines=(not binary_output),
)
output, dummy_stderror = p.communicate(string_for_stdin)
returnVal = p.returncode
if "--debug" in pcs_options:
print("Return Value: {0}".format(returnVal))
print(("--Debug Output Start--\n{0}".format(output)).rstrip())
print("--Debug Output End--")
print()
except OSError as e:
print(e.strerror)
err("unable to locate command: " + args[0])
return output, returnVal
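# Usage sketch: run() is handy for one-off calls to pacemaker/corosync tools,
# e.g. (hypothetical call):
#   output, retval = run(["crm_mon", "--one-shot"], ignore_stderr=True)
#   if retval != 0:
#       err("crm_mon failed: %s" % output)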
@lru_cache()
def cmd_runner():
"""
Commandline options:
* -f - CIB file
"""
env_vars = dict()
if usefile:
env_vars["CIB_file"] = filename
env_vars.update(os.environ)
env_vars["LC_ALL"] = "C"
return CommandRunner(
logging.getLogger("pcs"), get_report_processor(), env_vars
)
def run_pcsdcli(command, data=None):
"""
Commandline options:
* --request-timeout - timeout for HTTP request, applicable for commands:
* remove_known_hosts - only when running on cluster node (sync will
be initiated)
* auth
* send_local_configs
"""
if not data:
data = dict()
env_var = dict()
if "--debug" in pcs_options:
env_var["PCSD_DEBUG"] = "true"
if "--request-timeout" in pcs_options:
env_var["PCSD_NETWORK_TIMEOUT"] = str(pcs_options["--request-timeout"])
else:
env_var["PCSD_NETWORK_TIMEOUT"] = str(settings.default_request_timeout)
pcsd_dir_path = settings.pcsd_exec_location
pcsdcli_path = os.path.join(pcsd_dir_path, "pcsd-cli.rb")
if settings.pcsd_gem_path is not None:
env_var["GEM_HOME"] = settings.pcsd_gem_path
stdout, dummy_stderr, retval = cmd_runner().run(
[settings.ruby_executable, "-I" + pcsd_dir_path, pcsdcli_path, command],
json.dumps(data),
env_var,
)
try:
output_json = json.loads(stdout)
for key in ["status", "text", "data"]:
if key not in output_json:
output_json[key] = None
output = "".join(output_json["log"])
# check if some requests timed out, if so print message about it
if "error: operation_timedout" in output:
print("Error: Operation timed out")
# check if there are any connection failures due to proxy in pcsd and
# print warning if so
proxy_msg = "Proxy is set in environment variables, try disabling it"
if proxy_msg in output:
print("Warning: {0}".format(proxy_msg))
except ValueError:
output_json = {
"status": "bad_json_output",
"text": stdout,
"data": None,
}
return output_json, retval
def set_token_to_accept(token):
output, retval = run_pcsdcli("set_token_to_accept", dict(token=token))
if retval == 0:
if output["status"] == "access_denied":
err("Access denied")
if output["status"] != "ok":
err("Unable to communicate with pcsd")
else:
err("Unable to communicate with pcsd")
def auth_hosts_token(host_dict):
output, retval = run_pcsdcli("auth_with_token", dict(nodes=host_dict))
if retval == 0:
if output["status"] == "access_denied":
err("Access denied")
if output["status"] != "ok":
err("Unable to communicate with pcsd")
else:
err("Unable to communicate with pcsd")
def auth_hosts(host_dict):
"""
Commandline options:
* --request-timeout - timeout for HTTP request
"""
output, retval = run_pcsdcli("auth", dict(nodes=host_dict))
if retval == 0 and output["status"] == "access_denied":
err("Access denied")
if retval == 0 and output["status"] == "ok" and output["data"]:
failed = False
try:
if not output["data"]["sync_successful"]:
err(
"Some nodes had a newer known-hosts than the local node. "
+ "Local node's known-hosts were updated. "
+ "Please repeat the authentication if needed."
)
for node, result in output["data"]["auth_responses"].items():
if result["status"] == "ok":
print("{0}: Authorized".format(node))
elif result["status"] == "bad_password":
err(f"{node}: Username and/or password is incorrect", False)
failed = True
elif result["status"] in ("noresponse", "error"):
err("Unable to communicate with {0}".format(node), False)
failed = True
else:
err("Unexpected response from {0}".format(node), False)
failed = True
if output["data"]["sync_nodes_err"]:
err(
(
"Unable to synchronize and save known-hosts on nodes: "
+ "{0}. Run 'pcs host auth {1}' to make sure the nodes "
+ "are authorized."
).format(
", ".join(output["data"]["sync_nodes_err"]),
" ".join(output["data"]["sync_nodes_err"]),
)
)
except (ValueError, KeyError):
err("Unable to communicate with pcsd")
if failed:
sys.exit(1)
return
err("Unable to communicate with pcsd")
def call_local_pcsd(argv, std_in=None):
"""
Commandline options:
* --request-timeout - timeout of call to local pcsd
"""
# some commands cannot be run under a non-root account
# so we pass those commands to locally running pcsd to execute them
# returns [list_of_errors, exit_code, stdout, stderr]
data = {
"command": json.dumps(argv),
}
if std_in:
data["stdin"] = std_in
data_send = urlencode(data)
code, output = sendHTTPRequest(
"localhost", "run_pcs", data_send, False, False
)
if code == 3: # not authenticated
return [
[
"Unable to authenticate against the local pcsd. Run the same "
"command as root or authenticate yourself to the local pcsd "
"using command 'pcs client local-auth'"
],
1,
"",
"",
]
if code != 0: # http error connecting to localhost
return [[output], 1, "", ""]
try:
output_json = json.loads(output)
for key in ["status", "data"]:
if key not in output_json:
output_json[key] = None
except ValueError:
return [["Unable to communicate with pcsd"], 1, "", ""]
if output_json["status"] == "bad_command":
return [["Command not allowed"], 1, "", ""]
if output_json["status"] == "access_denied":
return [["Access denied"], 1, "", ""]
if output_json["status"] != "ok" or not output_json["data"]:
return [["Unable to communicate with pcsd"], 1, "", ""]
try:
exitcode = output_json["data"]["code"]
std_out = output_json["data"]["stdout"]
std_err = output_json["data"]["stderr"]
return [[], exitcode, std_out, std_err]
except KeyError:
return [["Unable to communicate with pcsd"], 1, "", ""]
def map_for_error_list(callab, iterab):
"""
Commandline options: no options
NOTE: callback 'callab' may use some options
"""
error_list = []
for item in iterab:
retval, error = callab(item)
if retval != 0:
error_list.append(error)
return error_list
def run_parallel(worker_list, wait_seconds=1):
"""
Commandline options: no options
"""
thread_list = []
for worker in worker_list:
thread = threading.Thread(target=worker)
thread.daemon = True
thread.start()
thread_list.append(thread)
while thread_list:
for thread in thread_list:
thread.join(wait_seconds)
if not thread.is_alive():
thread_list.remove(thread)
def create_task(report, action, node, *args, **kwargs):
"""
Commandline options: no options
"""
def worker():
returncode, output = action(node, *args, **kwargs)
report(node, returncode, output)
return worker
def create_task_list(report, action, node_list, *args, **kwargs):
"""
Commandline options: no options
"""
return [
create_task(report, action, node, *args, **kwargs) for node in node_list
]
def parallel_for_nodes(action, node_list, *args, **kwargs):
"""
Commandline options: no options
NOTE: callback 'action' may use some cmd options
"""
node_errors = dict()
def report(node, returncode, output):
message = "{0}: {1}".format(node, output.strip())
print(message)
if returncode != 0:
node_errors[node] = message
run_parallel(create_task_list(report, action, node_list, *args, **kwargs))
return node_errors
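# Usage sketch (hypothetical node names): run an action on several nodes at
# once and collect per-node failures:
#   errors = parallel_for_nodes(stopPacemaker, ["node1", "node2"], quiet=True)
#   # 'errors' maps each failed node name to its error message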
# Check if something exists in the CIB
def does_exist(xpath_query):
"""
Commandline options:
* -f - CIB file
"""
args = ["cibadmin", "-Q", "--xpath", xpath_query]
dummy_output, retval = run(args)
if retval != 0:
return False
return True
def get_group_children(group_id):
"""
Commandline options: no options
"""
child_resources = []
dom = get_cib_dom()
groups = dom.getElementsByTagName("group")
for g in groups:
if g.getAttribute("id") == group_id:
for child in g.childNodes:
if child.nodeType != xml.dom.minidom.Node.ELEMENT_NODE:
continue
if child.tagName == "primitive":
child_resources.append(child.getAttribute("id"))
return child_resources
def dom_get_clone_ms_resource(dom, clone_ms_id):
"""
Commandline options: no options
"""
clone_ms = dom_get_clone(dom, clone_ms_id) or dom_get_master(
dom, clone_ms_id
)
if clone_ms:
return dom_elem_get_clone_ms_resource(clone_ms)
return None
def dom_elem_get_clone_ms_resource(clone_ms):
"""
Commandline options: no options
"""
for child in clone_ms.childNodes:
if (
child.nodeType == xml.dom.minidom.Node.ELEMENT_NODE
and child.tagName in ["group", "primitive"]
):
return child
return None
def dom_get_resource_clone_ms_parent(dom, resource_id):
"""
Commandline options: no options
"""
resource = dom_get_resource(dom, resource_id) or dom_get_group(
dom, resource_id
)
if resource:
return dom_get_parent_by_tag_names(resource, ["clone", "master"])
return None
def dom_get_resource_bundle_parent(dom, resource_id):
"""
Commandline options: no options
"""
resource = dom_get_resource(dom, resource_id)
if resource:
return dom_get_parent_by_tag_names(resource, ["bundle"])
return None
def dom_get_master(dom, master_id):
"""
Commandline options: no options
"""
for master in dom.getElementsByTagName("master"):
if master.getAttribute("id") == master_id:
return master
return None
def dom_get_clone(dom, clone_id):
"""
Commandline options: no options
"""
for clone in dom.getElementsByTagName("clone"):
if clone.getAttribute("id") == clone_id:
return clone
return None
def dom_get_group(dom, group_id):
"""
Commandline options: no options
"""
for group in dom.getElementsByTagName("group"):
if group.getAttribute("id") == group_id:
return group
return None
def dom_get_bundle(dom, bundle_id):
"""
Commandline options: no options
"""
for bundle in dom.getElementsByTagName("bundle"):
if bundle.getAttribute("id") == bundle_id:
return bundle
return None
def dom_get_resource_bundle(bundle_el):
"""
Commandline options: no options
"""
for child in bundle_el.childNodes:
if (
child.nodeType == xml.dom.minidom.Node.ELEMENT_NODE
and child.tagName == "primitive"
):
return child
return None
def dom_get_group_clone(dom, group_id):
"""
Commandline options: no options
"""
for clone in dom.getElementsByTagName("clone"):
group = dom_get_group(clone, group_id)
if group:
return group
return None
def dom_get_group_masterslave(dom, group_id):
"""
Commandline options: no options
"""
for master in dom.getElementsByTagName("master"):
group = dom_get_group(master, group_id)
if group:
return group
return None
def dom_get_resource(dom, resource_id):
"""
Commandline options: no options
"""
for primitive in dom.getElementsByTagName("primitive"):
if primitive.getAttribute("id") == resource_id:
return primitive
return None
def dom_get_any_resource(dom, resource_id):
"""
Commandline options: no options
"""
return (
dom_get_resource(dom, resource_id)
or dom_get_group(dom, resource_id)
or dom_get_clone(dom, resource_id)
or dom_get_master(dom, resource_id)
)
def is_stonith_resource(resource_id):
"""
Commandline options:
* -f - CIB file
"""
return does_exist(
"//primitive[@id='" + resource_id + "' and @class='stonith']"
)
def dom_get_resource_clone(dom, resource_id):
"""
Commandline options: no options
"""
for clone in dom.getElementsByTagName("clone"):
resource = dom_get_resource(clone, resource_id)
if resource:
return resource
return None
def dom_get_resource_masterslave(dom, resource_id):
"""
Commandline options: no options
"""
for master in dom.getElementsByTagName("master"):
resource = dom_get_resource(master, resource_id)
if resource:
return resource
return None
# returns a tuple (is_valid, error_message, correct_resource_id_if_exists)
# NOTE: there is duplicate code in pcs/lib/cib/constraint/constraint.py,
# please use the function in pcs/lib/cib/constraint/constraint.py instead
def validate_constraint_resource(dom, resource_id):
"""
Commandline options:
* --force - allow constraint on any resource
"""
resource_el = (
dom_get_clone(dom, resource_id)
or dom_get_master(dom, resource_id)
or dom_get_bundle(dom, resource_id)
)
if resource_el:
# clones, masters and bundles are always valid
return True, "", resource_id
resource_el = dom_get_resource(dom, resource_id) or dom_get_group(
dom, resource_id
)
if not resource_el:
return False, "Resource '%s' does not exist" % resource_id, None
clone_el = dom_get_resource_clone_ms_parent(
dom, resource_id
) or dom_get_resource_bundle_parent(dom, resource_id)
if not clone_el:
# a primitive and a group is valid if not in a clone nor a master nor a
# bundle
return True, "", resource_id
if "--force" in pcs_options:
return True, "", clone_el.getAttribute("id")
if clone_el.tagName in ["clone", "master"]:
return (
False,
"%s is a clone resource, you should use the clone id: %s "
"when adding constraints. Use --force to override."
% (resource_id, clone_el.getAttribute("id")),
clone_el.getAttribute("id"),
)
if clone_el.tagName == "bundle":
return (
False,
"%s is a bundle resource, you should use the bundle id: %s "
"when adding constraints. Use --force to override."
% (resource_id, clone_el.getAttribute("id")),
clone_el.getAttribute("id"),
)
return True, "", resource_id
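# Example (hypothetical ids): for a primitive "d1" wrapped in clone "d1-clone",
# validate_constraint_resource(dom, "d1") returns
#   (False, "d1 is a clone resource, ...", "d1-clone")
# unless --force is given, in which case it returns (True, "", "d1-clone").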
def dom_get_resource_remote_node_name(dom_resource):
"""
Commandline options: no options
"""
if dom_resource.tagName != "primitive":
return None
if (
dom_resource.getAttribute("class").lower() == "ocf"
and dom_resource.getAttribute("provider").lower() == "pacemaker"
and dom_resource.getAttribute("type").lower() == "remote"
):
return dom_resource.getAttribute("id")
return dom_get_meta_attr_value(dom_resource, "remote-node")
def dom_get_meta_attr_value(dom_resource, meta_name):
"""
Commandline options: no options
"""
for meta in dom_resource.getElementsByTagName("meta_attributes"):
for nvpair in meta.getElementsByTagName("nvpair"):
if nvpair.getAttribute("name") == meta_name:
return nvpair.getAttribute("value")
return None
def dom_get_element_with_id(dom, tag_name, element_id):
"""
Commandline options: no options
"""
for elem in dom.getElementsByTagName(tag_name):
if elem.hasAttribute("id") and elem.getAttribute("id") == element_id:
return elem
return None
def dom_get_node(dom, node_name):
"""
Commandline options: no options
"""
for e in dom.getElementsByTagName("node"):
if e.hasAttribute("uname") and e.getAttribute("uname") == node_name:
return e
return None
def dom_get_children_by_tag_name(dom_el, tag_name):
"""
Commandline options: no options
"""
return [
node
for node in dom_el.childNodes
if node.nodeType == xml.dom.minidom.Node.ELEMENT_NODE
and node.tagName == tag_name
]
def dom_get_parent_by_tag_names(dom_el, tag_names):
"""
Commandline options: no options
"""
parent = dom_el.parentNode
while parent:
if not isinstance(parent, xml.dom.minidom.Element):
return None
if parent.tagName in tag_names:
return parent
parent = parent.parentNode
return None
def dom_attrs_to_list(dom_el, with_id=False):
"""
Commandline options: no options
"""
attributes = [
"%s=%s" % (name, value)
for name, value in sorted(dom_el.attributes.items())
if name != "id"
]
if with_id:
attributes.append("(id:%s)" % (dom_el.getAttribute("id")))
return attributes
# moved to pcs.lib.pacemaker.state
def get_resource_for_running_check(cluster_state, resource_id, stopped=False):
"""
Commandline options: no options
"""
for clone in cluster_state.getElementsByTagName("clone"):
if clone.getAttribute("id") == resource_id:
for child in clone.childNodes:
if child.nodeType == child.ELEMENT_NODE and child.tagName in [
"resource",
"group",
]:
resource_id = child.getAttribute("id")
# in a clone a resource can have an id of '<name>:N'
if ":" in resource_id:
parts = resource_id.rsplit(":", 1)
if parts[1].isdigit():
resource_id = parts[0]
break
for group in cluster_state.getElementsByTagName("group"):
# If resource is a clone it can have an id of '<resource name>:N'
if group.getAttribute("id") == resource_id or group.getAttribute(
"id"
).startswith(resource_id + ":"):
if stopped:
elem = group.getElementsByTagName("resource")[0]
else:
elem = group.getElementsByTagName("resource")[-1]
resource_id = elem.getAttribute("id")
return resource_id
# moved to pcs.lib.pacemaker.state
# see pcs.lib.commands.resource for usage
def resource_running_on(resource, passed_state=None, stopped=False):
"""
Commandline options:
    * -f - has an effect, but it makes little sense to check the state of a
      resource in a CIB file
"""
nodes_started = []
nodes_master = []
nodes_slave = []
state = passed_state if passed_state else getClusterState()
resource_original = resource
resource = get_resource_for_running_check(state, resource, stopped)
resources = state.getElementsByTagName("resource")
for res in resources:
# If resource is a clone it can have an id of '<resource name>:N'
# If resource is a clone it will be found more than once - cannot break
if (
res.getAttribute("id") == resource
or res.getAttribute("id").startswith(resource + ":")
) and res.getAttribute("failed") != "true":
for node in res.getElementsByTagName("node"):
node_name = node.getAttribute("name")
if res.getAttribute("role") == "Started":
nodes_started.append(node_name)
elif res.getAttribute("role") == "Master":
nodes_master.append(node_name)
elif res.getAttribute("role") == "Slave":
nodes_slave.append(node_name)
if not nodes_started and not nodes_master and not nodes_slave:
message = "Resource '%s' is not running on any node" % resource_original
else:
message_parts = []
for alist, label in (
(nodes_started, "running"),
(nodes_master, "master"),
(nodes_slave, "slave"),
):
if alist:
alist.sort()
message_parts.append(
"%s on node%s %s"
% (label, "s" if len(alist) > 1 else "", ", ".join(alist))
)
message = "Resource '%s' is %s." % (
resource_original,
"; ".join(message_parts),
)
return {
"message": message,
"is_running": bool(nodes_started or nodes_master or nodes_slave),
"nodes_started": nodes_started,
"nodes_master": nodes_master,
"nodes_slave": nodes_slave,
}
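# Illustrative return value of resource_running_on (hypothetical state):
#   {
#       "message": "Resource 'vip' is running on node node2.",
#       "is_running": True,
#       "nodes_started": ["node2"],
#       "nodes_master": [],
#       "nodes_slave": [],
#   }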
def validate_wait_get_timeout(need_cib_support=True):
"""
Commandline options:
* --wait
* -f - to check if -f and --wait are not used simultaneously
"""
if need_cib_support and usefile:
err("Cannot use '-f' together with '--wait'")
wait_timeout = pcs_options["--wait"]
if wait_timeout is None:
return wait_timeout
wait_timeout = get_timeout_seconds(wait_timeout)
if wait_timeout is None:
err(
"%s is not a valid number of seconds to wait"
% pcs_options["--wait"]
)
return wait_timeout
# Return matches from the CIB with the xpath_query
def get_cib_xpath(xpath_query):
"""
Commandline options:
* -f - CIB file
"""
args = ["cibadmin", "-Q", "--xpath", xpath_query]
output, retval = run(args)
if retval != 0:
return ""
return output
def get_cib(scope=None):
"""
Commandline options:
* -f - CIB file
"""
command = ["cibadmin", "-l", "-Q"]
if scope:
command.append("--scope=%s" % scope)
output, retval = run(command)
if retval != 0:
if retval == 105 and scope:
err("unable to get cib, scope '%s' not present in cib" % scope)
else:
err("unable to get cib")
return output
def get_cib_dom(cib_xml=None):
"""
Commandline options:
* -f - CIB file
"""
# pylint: disable=bare-except
if cib_xml is None:
cib_xml = get_cib()
try:
dom = parseString(cib_xml)
return dom
except:
return err("unable to get cib")
def get_cib_etree(cib_xml=None):
"""
Commandline options:
* -f - CIB file
"""
# pylint: disable=bare-except
if cib_xml is None:
cib_xml = get_cib()
try:
root = ET.fromstring(cib_xml)
return root
except:
return err("unable to get cib")
def is_etree(var):
"""
Commandline options: no options
"""
return var.__class__ == xml.etree.ElementTree.Element
# Replace only configuration section of cib with dom passed
def replace_cib_configuration(dom):
"""
Commandline options:
* -f - CIB file
"""
if is_etree(dom):
# etree returns string in bytes: b'xml'
# python 3 removed .encode() from byte strings
# run(...) calls subprocess.Popen.communicate which calls encode...
# so there is bytes to str conversion
new_dom = ET.tostring(dom).decode()
elif hasattr(dom, "toxml"):
new_dom = dom.toxml()
else:
new_dom = dom
cmd = ["cibadmin", "--replace", "-V", "--xml-pipe", "-o", "configuration"]
output, retval = run(cmd, False, new_dom)
if retval != 0:
err("Unable to update cib\n" + output)
def is_valid_cib_scope(scope):
"""
Commandline options: no options
"""
return scope in [
"acls",
"alerts",
"configuration",
"constraints",
"crm_config",
"fencing-topology",
"nodes",
"op_defaults",
"resources",
"rsc_defaults",
"tags",
]
# Checks to see if id exists in the xml dom passed
# DEPRECATED use lxml version available in pcs.lib.cib.tools
def does_id_exist(dom, check_id):
"""
Commandline options: no options
"""
# do not search in /cib/status, it may contain references to previously
# existing and deleted resources and thus preventing creating them again
if is_etree(dom):
for elem in dom.findall(
str('(/cib/*[name()!="status"]|/*[name()!="cib"])/*')
):
if elem.get("id") == check_id:
return True
else:
document = (
dom
if isinstance(dom, xml.dom.minidom.Document)
else dom.ownerDocument
)
cib_found = False
for cib in dom_get_children_by_tag_name(document, "cib"):
cib_found = True
for section in cib.childNodes:
if section.nodeType != xml.dom.minidom.Node.ELEMENT_NODE:
continue
if section.tagName == "status":
continue
for elem in section.getElementsByTagName("*"):
if elem.getAttribute("id") == check_id:
return True
if not cib_found:
for elem in document.getElementsByTagName("*"):
if elem.getAttribute("id") == check_id:
return True
return False
# Returns check_id if it doesn't exist in the dom, otherwise it adds an integer
# to the end of the id and increments it until a unique id is found
# DEPRECATED use lxml version available in pcs.lib.cib.tools
def find_unique_id(dom, check_id):
"""
Commandline options: no options
"""
counter = 1
temp_id = check_id
while does_id_exist(dom, temp_id):
temp_id = check_id + "-" + str(counter)
counter += 1
return temp_id
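# Example: if ids "dummy" and "dummy-1" already exist in the dom,
# find_unique_id(dom, "dummy") returns "dummy-2".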
# Checks to see if the specified operation already exists in passed set of
# operations
# pacemaker differentiates between operations only by name and interval
def operation_exists(operations_el, op_el):
"""
Commandline options: no options
"""
existing = []
op_name = op_el.getAttribute("name")
op_interval = get_timeout_seconds(op_el.getAttribute("interval"), True)
for op in operations_el.getElementsByTagName("op"):
if (
op.getAttribute("name") == op_name
and get_timeout_seconds(op.getAttribute("interval"), True)
== op_interval
):
existing.append(op)
return existing
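# Example: a monitor op with interval="10s" and one with interval="10" count
# as the same operation here, because get_timeout_seconds normalizes both
# intervals to 10.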
def operation_exists_by_name(operations_el, op_el):
"""
Commandline options: no options
"""
existing = []
op_name = op_el.getAttribute("name")
op_role = op_el.getAttribute("role") or "Started"
ocf_check_level = None
if op_name == "monitor":
ocf_check_level = get_operation_ocf_check_level(op_el)
for op in operations_el.getElementsByTagName("op"):
if op.getAttribute("name") == op_name:
if op_name != "monitor":
existing.append(op)
elif (
op.getAttribute("role") or "Started"
) == op_role and ocf_check_level == get_operation_ocf_check_level(
op
):
existing.append(op)
return existing
def get_operation_ocf_check_level(operation_el):
"""
Commandline options: no options
"""
for attr_el in operation_el.getElementsByTagName("instance_attributes"):
for nvpair_el in attr_el.getElementsByTagName("nvpair"):
if nvpair_el.getAttribute("name") == "OCF_CHECK_LEVEL":
return nvpair_el.getAttribute("value")
return None
def get_node_attributes(filter_node=None, filter_attr=None):
"""
Commandline options:
* -f - CIB file
"""
node_config = get_cib_xpath("//nodes")
if node_config == "":
err("unable to get crm_config, is pacemaker running?")
dom = parseString(node_config).documentElement
nas = dict()
for node in dom.getElementsByTagName("node"):
nodename = node.getAttribute("uname")
if filter_node is not None and nodename != filter_node:
continue
for attributes in node.getElementsByTagName("instance_attributes"):
for nvp in attributes.getElementsByTagName("nvpair"):
attr_name = nvp.getAttribute("name")
if filter_attr is not None and attr_name != filter_attr:
continue
if nodename not in nas:
nas[nodename] = dict()
nas[nodename][attr_name] = nvp.getAttribute("value")
            # Use just the first instance_attributes element; we don't
            # support attributes with rules yet.
break
return nas
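# Illustrative return value (hypothetical attributes):
#   get_node_attributes(filter_node="node1")
#   -> {"node1": {"site": "A", "standby": "off"}}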
def set_node_attribute(prop, value, node):
"""
Commandline options:
* -f - CIB file
* --force - no error if attribute to delete doesn't exist
"""
if value == "":
o, r = run(
[
"crm_attribute",
"-t",
"nodes",
"--node",
node,
"--name",
prop,
"--query",
]
)
if r != 0 and "--force" not in pcs_options:
err(
"attribute: '%s' doesn't exist for node: '%s'" % (prop, node),
False,
)
# This return code is used by pcsd
sys.exit(2)
o, r = run(
[
"crm_attribute",
"-t",
"nodes",
"--node",
node,
"--name",
prop,
"--delete",
]
)
else:
o, r = run(
[
"crm_attribute",
"-t",
"nodes",
"--node",
node,
"--name",
prop,
"--update",
value,
]
)
if r != 0:
err("unable to set attribute %s\n%s" % (prop, o))
# If the property exists, remove it and replace it with the new property
# If the value is blank, then we just remove it
def set_cib_property(prop, value, cib_dom=None):
"""
Commandline options:
* -f - CIB file
* --force - no error when removing non existing property
"""
update_cib = cib_dom is None
if update_cib:
crm_config = get_cib_xpath("//crm_config")
if crm_config == "":
err("unable to get crm_config, is pacemaker running?")
crm_config = parseString(crm_config).documentElement
else:
document = cib_dom.getElementsByTagName("crm_config")
if not document:
err("unable to get crm_config, is pacemaker running?")
crm_config = document[0]
property_found = False
cluster_property_set = dom_prepare_child_element(
crm_config, "cluster_property_set", "cib-bootstrap-options"
)
for child in cluster_property_set.getElementsByTagName("nvpair"):
if child.getAttribute("name") == prop:
property_found = True
break
if not property_found and value == "" and "--force" not in pcs_options:
err("can't remove property: '{0}' that doesn't exist".format(prop))
dom_update_nv_pair(
cluster_property_set, prop, value, "cib-bootstrap-options-"
)
if update_cib:
replace_cib_configuration(crm_config)
def getTerminalSize(fd=1):
"""
Returns height and width of current terminal. First tries to get
size via termios.TIOCGWINSZ, then from environment. Defaults to 25
lines x 80 columns if both methods fail.
:param fd: file descriptor (default: 1=stdout)
Commandline options: no options
"""
# pylint: disable=bare-except
try:
# pylint: disable=import-outside-toplevel
import fcntl
import termios
import struct
hw = struct.unpack(
str("hh"), fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234")
)
except:
try:
            # cast to int so both the termios and env-var branches return numbers
            hw = (int(os.environ["LINES"]), int(os.environ["COLUMNS"]))
except:
hw = (25, 80)
return hw
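# Example usage (a minimal sketch):
#   height, width = getTerminalSize()
#   separator = "-" * width  # width falls back to 80 when no size can be found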
def get_terminal_input(message=None):
"""
Commandline options: no options
"""
if message:
sys.stdout.write(message)
sys.stdout.flush()
try:
return input("")
except EOFError:
return ""
except KeyboardInterrupt:
print("Interrupted")
sys.exit(1)
def get_terminal_password(message="Password: "):
"""
Commandline options: no options
"""
if sys.stdin.isatty():
try:
return getpass.getpass(message)
except KeyboardInterrupt:
print("Interrupted")
sys.exit(1)
else:
return get_terminal_input(message)
# Returns an xml dom containing the current status of the cluster
# DEPRECATED, please use
# ClusterState(lib.pacemaker.live.get_cluster_status_dom()) instead
def getClusterState():
"""
Commandline options:
* -f - CIB file
"""
output, returncode = run(["crm_mon", "--help-all"])
format_option = (
"--output-as=xml" if "--output-as=" in output else "--as-xml"
)
xml_string, returncode = run(
["crm_mon", "--one-shot", format_option, "--inactive"],
ignore_stderr=True,
)
if returncode != 0:
err("error running crm_mon, is pacemaker running?")
return parseString(xml_string)
# DEPRECATED
# This should all be handled in pcs.lib. Currently, only pcs.config.config_show
# uses this, as it is still legacy architecture code.
def getClusterName():
"""
Commandline options:
* -f - CIB file if there is no corosync.conf
* --corosync_conf - path to a mocked corosync.conf is set directly to
settings
"""
try:
with open(settings.corosync_conf_file, "rb") as f:
conf = corosync_conf_facade(
corosync_conf_parser.Parser.parse(f.read())
)
cluster_name = conf.get_cluster_name()
if cluster_name:
return cluster_name
except (IOError, corosync_conf_parser.CorosyncConfParserException):
pass
# there is no corosync.conf on remote nodes, we can try to
# get cluster name from pacemaker
# pylint: disable=bare-except
try:
return get_set_properties("cluster-name")["cluster-name"]
except:
# we need to catch SystemExit (from utils.err), parse errors and so on
pass
return ""
def write_empty_cib(cibfile):
"""
Commandline options: no options
"""
empty_xml = """<?xml version="1.0" encoding="UTF-8"?>
<cib admin_epoch="0" epoch="1" num_updates="1" validate-with="pacemaker-1.2">
<configuration>
<crm_config/>
<nodes/>
<resources/>
<constraints/>
</configuration>
<status/>
</cib>"""
with open(cibfile, "w") as f:
f.write(empty_xml)
# Test if 'var' is a score or option (contains an '=')
def is_score_or_opt(var):
"""
Commandline options: no options
"""
if is_score(var):
return True
if var.find("=") != -1:
return True
return False
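# Examples (illustrative; plain integers and "INFINITY" are valid
# pacemaker scores):
#   is_score_or_opt("100")                      -> True (a score)
#   is_score_or_opt("resource-stickiness=100")  -> True (contains '=')
#   is_score_or_opt("node1")                    -> False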
def is_score(var):
"""
Commandline options: no options
"""
return is_score_value(var)
def validate_xml_id(var: str, description: str = "id") -> Tuple[bool, str]:
"""
Commandline options: no options
"""
report_list: ReportItemList = []
validate_id(var, description, report_list)
if report_list:
return False, report_list[0].message.message
return True, ""
# deprecated, moved to pcs.lib.pacemaker.live
def is_iso8601_date(var):
"""
Commandline options: no options
"""
# using pacemaker tool to check if a value is a valid pacemaker iso8601 date
dummy_output, retVal = run(["iso8601", "-d", var])
return retVal == 0
def err(errorText, exit_after_error=True):
sys.stderr.write("Error: %s\n" % errorText)
if exit_after_error:
sys.exit(1)
@lru_cache(typed=True)
def get_service_manager() -> ServiceManagerInterface:
return _get_service_manager(cmd_runner(), get_report_processor())
def enableServices():
"""
Commandline options: no options
"""
# do NOT handle SBD in here, it is started by pacemaker not systemd or init
service_list = ["corosync", "pacemaker"]
if need_to_handle_qdevice_service():
service_list.append("corosync-qdevice")
service_manager = get_service_manager()
report_item_list = []
for service in service_list:
try:
service_manager.enable(service)
except ManageServiceError as e:
report_item_list.append(service_exception_to_report(e))
if report_item_list:
raise LibraryError(*report_item_list)
def disableServices():
"""
Commandline options: no options
"""
# do NOT handle SBD in here, it is started by pacemaker not systemd or init
service_list = ["corosync", "pacemaker"]
if need_to_handle_qdevice_service():
service_list.append("corosync-qdevice")
service_manager = get_service_manager()
report_item_list = []
for service in service_list:
try:
service_manager.disable(service)
except ManageServiceError as e:
report_item_list.append(service_exception_to_report(e))
if report_item_list:
raise LibraryError(*report_item_list)
def start_service(service):
"""
Commandline options: no options
"""
service_manager = get_service_manager()
try:
service_manager.start(service)
except ManageServiceError as e:
raise LibraryError(service_exception_to_report(e)) from e
def stop_service(service):
"""
Commandline options: no options
"""
service_manager = get_service_manager()
try:
service_manager.stop(service)
except ManageServiceError as e:
raise LibraryError(service_exception_to_report(e)) from e
def write_file(path, data, permissions=0o644, binary=False):
"""
Commandline options:
* --force - overwrite a file if it already exists
"""
if os.path.exists(path):
if "--force" not in pcs_options:
return False, "'%s' already exists, use --force to overwrite" % path
try:
os.remove(path)
except EnvironmentError as e:
return False, "unable to remove '%s': %s" % (path, e)
mode = "wb" if binary else "w"
try:
with os.fdopen(
os.open(path, os.O_WRONLY | os.O_CREAT, permissions), mode
) as outfile:
outfile.write(data)
except EnvironmentError as e:
return False, "unable to write to '%s': %s" % (path, e)
return True, ""
def tar_add_file_data(
tarball,
data,
name,
mode=None,
uid=None,
gid=None,
uname=None,
gname=None,
mtime=None,
):
# pylint: disable=too-many-arguments
"""
Commandline options: no options
"""
info = tarfile.TarInfo(name)
info.size = len(data)
info.type = tarfile.REGTYPE
info.mtime = int(time.time()) if mtime is None else mtime
if mode is not None:
info.mode = mode
if uid is not None:
info.uid = uid
if gid is not None:
info.gid = gid
if uname is not None:
info.uname = uname
if gname is not None:
info.gname = gname
data_io = BytesIO(data)
tarball.addfile(info, data_io)
data_io.close()
# DEPRECATED, please use pcs.lib.pacemaker.live.simulate_cib
def simulate_cib(cib_dom):
"""
Commandline options: no options
"""
try:
with tempfile.NamedTemporaryFile(
mode="w+", suffix=".pcs"
) as new_cib_file, tempfile.NamedTemporaryFile(
mode="w+", suffix=".pcs"
) as transitions_file:
output, retval = run(
[
"crm_simulate",
"--simulate",
"--save-output",
new_cib_file.name,
"--save-graph",
transitions_file.name,
"--xml-pipe",
],
string_for_stdin=cib_dom.toxml(),
)
if retval != 0:
return err("Unable to run crm_simulate:\n%s" % output)
new_cib_file.seek(0)
transitions_file.seek(0)
return (
output,
parseString(transitions_file.read()),
parseString(new_cib_file.read()),
)
except (EnvironmentError, xml.parsers.expat.ExpatError) as e:
return err("Unable to run crm_simulate:\n%s" % e)
except xml.etree.ElementTree.ParseError as e:
return err("Unable to run crm_simulate:\n%s" % e)
# DEPRECATED
# please use pcs.lib.pacemaker.simulate.get_operations_from_transitions
def get_operations_from_transitions(transitions_dom):
"""
Commandline options: no options
"""
operation_list = []
watched_operations = (
"start",
"stop",
"promote",
"demote",
"migrate_from",
"migrate_to",
)
for rsc_op in transitions_dom.getElementsByTagName("rsc_op"):
primitives = rsc_op.getElementsByTagName("primitive")
if not primitives:
continue
if rsc_op.getAttribute("operation").lower() not in watched_operations:
continue
for prim in primitives:
prim_id = prim.getAttribute("id")
operation_list.append(
(
int(rsc_op.getAttribute("id")),
{
"id": prim_id,
"long_id": prim.getAttribute("long-id") or prim_id,
"operation": rsc_op.getAttribute("operation").lower(),
"on_node": rsc_op.getAttribute("on_node"),
},
)
)
operation_list.sort(key=lambda x: x[0])
op_list = [op[1] for op in operation_list]
return op_list
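# Shape of the returned list (illustrative; resource and node names are
# hypothetical):
#   [{'id': 'dummy', 'long_id': 'dummy', 'operation': 'stop',
#     'on_node': 'node-1'},
#    {'id': 'dummy', 'long_id': 'dummy', 'operation': 'start',
#     'on_node': 'node-2'}]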
def get_resources_location_from_operations(cib_dom, resources_operations):
"""
Commandline options:
* --force - allow constraints on any resource, may not have any effect as
an invalid constraint is ignored anyway
"""
locations = {}
for res_op in resources_operations:
operation = res_op["operation"]
if operation not in ("start", "promote", "migrate_from"):
continue
long_id = res_op["long_id"]
if long_id not in locations:
            # Move clone instances as if they were non-cloned resources, it
            # really works with current pacemaker (1.1.13-6). Otherwise there
            # is probably no way to move them other than setting their
            # stickiness to 0.
res_id = res_op["id"]
if ":" in res_id:
res_id = res_id.split(":")[0]
id_for_constraint = validate_constraint_resource(cib_dom, res_id)[2]
if not id_for_constraint:
continue
locations[long_id] = {
"id": res_op["id"],
"long_id": long_id,
"id_for_constraint": id_for_constraint,
}
if operation in ("start", "migrate_from"):
locations[long_id]["start_on_node"] = res_op["on_node"]
if operation == "promote":
locations[long_id]["promote_on_node"] = res_op["on_node"]
locations_clean = {
key: val
for key, val in locations.items()
if "start_on_node" in val or "promote_on_node" in val
}
return locations_clean
def get_remote_quorumtool_output(node):
"""
Commandline options:
* --request-timeout - timeout for HTTP requests
"""
return sendHTTPRequest(node, "remote/get_quorum_info", None, False, False)
# return True if quorumtool_output is a string returned when the node is off
def is_node_offline_by_quorumtool_output(quorum_info):
"""
Commandline options: no options
"""
return quorum_info.strip() == "Cannot initialize CMAP service"
def dom_prepare_child_element(dom_element, tag_name, id_candidate):
"""
Commandline options: no options
"""
child_elements = []
for child in dom_element.childNodes:
if child.nodeType == child.ELEMENT_NODE and child.tagName == tag_name:
child_elements.append(child)
if not child_elements:
dom = dom_element.ownerDocument
child_element = dom.createElement(tag_name)
child_element.setAttribute("id", find_unique_id(dom, id_candidate))
dom_element.appendChild(child_element)
else:
child_element = child_elements[0]
return child_element
def dom_update_nvset(dom_element, nvpair_tuples, tag_name, id_candidate):
"""
Commandline options: no options
"""
# Already ported to pcs.libcib.nvpair
# Do not ever remove the nvset element, even if it is empty. There may be
# ACLs set in pacemaker which allow "write" for nvpairs (adding, changing
# and removing) but not nvsets. In such a case, removing the nvset would
# cause the whole change to be rejected by pacemaker with a "permission
# denied" message.
# https://bugzilla.redhat.com/show_bug.cgi?id=1642514
if not nvpair_tuples:
return
only_removing = True
for name, value in nvpair_tuples:
if value != "":
only_removing = False
break
    # Do not use dom.getElementsByTagName, as that would get elements we do
    # not want. For example, if dom_element is a clone, we would get the
    # clone's attributes as well as the clone's primitive's attributes.
nvset_element_list = dom_get_children_by_tag_name(dom_element, tag_name)
# Do not create new nvset if we are only removing values from it.
if not nvset_element_list and only_removing:
return
if not nvset_element_list:
dom = dom_element.ownerDocument
nvset_element = dom.createElement(tag_name)
nvset_element.setAttribute("id", find_unique_id(dom, id_candidate))
dom_element.appendChild(nvset_element)
else:
nvset_element = nvset_element_list[0]
for name, value in nvpair_tuples:
dom_update_nv_pair(
nvset_element, name, value, nvset_element.getAttribute("id") + "-"
)
def dom_update_nv_pair(dom_element, name, value, id_prefix=""):
"""
Commandline options: no options
"""
# Do not ever remove the nvset element, even if it is empty. There may be
# ACLs set in pacemaker which allow "write" for nvpairs (adding, changing
# and removing) but not nvsets. In such a case, removing the nvset would
# cause the whole change to be rejected by pacemaker with a "permission
# denied" message.
# https://bugzilla.redhat.com/show_bug.cgi?id=1642514
dom = dom_element.ownerDocument
element_found = False
for el in dom_element.getElementsByTagName("nvpair"):
if el.getAttribute("name") == name:
element_found = True
if value == "":
dom_element.removeChild(el)
else:
el.setAttribute("value", value)
break
if not element_found and value != "":
el = dom.createElement("nvpair")
el.setAttribute("id", id_prefix + name)
el.setAttribute("name", name)
el.setAttribute("value", value)
dom_element.appendChild(el)
return dom_element
# Passed an array of strings ["a=b","c=d"], return array of tuples
# [("a","b"),("c","d")]
def convert_args_to_tuples(ra_values):
"""
Commandline options: no options
"""
ret = []
for ra_val in ra_values:
if ra_val.count("=") != 0:
split_val = ra_val.split("=", 1)
ret.append((split_val[0], split_val[1]))
return ret
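# Doctest-style example (mirrors the comment above; items without '='
# are silently dropped):
#   >>> convert_args_to_tuples(["a=b", "c=d", "bare"])
#   [('a', 'b'), ('c', 'd')]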
def is_int(val):
try:
int(val)
return True
except ValueError:
return False
def dom_update_utilization(dom_element, attributes, id_prefix=""):
"""
Commandline options: no options
"""
attr_tuples = []
for name, value in sorted(attributes.items()):
if value != "" and not is_int(value):
err(
"Value of utilization attribute must be integer: "
"'{0}={1}'".format(name, value)
)
attr_tuples.append((name, value))
dom_update_nvset(
dom_element,
attr_tuples,
"utilization",
id_prefix + dom_element.getAttribute("id") + "-utilization",
)
def dom_update_meta_attr(dom_element, attributes):
"""
Commandline options: no options
"""
dom_update_nvset(
dom_element,
attributes,
"meta_attributes",
dom_element.getAttribute("id") + "-meta_attributes",
)
def dom_update_instance_attr(dom_element, attributes):
"""
Commandline options: no options
"""
dom_update_nvset(
dom_element,
attributes,
"instance_attributes",
dom_element.getAttribute("id") + "-instance_attributes",
)
def get_utilization(element, filter_name=None):
"""
Commandline options: no options
"""
utilization = {}
for e in element.getElementsByTagName("utilization"):
for u in e.getElementsByTagName("nvpair"):
name = u.getAttribute("name")
if filter_name is not None and name != filter_name:
continue
utilization[name] = u.getAttribute("value")
# Use just first element of utilization attributes. We don't support
# utilization with rules just yet.
break
return utilization
def get_utilization_str(element, filter_name=None):
"""
Commandline options: no options
"""
output = []
for name, value in sorted(get_utilization(element, filter_name).items()):
output.append(name + "=" + value)
return " ".join(output)
def is_valid_cluster_property(prop_def_dict, property_name, value):
"""
Commandline options: no options
"""
if property_name not in prop_def_dict:
raise UnknownPropertyException(
"unknown cluster property: '{0}'".format(property_name)
)
return is_valid_cib_value(
prop_def_dict[property_name]["type"],
value,
prop_def_dict[property_name].get("enum", []),
)
def is_valid_cib_value(value_type, value, enum_options=()):
"""
Commandline options: no options
"""
value_type = value_type.lower()
if value_type == "enum":
return value in enum_options
if value_type == "boolean":
return is_boolean(value)
if value_type == "integer":
return is_score(value)
if value_type == "time":
return get_timeout_seconds(value) is not None
return True
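# Examples (illustrative):
#   is_valid_cib_value("enum", "freeze", ("stop", "freeze"))  -> True
#   is_valid_cib_value("boolean", "true")                     -> True
#   is_valid_cib_value("time", "60s")                         -> True
#   is_valid_cib_value("string", "anything")                  -> True (not validated)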
def get_cluster_properties_definition():
"""
Commandline options: no options
"""
# we don't want to change these properties
banned_props = ["dc-version", "cluster-infrastructure"]
basic_props = [
"batch-limit",
"no-quorum-policy",
"symmetric-cluster",
"enable-acl",
"stonith-enabled",
"stonith-action",
"pe-input-series-max",
"stop-orphan-resources",
"stop-orphan-actions",
"cluster-delay",
"start-failure-is-fatal",
"pe-error-series-max",
"pe-warn-series-max",
]
readable_names = {
"batch-limit": "Batch Limit",
"no-quorum-policy": "No Quorum Policy",
"symmetric-cluster": "Symmetric",
"stonith-enabled": "Stonith Enabled",
"stonith-action": "Stonith Action",
"cluster-delay": "Cluster Delay",
"stop-orphan-resources": "Stop Orphan Resources",
"stop-orphan-actions": "Stop Orphan Actions",
"start-failure-is-fatal": "Start Failure is Fatal",
"pe-error-series-max": "PE Error Storage",
"pe-warn-series-max": "PE Warning Storage",
"pe-input-series-max": "PE Input Storage",
"enable-acl": "Enable ACLs",
}
sources = [
{
"name": "pacemaker-schedulerd",
"path": settings.pacemaker_schedulerd,
},
{
"name": "pacemaker-controld",
"path": settings.pacemaker_controld,
},
{
"name": "pacemaker-based",
"path": settings.pacemaker_based,
},
]
definition = {}
for source in sources:
stdout, stderr, retval = cmd_runner().run([source["path"], "metadata"])
if retval != 0:
err("unable to run {0}\n{1}".format(source["name"], stderr))
try:
etree = ET.fromstring(stdout)
for e in etree.findall("./parameters/parameter"):
prop = get_cluster_property_from_xml(e)
if prop["name"] not in banned_props:
prop["source"] = source["name"]
prop["advanced"] = prop["name"] not in basic_props
if prop["name"] in readable_names:
prop["readable_name"] = readable_names[prop["name"]]
else:
prop["readable_name"] = prop["name"]
definition[prop["name"]] = prop
except xml.parsers.expat.ExpatError as e:
err(
"unable to parse {0} metadata definition: {1}".format(
source["name"], e
)
)
except ET.ParseError as e:
err(
"unable to parse {0} metadata definition: {1}".format(
source["name"], e
)
)
return definition
def get_cluster_property_from_xml(etree_el):
"""
Commandline options: no options
"""
prop = {
"name": etree_el.get("name", ""),
"shortdesc": "",
"longdesc": "",
}
for item in ["shortdesc", "longdesc"]:
item_el = etree_el.find(item)
if item_el is not None and item_el.text is not None:
prop[item] = item_el.text
content = etree_el.find("content")
if content is None:
prop["type"] = ""
prop["default"] = ""
else:
prop["type"] = content.get("type", "")
prop["default"] = content.get("default", "")
if prop["type"] == "enum":
prop["enum"] = []
if prop["longdesc"]:
values = prop["longdesc"].split(" Allowed values: ")
if len(values) == 2:
prop["enum"] = values[1].split(", ")
prop["longdesc"] = values[0]
if prop["default"] not in prop["enum"]:
prop["enum"].append(prop["default"])
if prop["longdesc"] == prop["shortdesc"]:
prop["longdesc"] = ""
return prop
def get_lib_env() -> LibraryEnvironment:
"""
Commandline options:
* -f - CIB file
* --corosync_conf - corosync.conf file
* --request-timeout - timeout of HTTP requests
"""
user = None
groups = None
if os.geteuid() == 0:
for name in ("CIB_user", "CIB_user_groups"):
if name in os.environ and os.environ[name].strip():
value = os.environ[name].strip()
if name == "CIB_user":
user = value
else:
groups = value.split(" ")
cib_data = None
if usefile:
cib_data = get_cib()
corosync_conf_data = None
if "--corosync_conf" in pcs_options:
conf = pcs_options["--corosync_conf"]
try:
corosync_conf_data = open(conf).read()
except IOError as e:
err("Unable to read %s: %s" % (conf, e.strerror))
return LibraryEnvironment(
logging.getLogger("pcs"),
get_report_processor(),
user,
groups,
cib_data,
corosync_conf_data,
known_hosts_getter=read_known_hosts_file,
request_timeout=pcs_options.get("--request-timeout"),
)
def get_cib_user_groups():
"""
Commandline options: no options
"""
user = None
groups = None
if os.geteuid() == 0:
for name in ("CIB_user", "CIB_user_groups"):
if name in os.environ and os.environ[name].strip():
value = os.environ[name].strip()
if name == "CIB_user":
user = value
else:
groups = value.split(" ")
return user, groups
def get_cli_env():
"""
Commandline options:
* --debug
* --request-timeout
"""
env = Env()
env.user, env.groups = get_cib_user_groups()
env.known_hosts_getter = read_known_hosts_file
env.report_processor = get_report_processor()
env.request_timeout = pcs_options.get("--request-timeout")
return env
def get_middleware_factory():
"""
Commandline options:
* --corosync_conf
* --name
* --booth-conf
* --booth-key
* -f
"""
return middleware.create_middleware_factory(
cib=middleware.cib(filename if usefile else None, touch_cib_file),
corosync_conf_existing=middleware.corosync_conf_existing(
pcs_options.get("--corosync_conf", None)
),
booth_conf=pcs.cli.booth.env.middleware_config(
pcs_options.get("--booth-conf", None),
pcs_options.get("--booth-key", None),
),
)
def get_library_wrapper():
"""
Commandline options:
* --debug
* --request-timeout
* --corosync_conf
* --name
* --booth-conf
* --booth-key
* -f
NOTE: usage of options may depend on used middleware for particular command
"""
return Library(get_cli_env(), get_middleware_factory())
def exit_on_cmdline_input_errror(
error: CmdLineInputError,
main_name: str,
usage_name: Sequence[str],
) -> None:
if not error or (not error.message or error.show_both_usage_and_message):
usage.show(main_name, usage_name)
if error and error.message:
err(error.message, exit_after_error=False)
if error and error.hint:
sys.stderr.write("Hint: {0}\n".format(error.hint))
sys.exit(1)
def get_report_processor() -> ReportProcessor:
return ReportProcessorToConsole(debug=("--debug" in pcs_options))
def get_set_properties(prop_name=None, defaults=None):
"""
Commandline options:
* -f - CIB file
"""
properties = {} if defaults is None else dict(defaults)
(output, retVal) = run(["cibadmin", "-Q", "--scope", "crm_config"])
if retVal != 0:
err("unable to get crm_config\n" + output)
dom = parseString(output)
de = dom.documentElement
crm_config_properties = de.getElementsByTagName("nvpair")
for prop in crm_config_properties:
if prop_name is None or (prop_name == prop.getAttribute("name")):
properties[prop.getAttribute("name")] = prop.getAttribute("value")
return properties
def get_user_and_pass():
"""
Commandline options:
* -u - username
* -p - password
"""
username = (
pcs_options["-u"]
if "-u" in pcs_options
else get_terminal_input("Username: ")
)
password = (
pcs_options["-p"] if "-p" in pcs_options else get_terminal_password()
)
return username, password
def get_input_modifiers():
return InputModifiers(pcs_options)
def get_token_from_file(file_name: str) -> str:
try:
with open(file_name, "rb") as file:
max_size = settings.pcsd_token_max_bytes # type: ignore
value_bytes = file.read(max_size + 1)
if len(value_bytes) > max_size:
err(f"Maximal token size of {max_size} bytes exceeded")
if not value_bytes:
err(f"File '{file_name}' is empty")
return base64.b64encode(value_bytes).decode("utf-8")
except OSError as e:
err(f"Unable to read file '{file_name}': {e}", exit_after_error=False)
raise SystemExit(1) from e
| gpl-2.0 | -3,652,873,456,273,879,600 | 29.302253 | 81 | 0.580872 | false |
woodpecker1/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/memoized.py | 211 | 2482 | # Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Python does not (yet) seem to provide automatic memoization. So we've
# written a small decorator to do so.
import functools
class memoized(object):
def __init__(self, function):
self._function = function
self._results_cache = {}
def __call__(self, *args):
try:
return self._results_cache[args]
except KeyError:
# If we didn't find the args in our cache, call and save the results.
result = self._function(*args)
self._results_cache[args] = result
return result
# FIXME: We may need to handle TypeError here in the case
# that "args" is not a valid dictionary key.
# Use python "descriptor" protocol __get__ to appear
# invisible during property access.
def __get__(self, instance, owner):
# Return a function partial with obj already bound as self.
return functools.partial(self.__call__, instance)
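# Example usage (a minimal sketch):
#   class Calculator(object):
#       @memoized
#       def square(self, value):
#           return value * value
#
#   calc = Calculator()
#   calc.square(3)  # calls the function and caches 9
#   calc.square(3)  # served from _results_cache; the function is not called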
| bsd-3-clause | -1,470,874,230,422,904,000 | 44.127273 | 81 | 0.72079 | false |
MycChiu/tensorflow | tensorflow/contrib/tensor_forest/python/kernel_tests/sample_inputs_op_test.py | 56 | 6058 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.contrib.tensor_forest.ops.sample_inputs_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tensor_forest.python.ops import data_ops
from tensorflow.contrib.tensor_forest.python.ops import tensor_forest_ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
class SampleInputsTest(test_util.TensorFlowTestCase):
def setUp(self):
self.input_data = [[-1., 10.], [-10., 2.], # node 1
[20., 50.], [1., -2.]] # node 2
self.node_map = [-1, 0, 1]
self.leaves = [1, 1, 2, 2]
self.split_features = [[-1, -1, -1], [1, 0, -1], [-1, -1, -1]]
self.split_thresholds = [[0., 0., 0.], [5., -2., 0.], [0., 0., 0.]]
spec_proto = data_ops.TensorForestDataSpec()
f1 = spec_proto.dense.add()
f1.name = 'f1'
f1.original_type = data_ops.DATA_FLOAT
f1.size = 1
f2 = spec_proto.dense.add()
f2.name = 'f2'
f2.original_type = data_ops.DATA_FLOAT
f2.size = 1
spec_proto.dense_features_size = 2
self.data_spec = spec_proto.SerializeToString()
def testSimple(self):
with self.test_session():
variables.global_variables_initializer().run()
(indices, feature_updates,
threshold_updates) = (tensor_forest_ops.sample_inputs(
self.input_data, [], [], [], [],
self.node_map,
self.leaves,
self.split_features,
self.split_thresholds,
split_initializations_per_input=1,
input_spec=self.data_spec,
split_sampling_random_seed=2))
self.assertAllEqual([1, 0], indices.eval())
self.assertAllEqual([[1, 0, 1], [1, 1, -1]], feature_updates.eval())
self.assertAllEqual([[5., -2., 50.], [10., 2., 0.]],
threshold_updates.eval())
def testSparse(self):
sparse_shape = [4, 10]
sparse_indices = [[0, 0], [0, 4], [0, 9],
[1, 0], [1, 7],
[2, 0],
[3, 1], [3, 4]]
sparse_values = [3.0, -1.0, 0.5,
1.5, 6.0,
-2.0,
-0.5, 2.0]
spec_proto = data_ops.TensorForestDataSpec()
f1 = spec_proto.sparse.add()
f1.name = 'f1'
f1.original_type = data_ops.DATA_FLOAT
f1.size = -1
spec_proto.dense_features_size = 0
data_spec = spec_proto.SerializeToString()
with self.test_session():
variables.global_variables_initializer().run()
(indices, feature_updates,
threshold_updates) = (tensor_forest_ops.sample_inputs(
[],
sparse_indices,
sparse_values,
sparse_shape, [],
self.node_map,
self.leaves,
self.split_features,
self.split_thresholds,
input_spec=data_spec,
split_initializations_per_input=1,
split_sampling_random_seed=3))
self.assertAllEqual([1, 0], indices.eval())
self.assertAllEqual([[1, 0, 0], [4, 0, -1]], feature_updates.eval())
self.assertAllEqual([[5., -2., -2.], [-1., 1.5, 0.]],
threshold_updates.eval())
def testWeights(self):
with self.test_session():
variables.global_variables_initializer().run()
(indices, feature_updates,
threshold_updates) = (tensor_forest_ops.sample_inputs(
self.input_data, [], [], [], [0.5, 0.1, 0.8, 0.7],
self.node_map,
self.leaves,
self.split_features,
self.split_thresholds,
input_spec=self.data_spec,
split_initializations_per_input=1,
split_sampling_random_seed=3))
self.assertAllEqual([1, 0], indices.eval())
self.assertAllEqual([[1, 0, 0], [-1, -1, -1]], feature_updates.eval())
self.assertAllEqual([[5., -2., 20.], [0., 0., 0.]],
threshold_updates.eval())
def testNoAccumulators(self):
with self.test_session():
variables.global_variables_initializer().run()
(indices, feature_updates,
threshold_updates) = (tensor_forest_ops.sample_inputs(
self.input_data, [], [], [], [], [-1] * 3,
self.leaves,
self.split_features,
self.split_thresholds,
input_spec=self.data_spec,
split_initializations_per_input=1,
split_sampling_random_seed=3))
self.assertAllEqual([], indices.eval())
self.assertAllEqual((0, 3), feature_updates.eval().shape)
self.assertAllEqual((0, 3), threshold_updates.eval().shape)
def testBadInput(self):
del self.split_features[1]
with self.test_session():
variables.global_variables_initializer().run()
with self.assertRaisesOpError(
'split_features and split_thresholds should be the same shape.'):
indices, _, _ = tensor_forest_ops.sample_inputs(
self.input_data, [], [], [], [],
self.node_map,
self.leaves,
self.split_features,
self.split_thresholds,
input_spec=self.data_spec,
split_initializations_per_input=1,
split_sampling_random_seed=3)
self.assertAllEqual([], indices.eval())
if __name__ == '__main__':
googletest.main()
| apache-2.0 | -4,207,030,896,651,375,000 | 35.939024 | 80 | 0.579234 | false |
loveward/yingsuo | shadowsocks/obfsplugin/http_simple.py | 4 | 12316 | #!/usr/bin/env python
#
# Copyright 2015-2015 breakwa11
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import hashlib
import logging
import binascii
import struct
import base64
import datetime
import random
from shadowsocks import common
from shadowsocks.obfsplugin import plain
from shadowsocks.common import to_bytes, to_str, ord, chr
def create_http_simple_obfs(method):
return http_simple(method)
def create_http_post_obfs(method):
return http_post(method)
def create_random_head_obfs(method):
return random_head(method)
obfs_map = {
'http_simple': (create_http_simple_obfs,),
'http_simple_compatible': (create_http_simple_obfs,),
'http_post': (create_http_post_obfs,),
'http_post_compatible': (create_http_post_obfs,),
'random_head': (create_random_head_obfs,),
'random_head_compatible': (create_random_head_obfs,),
}
def match_begin(str1, str2):
if len(str1) >= len(str2):
if str1[:len(str2)] == str2:
return True
return False
class http_simple(plain.plain):
def __init__(self, method):
self.method = method
self.has_sent_header = False
self.has_recv_header = False
self.host = None
self.port = 0
self.recv_buffer = b''
self.user_agent = [b"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
b"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:40.0) Gecko/20100101 Firefox/44.0",
b"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36",
b"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.10 Chromium/27.0.1453.93 Chrome/27.0.1453.93 Safari/537.36",
b"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0",
b"Mozilla/5.0 (compatible; WOW64; MSIE 10.0; Windows NT 6.2)",
b"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27",
b"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.3; Trident/7.0; .NET4.0E; .NET4.0C)",
b"Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko",
b"Mozilla/5.0 (Linux; Android 4.4; Nexus 5 Build/BuildID) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36",
b"Mozilla/5.0 (iPad; CPU OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3",
b"Mozilla/5.0 (iPhone; CPU iPhone OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3"]
def encode_head(self, buf):
hexstr = binascii.hexlify(buf)
chs = []
for i in range(0, len(hexstr), 2):
chs.append(b"%" + hexstr[i:i+2])
return b''.join(chs)
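    # Example (illustrative): encode_head(b'\x01\xab') -> b'%01%ab'; each
    # payload byte becomes a percent-encoded hex pair embedded in the fake
    # HTTP request line built by client_encode below.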
def client_encode(self, buf):
if self.has_sent_header:
return buf
head_size = len(self.server_info.iv) + self.server_info.head_len
if len(buf) - head_size > 64:
headlen = head_size + random.randint(0, 64)
else:
headlen = len(buf)
headdata = buf[:headlen]
buf = buf[headlen:]
port = b''
if self.server_info.port != 80:
port = b':' + to_bytes(str(self.server_info.port))
body = None
hosts = (self.server_info.obfs_param or self.server_info.host)
pos = hosts.find("#")
if pos >= 0:
body = hosts[pos + 1:].replace("\n", "\r\n")
body = body.replace("\\n", "\r\n")
hosts = hosts[:pos]
hosts = hosts.split(',')
host = random.choice(hosts)
http_head = b"GET /" + self.encode_head(headdata) + b" HTTP/1.1\r\n"
http_head += b"Host: " + to_bytes(host) + port + b"\r\n"
if body:
            http_head += to_bytes(body) + b"\r\n\r\n"  # body is str; convert before joining bytes
else:
http_head += b"User-Agent: " + random.choice(self.user_agent) + b"\r\n"
http_head += b"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\nAccept-Language: en-US,en;q=0.8\r\nAccept-Encoding: gzip, deflate\r\nDNT: 1\r\nConnection: keep-alive\r\n\r\n"
self.has_sent_header = True
return http_head + buf
def client_decode(self, buf):
if self.has_recv_header:
return (buf, False)
pos = buf.find(b'\r\n\r\n')
if pos >= 0:
self.has_recv_header = True
return (buf[pos + 4:], False)
else:
return (b'', False)
def server_encode(self, buf):
if self.has_sent_header:
return buf
header = b'HTTP/1.1 200 OK\r\nConnection: keep-alive\r\nContent-Encoding: gzip\r\nContent-Type: text/html\r\nDate: '
header += to_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S GMT'))
header += b'\r\nServer: nginx\r\nVary: Accept-Encoding\r\n\r\n'
self.has_sent_header = True
return header + buf
def get_data_from_http_header(self, buf):
ret_buf = b''
lines = buf.split(b'\r\n')
if lines and len(lines) > 1:
hex_items = lines[0].split(b'%')
if hex_items and len(hex_items) > 1:
for index in range(1, len(hex_items)):
if len(hex_items[index]) < 2:
ret_buf += binascii.unhexlify('0' + hex_items[index])
break
elif len(hex_items[index]) > 2:
ret_buf += binascii.unhexlify(hex_items[index][:2])
break
else:
ret_buf += binascii.unhexlify(hex_items[index])
return ret_buf
return b''
def get_host_from_http_header(self, buf):
ret_buf = b''
lines = buf.split(b'\r\n')
if lines and len(lines) > 1:
for line in lines:
if match_begin(line, b"Host: "):
return common.to_str(line[6:])
def not_match_return(self, buf):
self.has_sent_header = True
self.has_recv_header = True
if self.method == 'http_simple':
return (b'E'*2048, False, False)
return (buf, True, False)
def error_return(self, buf):
self.has_sent_header = True
self.has_recv_header = True
return (b'E'*2048, False, False)
def server_decode(self, buf):
if self.has_recv_header:
return (buf, True, False)
self.recv_buffer += buf
buf = self.recv_buffer
if len(buf) > 10:
if match_begin(buf, b'GET ') or match_begin(buf, b'POST '):
if len(buf) > 65536:
self.recv_buffer = None
logging.warn('http_simple: over size')
return self.not_match_return(buf)
else: #not http header, run on original protocol
self.recv_buffer = None
logging.debug('http_simple: not match begin')
return self.not_match_return(buf)
else:
return (b'', True, False)
if b'\r\n\r\n' in buf:
datas = buf.split(b'\r\n\r\n', 1)
ret_buf = self.get_data_from_http_header(buf)
host = self.get_host_from_http_header(buf)
if host and self.server_info.obfs_param:
pos = host.find(":")
if pos >= 0:
host = host[:pos]
hosts = self.server_info.obfs_param.split(',')
if host not in hosts:
return self.not_match_return(buf)
if len(ret_buf) < 4:
return self.error_return(buf)
if len(datas) > 1:
ret_buf += datas[1]
if len(ret_buf) >= 13:
self.has_recv_header = True
return (ret_buf, True, False)
return self.not_match_return(buf)
else:
return (b'', True, False)
class http_post(http_simple):
def __init__(self, method):
super(http_post, self).__init__(method)
def boundary(self):
return b''.join([random.choice(b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789") for i in range(32)])
def client_encode(self, buf):
if self.has_sent_header:
return buf
head_size = len(self.server_info.iv) + self.server_info.head_len
if len(buf) - head_size > 64:
headlen = head_size + random.randint(0, 64)
else:
headlen = len(buf)
headdata = buf[:headlen]
buf = buf[headlen:]
port = b''
if self.server_info.port != 80:
port = b':' + to_bytes(str(self.server_info.port))
body = None
hosts = (self.server_info.obfs_param or self.server_info.host)
pos = hosts.find("#")
if pos >= 0:
body = hosts[pos + 1:].replace("\\n", "\r\n")
hosts = hosts[:pos]
hosts = hosts.split(',')
host = random.choice(hosts)
http_head = b"POST /" + self.encode_head(headdata) + b" HTTP/1.1\r\n"
http_head += b"Host: " + to_bytes(host) + port + b"\r\n"
if body:
            http_head += to_bytes(body) + b"\r\n\r\n"  # body is str; convert before joining bytes
else:
http_head += b"User-Agent: " + random.choice(self.user_agent) + b"\r\n"
http_head += b"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\nAccept-Language: en-US,en;q=0.8\r\nAccept-Encoding: gzip, deflate\r\n"
http_head += b"Content-Type: multipart/form-data; boundary=" + self.boundary() + b"\r\nDNT: 1\r\n"
http_head += "Connection: keep-alive\r\n\r\n"
self.has_sent_header = True
return http_head + buf
def not_match_return(self, buf):
self.has_sent_header = True
self.has_recv_header = True
if self.method == 'http_post':
return (b'E'*2048, False, False)
return (buf, True, False)
class random_head(plain.plain):
def __init__(self, method):
self.method = method
self.has_sent_header = False
self.has_recv_header = False
self.raw_trans_sent = False
self.raw_trans_recv = False
self.send_buffer = b''
def client_encode(self, buf):
if self.raw_trans_sent:
return buf
self.send_buffer += buf
if not self.has_sent_header:
self.has_sent_header = True
data = os.urandom(common.ord(os.urandom(1)[0]) % 96 + 4)
crc = (0xffffffff - binascii.crc32(data)) & 0xffffffff
return data + struct.pack('<I', crc)
if self.raw_trans_recv:
ret = self.send_buffer
self.send_buffer = b''
self.raw_trans_sent = True
return ret
return b''
def client_decode(self, buf):
if self.raw_trans_recv:
return (buf, False)
self.raw_trans_recv = True
return (b'', True)
def server_encode(self, buf):
if self.has_sent_header:
return buf
self.has_sent_header = True
return os.urandom(common.ord(os.urandom(1)[0]) % 96 + 4)
def server_decode(self, buf):
if self.has_recv_header:
return (buf, True, False)
self.has_recv_header = True
crc = binascii.crc32(buf) & 0xffffffff
if crc != 0xffffffff:
self.has_sent_header = True
if self.method == 'random_head':
return (b'E'*2048, False, False)
return (buf, True, False)
# (buffer_to_recv, is_need_decrypt, is_need_to_encode_and_send_back)
return (b'', False, True)
| apache-2.0 | -6,484,642,626,102,714,000 | 38.22293 | 212 | 0.561221 | false |
amisrs/angular-flask | angular_flask/lib/python2.7/site-packages/pip/commands/uninstall.py | 798 | 2884 | from __future__ import absolute_import
import pip
from pip.wheel import WheelCache
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError
class UninstallCommand(Command):
"""
Uninstall packages.
pip is able to uninstall most installed packages. Known exceptions are:
- Pure distutils packages installed with ``python setup.py install``, which
leave behind no metadata to determine what files were installed.
- Script wrappers installed by ``python setup.py develop``.
"""
name = 'uninstall'
usage = """
%prog [options] <package> ...
%prog [options] -r <requirements file> ..."""
summary = 'Uninstall packages.'
def __init__(self, *args, **kw):
super(UninstallCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'-r', '--requirement',
dest='requirements',
action='append',
default=[],
metavar='file',
help='Uninstall all the packages listed in the given requirements '
'file. This option can be used multiple times.',
)
self.cmd_opts.add_option(
'-y', '--yes',
dest='yes',
action='store_true',
help="Don't ask for confirmation of uninstall deletions.")
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
with self._build_session(options) as session:
format_control = pip.index.FormatControl(set(), set())
wheel_cache = WheelCache(options.cache_dir, format_control)
requirement_set = RequirementSet(
build_dir=None,
src_dir=None,
download_dir=None,
isolated=options.isolated_mode,
session=session,
wheel_cache=wheel_cache,
)
for name in args:
requirement_set.add_requirement(
InstallRequirement.from_line(
name, isolated=options.isolated_mode,
wheel_cache=wheel_cache
)
)
for filename in options.requirements:
for req in parse_requirements(
filename,
options=options,
session=session,
wheel_cache=wheel_cache):
requirement_set.add_requirement(req)
if not requirement_set.has_requirements:
raise InstallationError(
'You must give at least one requirement to %(name)s (see '
'"pip help %(name)s")' % dict(name=self.name)
)
requirement_set.uninstall(auto_confirm=options.yes)
| mit | -8,289,653,058,113,993,000 | 36.947368 | 79 | 0.557559 | false |
else/mosquitto | test/lib/02-subscribe-qos2.py | 7 | 2349 | #!/usr/bin/env python
# Test whether a client sends a correct SUBSCRIBE to a topic with QoS 2.
# The client should connect to port 1888 with keepalive=60, clean session set,
# and client id subscribe-qos2-test
# The test will send a CONNACK message to the client with rc=0. Upon receiving
# the CONNACK and verifying that rc=0, the client should send a SUBSCRIBE
# message to subscribe to topic "qos2/test" with QoS=2. If rc!=0, the client
# should exit with an error.
# Upon receiving the correct SUBSCRIBE message, the test will reply with a
# SUBACK message with the accepted QoS set to 2. On receiving the SUBACK
# message, the client should send a DISCONNECT message.
import inspect
import os
import subprocess
import socket
import sys
import time
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
rc = 1
keepalive = 60
connect_packet = mosq_test.gen_connect("subscribe-qos2-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
disconnect_packet = mosq_test.gen_disconnect()
mid = 1
subscribe_packet = mosq_test.gen_subscribe(mid, "qos2/test", 2)
suback_packet = mosq_test.gen_suback(mid, 2)
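# Expected exchange (a sketch mirroring the comments at the top of the file):
#   client -> CONNECT    (client id subscribe-qos2-test, keepalive 60)
#   test   -> CONNACK    (rc=0)
#   client -> SUBSCRIBE  (mid=1, topic "qos2/test", qos 2)
#   test   -> SUBACK     (mid=1, granted qos 2)
#   client -> DISCONNECT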
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(10)
sock.bind(('', 1888))
sock.listen(5)
client_args = sys.argv[1:]
env = dict(os.environ)
env['LD_LIBRARY_PATH'] = '../../lib:../../lib/cpp'
try:
pp = env['PYTHONPATH']
except KeyError:
pp = ''
env['PYTHONPATH'] = '../../lib/python:'+pp
client = mosq_test.start_client(filename=sys.argv[1].replace('/', '-'), cmd=client_args, env=env)
try:
(conn, address) = sock.accept()
conn.settimeout(10)
if mosq_test.expect_packet(conn, "connect", connect_packet):
conn.send(connack_packet)
if mosq_test.expect_packet(conn, "subscribe", subscribe_packet):
conn.send(suback_packet)
if mosq_test.expect_packet(conn, "disconnect", disconnect_packet):
rc = 0
conn.close()
finally:
client.terminate()
client.wait()
sock.close()
exit(rc)
| bsd-3-clause | 72,890,002,800,830,670 | 30.32 | 129 | 0.702852 | false |
Scaravex/clue-hackathon | clustering/time_profile_cluster.py | 2 | 1438 | # -*- coding: utf-8 -*-
"""
Created on Sun Mar 19 11:21:47 2017
@author: mskara
"""
import pandas as pd
import matplotlib.pyplot as plt
from src.pre_process import load_binary
def create_profile_for_symptoms(df, date_range=15):
    # relies on the module-level 'symptoms' sequence defined after the
    # results are loaded below
    profiles = {}
for symptom in symptoms:
temp = df[df['symptom'] == symptom]
sympt_profile = temp.groupby(by=temp['day_in_cycle']).mean()[0:date_range]
plt.plot(sympt_profile)
profiles[symptom] = sympt_profile
return profiles
def check_probability_access(data):
'''find probability_access'''
df_active = data['active_days']
df_cycles = data['cycles']
access_prob = []
for i in range(1, 30):
        accesses = (df_active['day_in_cycle'] == i).sum()
        cycles_at_least_i = (df_cycles['cycle_length'][df_cycles['cycle_length'] >= i]).count()
        access_prob.append(accesses / cycles_at_least_i)
return access_prob
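# What the loop above estimates (a sketch of the intent):
#   P(access on day i) = (#active-day records with day_in_cycle == i)
#                        / (#cycles with cycle_length >= i)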
df = pd.read_csv('result.txt')
symptoms = df['symptom'].unique()  # symptom names present in the results; used above and below
# profiles are only built up to day 15; afterwards our predictions are wrong
daily_profiles = create_profile_for_symptoms(df, date_range=15)
data = load_binary()
access_profile = check_probability_access(data)
plt.plot(access_profile[0:29])  # probability of access
for symptom in symptoms:
real_prob = daily_profiles[symptom].copy()
for i in range(15):
        real_prob.loc[i] = real_prob.loc[i] / access_profile[i]
plt.plot(real_prob)
| apache-2.0 | 3,036,550,866,882,399,000 | 25.653846 | 93 | 0.61822 | false |
bottompawn/kbengine | kbe/res/scripts/common/Lib/multiprocessing/process.py | 98 | 9144 | #
# Module providing the `Process` class which emulates `threading.Thread`
#
# multiprocessing/process.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
__all__ = ['BaseProcess', 'current_process', 'active_children']
#
# Imports
#
import os
import sys
import signal
import itertools
from _weakrefset import WeakSet
#
#
#
try:
ORIGINAL_DIR = os.path.abspath(os.getcwd())
except OSError:
ORIGINAL_DIR = None
#
# Public functions
#
def current_process():
'''
Return process object representing the current process
'''
return _current_process
def active_children():
'''
Return list of process objects corresponding to live child processes
'''
_cleanup()
return list(_children)
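# Example usage (a minimal sketch; the API mirrors threading.Thread):
#   from multiprocessing import Process
#   def greet(name):
#       print('hello', name)
#   if __name__ == '__main__':
#       p = Process(target=greet, args=('world',))
#       p.start()
#       p.join()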
#
#
#
def _cleanup():
# check for processes which have finished
for p in list(_children):
if p._popen.poll() is not None:
_children.discard(p)
#
# The `Process` class
#
class BaseProcess(object):
'''
Process objects represent activity that is run in a separate process
The class is analogous to `threading.Thread`
'''
def _Popen(self):
raise NotImplementedError
def __init__(self, group=None, target=None, name=None, args=(), kwargs={},
*, daemon=None):
assert group is None, 'group argument must be None for now'
count = next(_process_counter)
self._identity = _current_process._identity + (count,)
self._config = _current_process._config.copy()
self._parent_pid = os.getpid()
self._popen = None
self._target = target
self._args = tuple(args)
self._kwargs = dict(kwargs)
self._name = name or type(self).__name__ + '-' + \
':'.join(str(i) for i in self._identity)
if daemon is not None:
self.daemon = daemon
_dangling.add(self)
def run(self):
'''
Method to be run in sub-process; can be overridden in sub-class
'''
if self._target:
self._target(*self._args, **self._kwargs)
def start(self):
'''
Start child process
'''
assert self._popen is None, 'cannot start a process twice'
assert self._parent_pid == os.getpid(), \
'can only start a process object created by current process'
assert not _current_process._config.get('daemon'), \
'daemonic processes are not allowed to have children'
_cleanup()
self._popen = self._Popen(self)
self._sentinel = self._popen.sentinel
_children.add(self)
def terminate(self):
'''
Terminate process; sends SIGTERM signal or uses TerminateProcess()
'''
self._popen.terminate()
def join(self, timeout=None):
'''
Wait until child process terminates
'''
assert self._parent_pid == os.getpid(), 'can only join a child process'
assert self._popen is not None, 'can only join a started process'
res = self._popen.wait(timeout)
if res is not None:
_children.discard(self)
def is_alive(self):
'''
Return whether process is alive
'''
if self is _current_process:
return True
assert self._parent_pid == os.getpid(), 'can only test a child process'
if self._popen is None:
return False
self._popen.poll()
return self._popen.returncode is None
@property
def name(self):
return self._name
@name.setter
def name(self, name):
assert isinstance(name, str), 'name must be a string'
self._name = name
@property
def daemon(self):
'''
Return whether process is a daemon
'''
return self._config.get('daemon', False)
@daemon.setter
def daemon(self, daemonic):
'''
Set whether process is a daemon
'''
assert self._popen is None, 'process has already started'
self._config['daemon'] = daemonic
@property
def authkey(self):
return self._config['authkey']
@authkey.setter
def authkey(self, authkey):
'''
Set authorization key of process
'''
self._config['authkey'] = AuthenticationString(authkey)
@property
def exitcode(self):
'''
Return exit code of process or `None` if it has yet to stop
'''
if self._popen is None:
return self._popen
return self._popen.poll()
@property
def ident(self):
'''
Return identifier (PID) of process or `None` if it has yet to start
'''
if self is _current_process:
return os.getpid()
else:
return self._popen and self._popen.pid
pid = ident
@property
def sentinel(self):
'''
Return a file descriptor (Unix) or handle (Windows) suitable for
waiting for process termination.
'''
try:
return self._sentinel
except AttributeError:
raise ValueError("process not started")
def __repr__(self):
if self is _current_process:
status = 'started'
elif self._parent_pid != os.getpid():
status = 'unknown'
elif self._popen is None:
status = 'initial'
else:
if self._popen.poll() is not None:
status = self.exitcode
else:
status = 'started'
if type(status) is int:
if status == 0:
status = 'stopped'
else:
status = 'stopped[%s]' % _exitcode_to_name.get(status, status)
return '<%s(%s, %s%s)>' % (type(self).__name__, self._name,
status, self.daemon and ' daemon' or '')
##
def _bootstrap(self):
from . import util, context
global _current_process, _process_counter, _children
try:
if self._start_method is not None:
context._force_start_method(self._start_method)
_process_counter = itertools.count(1)
_children = set()
if sys.stdin is not None:
try:
sys.stdin.close()
sys.stdin = open(os.devnull)
except (OSError, ValueError):
pass
old_process = _current_process
_current_process = self
try:
util._finalizer_registry.clear()
util._run_after_forkers()
finally:
# delay finalization of the old process object until after
# _run_after_forkers() is executed
del old_process
util.info('child process calling self.run()')
try:
self.run()
exitcode = 0
finally:
util._exit_function()
except SystemExit as e:
if not e.args:
exitcode = 1
elif isinstance(e.args[0], int):
exitcode = e.args[0]
else:
sys.stderr.write(str(e.args[0]) + '\n')
exitcode = 1
except:
exitcode = 1
import traceback
sys.stderr.write('Process %s:\n' % self.name)
traceback.print_exc()
finally:
util.info('process exiting with exitcode %d' % exitcode)
sys.stdout.flush()
sys.stderr.flush()
return exitcode
#
# We subclass bytes to avoid accidental transmission of auth keys over network
#
class AuthenticationString(bytes):
def __reduce__(self):
from .context import get_spawning_popen
if get_spawning_popen() is None:
raise TypeError(
'Pickling an AuthenticationString object is '
'disallowed for security reasons'
)
return AuthenticationString, (bytes(self),)
#
# Create object representing the main process
#
class _MainProcess(BaseProcess):
def __init__(self):
self._identity = ()
self._name = 'MainProcess'
self._parent_pid = None
self._popen = None
self._config = {'authkey': AuthenticationString(os.urandom(32)),
'semprefix': '/mp'}
# Note that some versions of FreeBSD only allow named
# semaphores to have names of up to 14 characters. Therefore
# we choose a short prefix.
#
# On MacOSX in a sandbox it may be necessary to use a
# different prefix -- see #19478.
#
# Everything in self._config will be inherited by descendant
# processes.
_current_process = _MainProcess()
_process_counter = itertools.count(1)
_children = set()
del _MainProcess
#
# Give names to some return codes
#
_exitcode_to_name = {}
for name, signum in list(signal.__dict__.items()):
if name[:3]=='SIG' and '_' not in name:
_exitcode_to_name[-signum] = name
# For debug and leak testing
_dangling = WeakSet()
| lgpl-3.0 | -8,225,947,735,917,851,000 | 26.542169 | 79 | 0.55479 | false |
onceuponatimeforever/oh-mainline | vendor/packages/oauthlib/oauthlib/oauth2/rfc6749/request_validator.py | 36 | 19514 | # -*- coding: utf-8 -*-
"""
oauthlib.oauth2.rfc6749.grant_types
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from __future__ import unicode_literals, absolute_import
import logging
log = logging.getLogger(__name__)
class RequestValidator(object):
def client_authentication_required(self, request, *args, **kwargs):
"""Determine if client authentication is required for current request.
According to the rfc6749, client authentication is required in the following cases:
- Resource Owner Password Credentials Grant, when Client type is Confidential or when
Client was issued client credentials or whenever Client provided client
authentication, see `Section 4.3.2`_.
- Authorization Code Grant, when Client type is Confidential or when Client was issued
client credentials or whenever Client provided client authentication,
see `Section 4.1.3`_.
- Refresh Token Grant, when Client type is Confidential or when Client was issued
client credentials or whenever Client provided client authentication, see
`Section 6`_
:param request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Resource Owner Password Credentials Grant
- Refresh Token Grant
.. _`Section 4.3.2`: http://tools.ietf.org/html/rfc6749#section-4.3.2
.. _`Section 4.1.3`: http://tools.ietf.org/html/rfc6749#section-4.1.3
.. _`Section 6`: http://tools.ietf.org/html/rfc6749#section-6
"""
return True
def authenticate_client(self, request, *args, **kwargs):
"""Authenticate client through means outside the OAuth 2 spec.
Means of authentication is negotiated beforehand and may for example
be `HTTP Basic Authentication Scheme`_ which utilizes the Authorization
header.
Headers may be accesses through request.headers and parameters found in
both body and query can be obtained by direct attribute access, i.e.
request.client_id for client_id in the URL query.
OBS! Certain grant types rely on this authentication, possibly with
other fallbacks, and for them to recognize this authorization please
set the client attribute on the request (request.client). Note that
preferably this client object should have a client_id attribute of
unicode type (request.client.client_id).
:param request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Resource Owner Password Credentials Grant (may be disabled)
- Client Credentials Grant
- Refresh Token Grant
.. _`HTTP Basic Authentication Scheme`: http://tools.ietf.org/html/rfc1945#section-11.1
"""
raise NotImplementedError('Subclasses must implement this method.')
def authenticate_client_id(self, client_id, request, *args, **kwargs):
"""Ensure client_id belong to a non-confidential client.
A non-confidential client is one that is not required to authenticate
through other means, such as using HTTP Basic.
Note, while not strictly necessary it can often be very convenient
to set request.client to the client object associated with the
given client_id.
:param request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def confirm_redirect_uri(self, client_id, code, redirect_uri, client,
*args, **kwargs):
"""Ensure client is authorized to redirect to the redirect_uri requested.
If the client specifies a redirect_uri when obtaining code then
that redirect URI must be bound to the code and verified equal
in this method.
All clients should register the absolute URIs of all URIs they intend
to redirect to. The registration is outside of the scope of oauthlib.
:param client_id: Unicode client identifier
:param code: Unicode authorization_code.
:param redirect_uri: Unicode absolute URI
:param client: Client object set by you, see authenticate_client.
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Authorization Code Grant (during token request)
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_default_redirect_uri(self, client_id, request, *args, **kwargs):
"""Get the default redirect URI for the client.
:param client_id: Unicode client identifier
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: The default redirect URI for the client
Method is used by:
- Authorization Code Grant
- Implicit Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_default_scopes(self, client_id, request, *args, **kwargs):
"""Get the default scopes for the client.
:param client_id: Unicode client identifier
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: List of default scopes
Method is used by all core grant types:
- Authorization Code Grant
- Implicit Grant
- Resource Owner Password Credentials Grant
- Client Credentials grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_original_scopes(self, refresh_token, request, *args, **kwargs):
"""Get the list of scopes associated with the refresh token.
:param refresh_token: Unicode refresh token
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: List of scopes.
Method is used by:
- Refresh token grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def is_within_original_scope(self, request_scopes, refresh_token, request, *args, **kwargs):
"""Check if requested scopes are within a scope of the refresh token.
When access tokens are refreshed the scope of the new token
needs to be within the scope of the original token. This is
ensured by checking that all requested scopes strings are on
the list returned by the get_original_scopes. If this check
fails, is_within_original_scope is called. The method can be
used in situations where returning all valid scopes from the
get_original_scopes is not practical.
:param request_scopes: A list of scopes that were requested by client
:param refresh_token: Unicode refresh_token
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Refresh token grant
"""
return False
def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs):
"""Invalidate an authorization code after use.
:param client_id: Unicode client identifier
:param code: The authorization code grant (request.code).
:param request: The HTTP Request (oauthlib.common.Request)
Method is used by:
- Authorization Code Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def revoke_token(self, token, token_type_hint, request, *args, **kwargs):
"""Revoke an access or refresh token.
:param token: The token string.
:param token_type_hint: access_token or refresh_token.
:param request: The HTTP Request (oauthlib.common.Request)
Method is used by:
- Revocation Endpoint
"""
raise NotImplementedError('Subclasses must implement this method.')
def rotate_refresh_token(self, request):
"""Determine whether to rotate the refresh token. Default, yes.
When access tokens are refreshed the old refresh token can be kept
        or replaced with a new one (rotated). Return True to rotate and
        False to keep the original.
:param request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Refresh Token Grant
"""
return True
def save_authorization_code(self, client_id, code, request, *args, **kwargs):
"""Persist the authorization_code.
The code should at minimum be associated with:
        - a client and its client_id
- the redirect URI used (request.redirect_uri)
- whether the redirect URI used is the client default or not
- a resource owner / user (request.user)
- authorized scopes (request.scopes)
The authorization code grant dict (code) holds at least the key 'code'::
{'code': 'sdf345jsdf0934f'}
:param client_id: Unicode client identifier
:param code: A dict of the authorization code grant.
:param request: The HTTP Request (oauthlib.common.Request)
Method is used by:
- Authorization Code Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def save_bearer_token(self, token, request, *args, **kwargs):
"""Persist the Bearer token.
The Bearer token should at minimum be associated with:
        - a client and its client_id, if available
- a resource owner / user (request.user)
- authorized scopes (request.scopes)
- an expiration time
- a refresh token, if issued
The Bearer token dict may hold a number of items::
{
'token_type': 'Bearer',
'access_token': 'askfjh234as9sd8',
'expires_in': 3600,
'scope': 'string of space separated authorized scopes',
'refresh_token': '23sdf876234', # if issued
'state': 'given_by_client', # if supplied by client
}
Note that while "scope" is a string-separated list of authorized scopes,
the original list is still available in request.scopes
:param client_id: Unicode client identifier
:param token: A Bearer token dict
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: The default redirect URI for the client
Method is used by all core grant types issuing Bearer tokens:
- Authorization Code Grant
- Implicit Grant
- Resource Owner Password Credentials Grant (might not associate a client)
- Client Credentials grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_bearer_token(self, token, scopes, request):
"""Ensure the Bearer token is valid and authorized access to scopes.
:param token: A string of random characters.
:param scopes: A list of scopes associated with the protected resource.
:param request: The HTTP Request (oauthlib.common.Request)
A key to OAuth 2 security and restricting impact of leaked tokens is
the short expiration time of tokens, *always ensure the token has not
expired!*.
Two different approaches to scope validation:
1) all(scopes). The token must be authorized access to all scopes
associated with the resource. For example, the
token has access to ``read-only`` and ``images``,
                        thus the client can view images but not upload new ones.
Allows for fine grained access control through
combining various scopes.
2) any(scopes). The token must be authorized access to one of the
scopes associated with the resource. For example,
token has access to ``read-only-images``.
Allows for fine grained, although arguably less
convenient, access control.
A powerful way to use scopes would mimic UNIX ACLs and see a scope
as a group with certain privileges. For a restful API these might
map to HTTP verbs instead of read, write and execute.
Note, the request.user attribute can be set to the resource owner
associated with this token. Similarly the request.client and
request.scopes attribute can be set to associated client object
and authorized scopes. If you then use a decorator such as the
one provided for django these attributes will be made available
in all protected views as keyword arguments.
:rtype: True or False
Method is indirectly used by all core Bearer token issuing grant types:
- Authorization Code Grant
- Implicit Grant
- Resource Owner Password Credentials Grant
- Client Credentials Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_client_id(self, client_id, request, *args, **kwargs):
"""Ensure client_id belong to a valid and active client.
Note, while not strictly necessary it can often be very convenient
to set request.client to the client object associated with the
given client_id.
:param request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Implicit Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_code(self, client_id, code, client, request, *args, **kwargs):
"""Ensure the authorization_code is valid and assigned to client.
OBS! The request.user attribute should be set to the resource owner
associated with this authorization code. Similarly request.scopes and
request.state must also be set.
:param client_id: Unicode client identifier
:param code: Unicode authorization code
:param client: Client object set by you, see authenticate_client.
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Authorization Code Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_grant_type(self, client_id, grant_type, client, request, *args, **kwargs):
"""Ensure client is authorized to use the grant_type requested.
:param client_id: Unicode client identifier
:param grant_type: Unicode grant type, i.e. authorization_code, password.
:param client: Client object set by you, see authenticate_client.
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Resource Owner Password Credentials Grant
- Client Credentials Grant
- Refresh Token Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs):
"""Ensure client is authorized to redirect to the redirect_uri requested.
All clients should register the absolute URIs of all URIs they intend
to redirect to. The registration is outside of the scope of oauthlib.
:param client_id: Unicode client identifier
:param redirect_uri: Unicode absolute URI
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Implicit Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs):
"""Ensure the Bearer token is valid and authorized access to scopes.
OBS! The request.user attribute should be set to the resource owner
associated with this refresh token.
:param refresh_token: Unicode refresh token
:param client: Client object set by you, see authenticate_client.
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Authorization Code Grant (indirectly by issuing refresh tokens)
- Resource Owner Password Credentials Grant (also indirectly)
- Refresh Token Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_response_type(self, client_id, response_type, client, request, *args, **kwargs):
"""Ensure client is authorized to use the response_type requested.
:param client_id: Unicode client identifier
:param response_type: Unicode response type, i.e. code, token.
:param client: Client object set by you, see authenticate_client.
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Implicit Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs):
"""Ensure the client is authorized access to requested scopes.
:param client_id: Unicode client identifier
:param scopes: List of scopes (defined by you)
:param client: Client object set by you, see authenticate_client.
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by all core grant types:
- Authorization Code Grant
- Implicit Grant
- Resource Owner Password Credentials Grant
- Client Credentials Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_user(self, username, password, client, request, *args, **kwargs):
"""Ensure the username and password is valid.
OBS! The validation should also set the user attribute of the request
to a valid resource owner, i.e. request.user = username or similar. If
not set you will be unable to associate a token with a user in the
        persistence method used (commonly, save_bearer_token).
:param username: Unicode username
:param password: Unicode password
:param client: Client object set by you, see authenticate_client.
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Resource Owner Password Credentials Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
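# ----------------------------------------------------------------------
# A minimal illustrative sketch, not part of the library API: a concrete
# validator only needs to override the hooks its enabled grant types use.
# ``KNOWN_CLIENTS`` and ``TOKEN_SCOPES`` are assumptions for the example;
# validate_bearer_token shows the all(scopes) strategy described above.
# ----------------------------------------------------------------------
if __name__ == '__main__':
    class ExampleValidator(RequestValidator):
        KNOWN_CLIENTS = ['client-1']
        TOKEN_SCOPES = {'token-xyz': ['read-only', 'images']}
        def validate_client_id(self, client_id, request, *args, **kwargs):
            return client_id in self.KNOWN_CLIENTS
        def validate_bearer_token(self, token, scopes, request):
            # the token must be authorized for every scope the
            # protected resource requires
            granted = self.TOKEN_SCOPES.get(token, [])
            return all(s in granted for s in scopes)
    v = ExampleValidator()
    assert v.validate_client_id('client-1', None)
    assert v.validate_bearer_token('token-xyz', ['read-only'], None)
    assert not v.validate_bearer_token('token-xyz', ['admin'], None)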
| agpl-3.0 | 9,155,208,478,471,030,000 | 41.421739 | 98 | 0.647689 | false |
nathanaevitas/odoo | openerp/addons/purchase/company.py | 383 | 1576 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv,fields
class company(osv.osv):
_inherit = 'res.company'
_columns = {
'po_lead': fields.float(
'Purchase Lead Time', required=True,
help="Margin of error for supplier lead times. When the system"\
"generates Purchase Orders for procuring products,"\
"they will be scheduled that many days earlier "\
"to cope with unexpected supplier delays."),
}
_defaults = {
'po_lead': lambda *a: 1.0,
}
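# A hedged illustration of what the scheduler does with ``po_lead``
# elsewhere in OpenERP (the dates are made up): purchase orders are
# planned ``po_lead`` days before the products are actually required.
#
#     from datetime import datetime, timedelta
#     date_required = datetime(2014, 6, 15)
#     order_date = date_required - timedelta(days=1.0)  # po_lead default
#     # -> datetime(2014, 6, 14, 0, 0): order one day early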
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 856,910,815,874,831,600 | 39.410256 | 78 | 0.59835 | false |
willprice/arduino-sphere-project | scripts/example_direction_finder/temboo/Library/EnviroFacts/Toxins/FacilitiesSearchByZip.py | 5 | 4077 | # -*- coding: utf-8 -*-
###############################################################################
#
# FacilitiesSearchByZip
# Retrieves a list of EPA-regulated facilities in the Toxics Release Inventory (TRI) database within a given zip code.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class FacilitiesSearchByZip(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the FacilitiesSearchByZip Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(FacilitiesSearchByZip, self).__init__(temboo_session, '/Library/EnviroFacts/Toxins/FacilitiesSearchByZip')
def new_input_set(self):
return FacilitiesSearchByZipInputSet()
def _make_result_set(self, result, path):
return FacilitiesSearchByZipResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return FacilitiesSearchByZipChoreographyExecution(session, exec_id, path)
class FacilitiesSearchByZipInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the FacilitiesSearchByZip
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) Specify the desired response format. Valid formats are: xml (the default) and csv.)
"""
super(FacilitiesSearchByZipInputSet, self)._set_input('ResponseFormat', value)
def set_RowEnd(self, value):
"""
Set the value of the RowEnd input for this Choreo. ((optional, integer) Number 1 or greater indicates the ending row number of the results displayed. Default is 4999 when RowStart is 0. Up to 5000 entries are returned in the output.)
"""
super(FacilitiesSearchByZipInputSet, self)._set_input('RowEnd', value)
def set_RowStart(self, value):
"""
Set the value of the RowStart input for this Choreo. ((optional, integer) Indicates the starting row number of the results displayed. Default is 0.)
"""
super(FacilitiesSearchByZipInputSet, self)._set_input('RowStart', value)
def set_Zip(self, value):
"""
Set the value of the Zip input for this Choreo. ((required, string) Zip code to be searched.)
"""
super(FacilitiesSearchByZipInputSet, self)._set_input('Zip', value)
class FacilitiesSearchByZipResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the FacilitiesSearchByZip Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from EnviroFacts.)
"""
return self._output.get('Response', None)
class FacilitiesSearchByZipChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return FacilitiesSearchByZipResultSet(response, path)
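# A hedged usage sketch (the account credentials are placeholders; the
# session class and execute_with_results() follow the standard Temboo
# SDK pattern shared by every Choreo in this library):
#
#     from temboo.core.session import TembooSession
#     session = TembooSession('ACCOUNT', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#     choreo = FacilitiesSearchByZip(session)
#     inputs = choreo.new_input_set()
#     inputs.set_Zip('10027')
#     results = choreo.execute_with_results(inputs)
#     print(results.get_Response())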
| gpl-2.0 | 4,873,499,442,515,005,000 | 41.030928 | 241 | 0.690459 | false |
swdream/neutron | neutron/notifiers/batch_notifier.py | 56 | 2337 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
class BatchNotifier(object):
def __init__(self, batch_interval, callback):
self.pending_events = []
self._waiting_to_send = False
self.callback = callback
self.batch_interval = batch_interval
def queue_event(self, event):
"""Called to queue sending an event with the next batch of events.
Sending events individually, as they occur, has been problematic as it
can result in a flood of sends. Previously, there was a loopingcall
thread that would send batched events on a periodic interval. However,
maintaining a persistent thread in the loopingcall was also
problematic.
This replaces the loopingcall with a mechanism that creates a
short-lived thread on demand when the first event is queued. That
thread will sleep once for the same batch_duration to allow other
events to queue up in pending_events and then will send them when it
wakes.
If a thread is already alive and waiting, this call will simply queue
the event and return leaving it up to the thread to send it.
:param event: the event that occurred.
"""
if not event:
return
self.pending_events.append(event)
if self._waiting_to_send:
return
self._waiting_to_send = True
def last_out_sends():
eventlet.sleep(self.batch_interval)
self._waiting_to_send = False
self._notify()
eventlet.spawn_n(last_out_sends)
def _notify(self):
if not self.pending_events:
return
batched_events = self.pending_events
self.pending_events = []
self.callback(batched_events)
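# A minimal usage sketch (the callback name is an assumption): events
# queued within ``batch_interval`` seconds of the first one are handed
# to the callback as a single batch by the short-lived greenthread.
if __name__ == '__main__':
    def _print_batch(events):
        print('flushing %d events: %s' % (len(events), events))
    notifier = BatchNotifier(2, _print_batch)
    for i in range(5):
        notifier.queue_event('event-%d' % i)
    eventlet.sleep(3)  # yield so the spawned thread can flush the batch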
| apache-2.0 | -8,365,651,242,111,858,000 | 34.409091 | 79 | 0.658109 | false |
Wuteyan/VTK | Examples/Rendering/Python/FilterCADPart.py | 42 | 2338 | #!/usr/bin/env python
# This simple example shows how to do simple filtering in a pipeline.
# See CADPart.py and Cylinder.py for related information.
import vtk
from vtk.util.colors import light_grey
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# This reads a CAD part from an STL (stereolithography) file. The
# vtkSTLReader is a source object that produces polygonal data.
part = vtk.vtkSTLReader()
part.SetFileName(VTK_DATA_ROOT + "/Data/42400-IDGH.stl")
# A filter is a module that takes at least one input and produces at
# least one output. The SetInput and GetOutput methods are used to do
# the connection. What is returned by GetOutput is a particular
# dataset type. If the type is compatible with the SetInput method,
# then the filters can be connected together.
#
# Here we add a filter that computes surface normals from the geometry.
shrink = vtk.vtkShrinkPolyData()
shrink.SetInputConnection(part.GetOutputPort())
shrink.SetShrinkFactor(0.85)
# The mapper is responsible for pushing the geometry into the graphics
# library. It may also do color mapping, if scalars or other
# attributes are defined.
partMapper = vtk.vtkPolyDataMapper()
partMapper.SetInputConnection(shrink.GetOutputPort())
# The LOD actor is a special type of actor. It will change appearance
# in order to render faster. At the highest resolution, it renders
# everything just like an actor. The middle level is a point cloud,
# and the lowest level is a simple bounding box.
partActor = vtk.vtkLODActor()
partActor.SetMapper(partMapper)
partActor.GetProperty().SetColor(light_grey)
partActor.RotateX(30.0)
partActor.RotateY(-45.0)
# Create the graphics structure. The renderer renders into the
# render window. The render window interactor captures mouse events
# and will perform appropriate camera or actor manipulation
# depending on the nature of the events.
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
ren.AddActor(partActor)
ren.SetBackground(0.1, 0.2, 0.4)
renWin.SetSize(200, 200)
# We'll zoom in a little by accessing the camera and invoking a "Zoom"
# method on it.
ren.ResetCamera()
ren.GetActiveCamera().Zoom(1.5)
iren.Initialize()
renWin.Render()
# Start the event loop.
iren.Start()
| bsd-3-clause | 5,723,253,235,102,922,000 | 34.424242 | 71 | 0.782293 | false |
beckdaniel/GPy | GPy/util/mocap.py | 8 | 27243 | import os
import numpy as np
import math
from GPy.util import datasets as dat
class vertex:
def __init__(self, name, id, parents=[], children=[], meta = {}):
self.name = name
self.id = id
self.parents = parents
self.children = children
self.meta = meta
def __str__(self):
return self.name + '(' + str(self.id) + ').'
class tree:
def __init__(self):
self.vertices = []
self.vertices.append(vertex(name='root', id=0))
def __str__(self):
index = self.find_root()
return self.branch_str(index)
def branch_str(self, index, indent=''):
out = indent + str(self.vertices[index]) + '\n'
for child in self.vertices[index].children:
out+=self.branch_str(child, indent+' ')
return out
def find_children(self):
"""Take a tree and set the children according to the parents.
Takes a tree structure which lists the parents of each vertex
        and computes the children for each vertex, storing them in place."""
for i in range(len(self.vertices)):
self.vertices[i].children = []
for i in range(len(self.vertices)):
for parent in self.vertices[i].parents:
if i not in self.vertices[parent].children:
self.vertices[parent].children.append(i)
def find_parents(self):
"""Take a tree and set the parents according to the children
Takes a tree structure which lists the children of each vertex
and computes the parents for each vertex and places them in."""
for i in range(len(self.vertices)):
self.vertices[i].parents = []
for i in range(len(self.vertices)):
for child in self.vertices[i].children:
if i not in self.vertices[child].parents:
self.vertices[child].parents.append(i)
def find_root(self):
"""Finds the index of the root node of the tree."""
self.find_parents()
index = 0
while len(self.vertices[index].parents)>0:
index = self.vertices[index].parents[0]
return index
def get_index_by_id(self, id):
"""Give the index associated with a given vertex id."""
for i in range(len(self.vertices)):
if self.vertices[i].id == id:
return i
raise ValueError('Reverse look up of id failed.')
def get_index_by_name(self, name):
"""Give the index associated with a given vertex name."""
for i in range(len(self.vertices)):
if self.vertices[i].name == name:
return i
raise ValueError('Reverse look up of name failed.')
def order_vertices(self):
"""Order vertices in the graph such that parents always have a lower index than children."""
ordered = False
while ordered == False:
for i in range(len(self.vertices)):
ordered = True
for parent in self.vertices[i].parents:
if parent>i:
ordered = False
self.swap_vertices(i, parent)
def swap_vertices(self, i, j):
"""
Swap two vertices in the tree structure array.
        swap_vertices swaps the location of two vertices in a tree structure
        array and updates every other vertex's parent/child indices to match;
        the tree is modified in place.
        :param i: the index of the first vertex to be swapped.
        :param j: the index of the second vertex to be swapped.
"""
store_vertex_i = self.vertices[i]
store_vertex_j = self.vertices[j]
self.vertices[j] = store_vertex_i
self.vertices[i] = store_vertex_j
for k in range(len(self.vertices)):
for swap_list in [self.vertices[k].children, self.vertices[k].parents]:
if i in swap_list:
swap_list[swap_list.index(i)] = -1
if j in swap_list:
swap_list[swap_list.index(j)] = i
if -1 in swap_list:
swap_list[swap_list.index(-1)] = j
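# A short illustrative sketch (not part of the original module): build a
# three-vertex tree by hand, derive the children from the parents and
# print the indented branch structure.
#
#     t = tree()                                    # vertex 0 is 'root'
#     t.vertices.append(vertex('hip', 1, parents=[0]))
#     t.vertices.append(vertex('knee', 2, parents=[1]))
#     t.find_children()
#     print(t)          # root(0). / hip(1). / knee(2). with indentation
#     t.find_root()     # returns 0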
def rotation_matrix(xangle, yangle, zangle, order='zxy', degrees=False):
"""
Compute the rotation matrix for an angle in each direction.
This is a helper function for computing the rotation matrix for a given set of angles in a given order.
:param xangle: rotation for x-axis.
:param yangle: rotation for y-axis.
:param zangle: rotation for z-axis.
:param order: the order for the rotations.
"""
if degrees:
xangle = math.radians(xangle)
yangle = math.radians(yangle)
zangle = math.radians(zangle)
# Here we assume we rotate z, then x then y.
c1 = math.cos(xangle) # The x angle
c2 = math.cos(yangle) # The y angle
c3 = math.cos(zangle) # the z angle
s1 = math.sin(xangle)
s2 = math.sin(yangle)
s3 = math.sin(zangle)
# see http://en.wikipedia.org/wiki/Rotation_matrix for
# additional info.
if order=='zxy':
rot_mat = np.array([[c2*c3-s1*s2*s3, c2*s3+s1*s2*c3, -s2*c1],[-c1*s3, c1*c3, s1],[s2*c3+c2*s1*s3, s2*s3-c2*s1*c3, c2*c1]])
else:
rot_mat = np.eye(3)
for i in range(len(order)):
if order[i]=='x':
rot_mat = np.dot(np.array([[1, 0, 0], [0, c1, s1], [0, -s1, c1]]),rot_mat)
elif order[i] == 'y':
rot_mat = np.dot(np.array([[c2, 0, -s2], [0, 1, 0], [s2, 0, c2]]),rot_mat)
elif order[i] == 'z':
rot_mat = np.dot(np.array([[c3, s3, 0], [-s3, c3, 0], [0, 0, 1]]),rot_mat)
return rot_mat
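# A worked sanity check (a hedged sketch): with the default 'zxy' order
# and the row-vector convention used by to_xyz (v' = v . R), a 90 degree
# rotation about the z-axis maps the x unit vector onto y:
#
#     R = rotation_matrix(0., 0., 90., order='zxy', degrees=True)
#     np.allclose(np.dot(np.array([1., 0., 0.]), R), [0., 1., 0.])  # True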
# Motion capture data routines.
class skeleton(tree):
def __init__(self):
tree.__init__(self)
def connection_matrix(self):
connection = np.zeros((len(self.vertices), len(self.vertices)), dtype=bool)
for i in range(len(self.vertices)):
for j in range(len(self.vertices[i].children)):
connection[i, self.vertices[i].children[j]] = True
return connection
def to_xyz(self, channels):
raise NotImplementedError("this needs to be implemented to use the skeleton class")
def finalize(self):
"""After loading in a skeleton ensure parents are correct, vertex orders are correct and rotation matrices are correct."""
self.find_parents()
self.order_vertices()
self.set_rotation_matrices()
def smooth_angle_channels(self, channels):
"""Remove discontinuities in angle channels so that they don't cause artifacts in algorithms that rely on the smoothness of the functions."""
for vertex in self.vertices:
for col in vertex.meta['rot_ind']:
if col:
for k in range(1, channels.shape[0]):
diff=channels[k, col]-channels[k-1, col]
if abs(diff+360.)<abs(diff):
channels[k:, col]=channels[k:, col]+360.
elif abs(diff-360.)<abs(diff):
channels[k:, col]=channels[k:, col]-360.
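    # Illustrative effect (values made up): a rotation channel recorded
    # as ..., 179, -179, ... is really a 2 degree move across the +/-180
    # wrap, so every subsequent frame gets +360 and the channel becomes
    # ..., 179, 181, ..., leaving downstream maths a smooth curve.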
# class bvh_skeleton(skeleton):
# def __init__(self):
# skeleton.__init__(self)
# def to_xyz(self, channels):
class acclaim_skeleton(skeleton):
def __init__(self, file_name=None):
skeleton.__init__(self)
self.documentation = []
self.angle = 'deg'
self.length = 1.0
self.mass = 1.0
self.type = 'acclaim'
self.vertices[0] = vertex(name='root', id=0,
parents = [0], children=[],
meta = {'orientation': [],
'axis': [0., 0., 0.],
'axis_order': [],
'C': np.eye(3),
'Cinv': np.eye(3),
'channels': [],
'bodymass': [],
'confmass': [],
'order': [],
'rot_ind': [],
'pos_ind': [],
'limits': [],
'xyz': np.array([0., 0., 0.]),
'rot': np.eye(3)})
if file_name:
self.load_skel(file_name)
def to_xyz(self, channels):
rot_val = list(self.vertices[0].meta['orientation'])
for i in range(len(self.vertices[0].meta['rot_ind'])):
rind = self.vertices[0].meta['rot_ind'][i]
if rind != -1:
rot_val[i] += channels[rind]
self.vertices[0].meta['rot'] = rotation_matrix(rot_val[0],
rot_val[1],
rot_val[2],
self.vertices[0].meta['axis_order'],
degrees=True)
# vertex based store of the xyz location
self.vertices[0].meta['xyz'] = list(self.vertices[0].meta['offset'])
for i in range(len(self.vertices[0].meta['pos_ind'])):
pind = self.vertices[0].meta['pos_ind'][i]
if pind != -1:
self.vertices[0].meta['xyz'][i] += channels[pind]
for i in range(len(self.vertices[0].children)):
ind = self.vertices[0].children[i]
self.get_child_xyz(ind, channels)
xyz = []
for vertex in self.vertices:
xyz.append(vertex.meta['xyz'])
return np.array(xyz)
def get_child_xyz(self, ind, channels):
parent = self.vertices[ind].parents[0]
children = self.vertices[ind].children
rot_val = np.zeros(3)
for j in range(len(self.vertices[ind].meta['rot_ind'])):
rind = self.vertices[ind].meta['rot_ind'][j]
if rind != -1:
rot_val[j] = channels[rind]
else:
rot_val[j] = 0
tdof = rotation_matrix(rot_val[0], rot_val[1], rot_val[2],
self.vertices[ind].meta['order'],
degrees=True)
torient = rotation_matrix(self.vertices[ind].meta['axis'][0],
self.vertices[ind].meta['axis'][1],
self.vertices[ind].meta['axis'][2],
self.vertices[ind].meta['axis_order'],
degrees=True)
torient_inv = rotation_matrix(-self.vertices[ind].meta['axis'][0],
-self.vertices[ind].meta['axis'][1],
-self.vertices[ind].meta['axis'][2],
self.vertices[ind].meta['axis_order'][::-1],
degrees=True)
self.vertices[ind].meta['rot'] = np.dot(np.dot(np.dot(torient_inv,tdof),torient),self.vertices[parent].meta['rot'])
self.vertices[ind].meta['xyz'] = self.vertices[parent].meta['xyz'] + np.dot(self.vertices[ind].meta['offset'],self.vertices[ind].meta['rot'])
for i in range(len(children)):
cind = children[i]
self.get_child_xyz(cind, channels)
def load_channels(self, file_name):
fid=open(file_name, 'r')
channels = self.read_channels(fid)
fid.close()
return channels
def load_skel(self, file_name):
"""
Loads an ASF file into a skeleton structure.
:param file_name: The file name to load in.
"""
fid = open(file_name, 'r')
self.read_skel(fid)
fid.close()
self.name = file_name
def read_bonedata(self, fid):
"""Read bone data from an acclaim skeleton file stream."""
bone_count = 0
lin = self.read_line(fid)
while lin[0]!=':':
parts = lin.split()
if parts[0] == 'begin':
bone_count += 1
self.vertices.append(vertex(name = '', id=np.NaN,
meta={'name': [],
'id': [],
'offset': [],
'orientation': [],
'axis': [0., 0., 0.],
'axis_order': [],
'C': np.eye(3),
'Cinv': np.eye(3),
'channels': [],
'bodymass': [],
'confmass': [],
'order': [],
'rot_ind': [],
'pos_ind': [],
'limits': [],
'xyz': np.array([0., 0., 0.]),
'rot': np.eye(3)}))
lin = self.read_line(fid)
elif parts[0]=='id':
self.vertices[bone_count].id = int(parts[1])
lin = self.read_line(fid)
self.vertices[bone_count].children = []
elif parts[0]=='name':
self.vertices[bone_count].name = parts[1]
lin = self.read_line(fid)
elif parts[0]=='direction':
direction = np.array([float(parts[1]), float(parts[2]), float(parts[3])])
lin = self.read_line(fid)
elif parts[0]=='length':
lgth = float(parts[1])
lin = self.read_line(fid)
elif parts[0]=='axis':
self.vertices[bone_count].meta['axis'] = np.array([float(parts[1]),
float(parts[2]),
float(parts[3])])
# order is reversed compared to bvh
self.vertices[bone_count].meta['axis_order'] = parts[-1][::-1].lower()
lin = self.read_line(fid)
elif parts[0]=='dof':
order = []
for i in range(1, len(parts)):
if parts[i]== 'rx':
chan = 'Xrotation'
order.append('x')
elif parts[i] =='ry':
chan = 'Yrotation'
order.append('y')
elif parts[i] == 'rz':
chan = 'Zrotation'
order.append('z')
elif parts[i] == 'tx':
chan = 'Xposition'
elif parts[i] == 'ty':
chan = 'Yposition'
elif parts[i] == 'tz':
chan = 'Zposition'
elif parts[i] == 'l':
chan = 'length'
self.vertices[bone_count].meta['channels'].append(chan)
# order is reversed compared to bvh
self.vertices[bone_count].meta['order'] = order[::-1]
lin = self.read_line(fid)
elif parts[0]=='limits':
self.vertices[bone_count].meta['limits'] = [[float(parts[1][1:]), float(parts[2][:-1])]]
lin = self.read_line(fid)
while lin !='end':
parts = lin.split()
self.vertices[bone_count].meta['limits'].append([float(parts[0][1:]), float(parts[1][:-1])])
lin = self.read_line(fid)
self.vertices[bone_count].meta['limits'] = np.array(self.vertices[bone_count].meta['limits'])
elif parts[0]=='end':
self.vertices[bone_count].meta['offset'] = direction*lgth
lin = self.read_line(fid)
return lin
def read_channels(self, fid):
"""Read channels from an acclaim file."""
bones = [[] for i in self.vertices]
num_channels = 0
for vertex in self.vertices:
num_channels = num_channels + len(vertex.meta['channels'])
lin = self.read_line(fid)
while lin != ':DEGREES':
lin = self.read_line(fid)
if lin == '':
raise ValueError('Could not find :DEGREES in ' + fid.name)
counter = 0
lin = self.read_line(fid)
while lin:
parts = lin.split()
if len(parts)==1:
frame_no = int(parts[0])
if frame_no:
counter += 1
if counter != frame_no:
raise ValueError('Unexpected frame number.')
else:
raise ValueError('Single bone name ...')
else:
ind = self.get_index_by_name(parts[0])
bones[ind].append(np.array([float(channel) for channel in parts[1:]]))
lin = self.read_line(fid)
num_frames = counter
channels = np.zeros((num_frames, num_channels))
end_val = 0
for i in range(len(self.vertices)):
vertex = self.vertices[i]
if len(vertex.meta['channels'])>0:
start_val = end_val
end_val = end_val + len(vertex.meta['channels'])
for j in range(num_frames):
channels[j, start_val:end_val] = bones[i][j]
self.resolve_indices(i, start_val)
self.smooth_angle_channels(channels)
return channels
def read_documentation(self, fid):
"""Read documentation from an acclaim skeleton file stream."""
lin = self.read_line(fid)
while lin[0] != ':':
self.documentation.append(lin)
lin = self.read_line(fid)
return lin
def read_hierarchy(self, fid):
"""Read hierarchy information from acclaim skeleton file stream."""
lin = self.read_line(fid)
while lin != 'end':
parts = lin.split()
if lin != 'begin':
ind = self.get_index_by_name(parts[0])
for i in range(1, len(parts)):
self.vertices[ind].children.append(self.get_index_by_name(parts[i]))
lin = self.read_line(fid)
lin = self.read_line(fid)
return lin
def read_line(self, fid):
"""Read a line from a file string and check it isn't either empty or commented before returning."""
lin = '#'
while lin[0] == '#':
lin = fid.readline().strip()
if lin == '':
return lin
return lin
def read_root(self, fid):
"""Read the root node from an acclaim skeleton file stream."""
lin = self.read_line(fid)
while lin[0] != ':':
parts = lin.split()
if parts[0]=='order':
order = []
for i in range(1, len(parts)):
if parts[i].lower()=='rx':
chan = 'Xrotation'
order.append('x')
elif parts[i].lower()=='ry':
chan = 'Yrotation'
order.append('y')
elif parts[i].lower()=='rz':
chan = 'Zrotation'
order.append('z')
elif parts[i].lower()=='tx':
chan = 'Xposition'
elif parts[i].lower()=='ty':
chan = 'Yposition'
elif parts[i].lower()=='tz':
chan = 'Zposition'
elif parts[i].lower()=='l':
chan = 'length'
self.vertices[0].meta['channels'].append(chan)
# order is reversed compared to bvh
self.vertices[0].meta['order'] = order[::-1]
elif parts[0]=='axis':
# order is reversed compared to bvh
self.vertices[0].meta['axis_order'] = parts[1][::-1].lower()
elif parts[0]=='position':
self.vertices[0].meta['offset'] = [float(parts[1]),
float(parts[2]),
float(parts[3])]
elif parts[0]=='orientation':
self.vertices[0].meta['orientation'] = [float(parts[1]),
float(parts[2]),
float(parts[3])]
lin = self.read_line(fid)
return lin
def read_skel(self, fid):
"""Loads an acclaim skeleton format from a file stream."""
lin = self.read_line(fid)
while lin:
if lin[0]==':':
if lin[1:]== 'name':
lin = self.read_line(fid)
self.name = lin
elif lin[1:]=='units':
lin = self.read_units(fid)
elif lin[1:]=='documentation':
lin = self.read_documentation(fid)
elif lin[1:]=='root':
lin = self.read_root(fid)
elif lin[1:]=='bonedata':
lin = self.read_bonedata(fid)
elif lin[1:]=='hierarchy':
lin = self.read_hierarchy(fid)
elif lin[1:8]=='version':
lin = self.read_line(fid)
continue
else:
if not lin:
self.finalize()
return
lin = self.read_line(fid)
else:
raise ValueError('Unrecognised file format')
self.finalize()
def read_units(self, fid):
"""Read units from an acclaim skeleton file stream."""
lin = self.read_line(fid)
while lin[0] != ':':
parts = lin.split()
if parts[0]=='mass':
self.mass = float(parts[1])
elif parts[0]=='length':
self.length = float(parts[1])
elif parts[0]=='angle':
self.angle = parts[1]
lin = self.read_line(fid)
return lin
def resolve_indices(self, index, start_val):
"""Get indices for the skeleton from the channels when loading in channel data."""
channels = self.vertices[index].meta['channels']
base_channel = start_val
rot_ind = -np.ones(3, dtype=int)
pos_ind = -np.ones(3, dtype=int)
for i in range(len(channels)):
if channels[i]== 'Xrotation':
rot_ind[0] = base_channel + i
elif channels[i]=='Yrotation':
rot_ind[1] = base_channel + i
elif channels[i]=='Zrotation':
rot_ind[2] = base_channel + i
elif channels[i]=='Xposition':
pos_ind[0] = base_channel + i
elif channels[i]=='Yposition':
pos_ind[1] = base_channel + i
elif channels[i]=='Zposition':
pos_ind[2] = base_channel + i
self.vertices[index].meta['rot_ind'] = list(rot_ind)
self.vertices[index].meta['pos_ind'] = list(pos_ind)
def set_rotation_matrices(self):
"""Set the meta information at each vertex to contain the correct matrices C and Cinv as prescribed by the rotations and rotation orders."""
for i in range(len(self.vertices)):
self.vertices[i].meta['C'] = rotation_matrix(self.vertices[i].meta['axis'][0],
self.vertices[i].meta['axis'][1],
self.vertices[i].meta['axis'][2],
self.vertices[i].meta['axis_order'],
degrees=True)
# Todo: invert this by applying angle operations in reverse order
self.vertices[i].meta['Cinv'] = np.linalg.inv(self.vertices[i].meta['C'])
# Utilities for loading in x,y,z data.
def load_text_data(dataset, directory, centre=True):
"""Load in a data set of marker points from the Ohio State University C3D motion capture files (http://accad.osu.edu/research/mocap/mocap_data.htm)."""
points, point_names = parse_text(os.path.join(directory, dataset + '.txt'))[0:2]
# Remove markers where there is a NaN
    # check all three coordinate arrays (x, y and z) for NaNs
    present_index = [i for i in range(points[0].shape[1]) if not (np.any(np.isnan(points[0][:, i])) or np.any(np.isnan(points[1][:, i])) or np.any(np.isnan(points[2][:, i])))]
point_names = point_names[present_index]
for i in range(3):
points[i] = points[i][:, present_index]
if centre:
points[i] = (points[i].T - points[i].mean(axis=1)).T
# Concatanate the X, Y and Z markers together
Y = np.concatenate((points[0], points[1], points[2]), axis=1)
Y = Y/400.
connect = read_connections(os.path.join(directory, 'connections.txt'), point_names)
return Y, connect
def parse_text(file_name):
"""Parse data from Ohio State University text mocap files (http://accad.osu.edu/research/mocap/mocap_data.htm)."""
# Read the header
fid = open(file_name, 'r')
point_names = np.array(fid.readline().split())[2:-1:3]
fid.close()
for i in range(len(point_names)):
point_names[i] = point_names[i][0:-2]
# Read the matrix data
S = np.loadtxt(file_name, skiprows=1)
field = np.uint(S[:, 0])
times = S[:, 1]
S = S[:, 2:]
# Set the -9999.99 markers to be not present
S[S==-9999.99] = np.NaN
# Store x, y and z in different arrays
points = []
points.append(S[:, 0:-1:3])
points.append(S[:, 1:-1:3])
points.append(S[:, 2:-1:3])
return points, point_names, times
def read_connections(file_name, point_names):
"""Read a file detailing which markers should be connected to which for motion capture data."""
connections = []
fid = open(file_name, 'r')
line=fid.readline()
while(line):
connections.append(np.array(line.split(',')))
connections[-1][0] = connections[-1][0].strip()
connections[-1][1] = connections[-1][1].strip()
line = fid.readline()
connect = np.zeros((len(point_names), len(point_names)),dtype=bool)
for i in range(len(point_names)):
for j in range(len(point_names)):
for k in range(len(connections)):
if connections[k][0] == point_names[i] and connections[k][1] == point_names[j]:
connect[i,j]=True
connect[j,i]=True
break
return connect
skel = acclaim_skeleton()
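# A hedged end-to-end sketch (the file names are assumptions; any
# ASF/AMC pair in the Acclaim format should work):
#
#     skel = acclaim_skeleton('35.asf')         # parse skeleton and bones
#     channels = skel.load_channels('35_01.amc')
#     xyz = skel.to_xyz(channels[0, :])         # (num_vertices, 3) array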
| bsd-3-clause | 8,130,540,121,383,769,000 | 37.697443 | 175 | 0.476893 | false |
JoseBlanca/franklin | test/utils/seqio_utils_test.py | 1 | 3293 | '''
Created on 28 Jul 2009
@author: peio
'''
# Copyright 2009 Jose Blanca, Peio Ziarsolo, COMAV-Univ. Politecnica Valencia
# This file is part of franklin.
# franklin is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# franklin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with franklin. If not, see <http://www.gnu.org/licenses/>.
import unittest
import StringIO, tempfile, os
from franklin.utils.seqio_utils import cat, seqio
from franklin.utils.misc_utils import TEST_DATA_DIR
class TestSeqio(unittest.TestCase):
'It test the converter'
@staticmethod
def test_fastq_to_fasta_qual():
'It tests the conversion from fastq to fasta'
fcontent = '@seq1\n'
fcontent += 'CCCT\n'
fcontent += '+\n'
fcontent += ';;3;\n'
fcontent += '@SRR001666.1\n'
fcontent += 'GTTGC\n'
fcontent += '+\n'
fcontent += ';;;;;\n'
fhand = StringIO.StringIO(fcontent)
out_seq_fhand = tempfile.NamedTemporaryFile(suffix='.fasta')
out_qual_fhand = tempfile.NamedTemporaryFile(suffix='.qual')
seqio(in_seq_fhand=fhand, in_format='fastq',
out_seq_fhand=out_seq_fhand, out_qual_fhand=out_qual_fhand,
out_format='fasta')
result = '>seq1\nCCCT\n>SRR001666.1\nGTTGC\n'
assert open(out_seq_fhand.name).read() == result
qual = '>seq1\n26 26 18 26\n>SRR001666.1\n26 26 26 26 26\n'
assert open(out_qual_fhand.name).read() == qual
@staticmethod
def test_fastq_to_fastq_solexa():
'It tests the conversion using the Biopython convert function'
fcontent = '@seq1\n'
fcontent += 'CCCT\n'
fcontent += '+\n'
fcontent += ';;3;\n'
fcontent += '@SRR001666.1\n'
fcontent += 'GTTGC\n'
fcontent += '+\n'
fcontent += ';;;;;\n'
fhand = StringIO.StringIO(fcontent)
out_seq_fhand = StringIO.StringIO()
seqio(in_seq_fhand=fhand, in_format='fastq',
out_seq_fhand=out_seq_fhand, out_format='fastq-solexa')
result = '@seq1\nCCCT\n+\nZZRZ\n@SRR001666.1\nGTTGC\n+\nZZZZZ\n'
assert out_seq_fhand.getvalue() == result
class TestCat(unittest.TestCase):
'It tests the sequence converter'
@staticmethod
def test_cat():
'It tests the cat function'
inh1 = StringIO.StringIO('>seq1\nACTG\n')
inh2 = StringIO.StringIO('>seq2\nGTCA\n')
outh = StringIO.StringIO()
cat(infiles=[inh1, inh2], outfile=outh)
assert outh.getvalue() == '>seq1\nACTG\n>seq2\nGTCA\n'
#it works also with None Values
outh = StringIO.StringIO()
cat(infiles=[None, None], outfile=outh)
assert outh.getvalue() == ''
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| agpl-3.0 | -7,245,098,904,904,350,000 | 34.031915 | 77 | 0.633769 | false |
mcfletch/AutobahnPython | examples/twisted/wamp/auth/persona/server.py | 7 | 9085 | ###############################################################################
##
## Copyright (C) 2011-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import datetime
from autobahn.twisted.wamp import ApplicationSession
class TimeService(ApplicationSession):
"""
A simple time service application component.
"""
def __init__(self, realm = "realm1"):
ApplicationSession.__init__(self)
self._realm = realm
def onConnect(self):
self.join(self._realm)
def onJoin(self, details):
def utcnow():
now = datetime.datetime.utcnow()
return now.strftime("%Y-%m-%dT%H:%M:%SZ")
self.register(utcnow, 'com.timeservice.now')
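# A hedged sketch of a matching caller component (the class name is an
# assumption; it would join the same realm as TimeService above):
#
#     class TimeConsumer(ApplicationSession):
#         def onJoin(self, details):
#             def got(res):
#                 print("Time service says: {}".format(res))
#             self.call('com.timeservice.now').addCallback(got)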
from twisted.python import log
from autobahn.twisted.websocket import WampWebSocketServerProtocol, WampWebSocketServerFactory
from twisted.internet.defer import Deferred
import json
import urllib
import Cookie
from autobahn.util import newid, utcnow
from autobahn.websocket import http
class ServerProtocol(WampWebSocketServerProtocol):
## authid -> cookie -> set(connection)
def onConnect(self, request):
protocol, headers = WampWebSocketServerProtocol.onConnect(self, request)
## our cookie tracking ID
self._cbtid = None
## see if there already is a cookie set ..
if request.headers.has_key('cookie'):
try:
cookie = Cookie.SimpleCookie()
cookie.load(str(request.headers['cookie']))
except Cookie.CookieError:
pass
else:
if cookie.has_key('cbtid'):
cbtid = cookie['cbtid'].value
if self.factory._cookies.has_key(cbtid):
self._cbtid = cbtid
log.msg("Cookie already set: %s" % self._cbtid)
## if no cookie is set, create a new one ..
if self._cbtid is None:
self._cbtid = newid()
maxAge = 86400
cbtData = {'created': utcnow(),
'authenticated': None,
'maxAge': maxAge,
'connections': set()}
self.factory._cookies[self._cbtid] = cbtData
## do NOT add the "secure" cookie attribute! "secure" refers to the
## scheme of the Web page that triggered the WS, not WS itself!!
##
headers['Set-Cookie'] = 'cbtid=%s;max-age=%d' % (self._cbtid, maxAge)
log.msg("Setting new cookie: %s" % self._cbtid)
## add this WebSocket connection to the set of connections
## associated with the same cookie
self.factory._cookies[self._cbtid]['connections'].add(self)
self._authenticated = self.factory._cookies[self._cbtid]['authenticated']
## accept the WebSocket connection, speaking subprotocol `protocol`
## and setting HTTP headers `headers`
return (protocol, headers)
from autobahn.twisted.wamp import RouterSession
from autobahn.wamp import types
class MyRouterSession(RouterSession):
def onOpen(self, transport):
RouterSession.onOpen(self, transport)
print "transport authenticated: {}".format(self._transport._authenticated)
def onHello(self, realm, details):
print "onHello: {} {}".format(realm, details)
if self._transport._authenticated is not None:
return types.Accept(authid = self._transport._authenticated)
else:
return types.Challenge("mozilla-persona")
def onLeave(self, details):
if details.reason == "wamp.close.logout":
cookie = self._transport.factory._cookies[self._transport._cbtid]
cookie['authenticated'] = None
for proto in cookie['connections']:
proto.sendClose()
def onAuthenticate(self, signature, extra):
print "onAuthenticate: {} {}".format(signature, extra)
dres = Deferred()
        ## The client did its Mozilla Persona authentication thing
## and now wants to verify the authentication and login.
assertion = signature
audience = 'http://127.0.0.1:8080/'
## To verify the authentication, we need to send a HTTP/POST
## to Mozilla Persona. When successful, Persona will send us
## back something like:
# {
# "audience": "http://192.168.1.130:8080/",
# "expires": 1393681951257,
# "issuer": "gmail.login.persona.org",
# "email": "tobias.oberstein@gmail.com",
# "status": "okay"
# }
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
body = urllib.urlencode({'audience': audience, 'assertion': assertion})
from twisted.web.client import getPage
d = getPage(url = "https://verifier.login.persona.org/verify",
method = 'POST',
postdata = body,
headers = headers)
log.msg("Authentication request sent.")
def done(res):
res = json.loads(res)
try:
if res['status'] == 'okay':
## Mozilla Persona successfully authenticated the user
## remember the user's email address. this marks the cookie as
## authenticated
self._transport.factory._cookies[self._transport._cbtid]['authenticated'] = res['email']
log.msg("Authenticated user {}".format(res['email']))
dres.callback(types.Accept(authid = res['email']))
else:
log.msg("Authentication failed!")
dres.callback(types.Deny())
except Exception as e:
print "ERRR", e
def error(err):
log.msg("Authentication request failed: {}".format(err.value))
dres.callback(types.Deny())
d.addCallbacks(done, error)
return dres
if __name__ == '__main__':
import sys, argparse
from twisted.python import log
from twisted.internet.endpoints import serverFromString
## parse command line arguments
##
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action = "store_true",
help = "Enable debug output.")
parser.add_argument("-c", "--component", type = str, default = None,
help = "Start WAMP-WebSocket server with this application component, e.g. 'timeservice.TimeServiceBackend', or None.")
parser.add_argument("--websocket", type = str, default = "tcp:8080",
help = 'WebSocket server Twisted endpoint descriptor, e.g. "tcp:9000" or "unix:/tmp/mywebsocket".')
parser.add_argument("--wsurl", type = str, default = "ws://localhost:8080",
help = 'WebSocket URL (must suit the endpoint), e.g. "ws://localhost:9000".')
args = parser.parse_args()
## start Twisted logging to stdout
##
if True or args.debug:
log.startLogging(sys.stdout)
## we use an Autobahn utility to install the "best" available Twisted reactor
##
from autobahn.twisted.choosereactor import install_reactor
reactor = install_reactor()
if args.debug:
print("Running on reactor {}".format(reactor))
## create a WAMP router factory
##
from autobahn.wamp.router import RouterFactory
router_factory = RouterFactory()
## create a WAMP router session factory
##
from autobahn.twisted.wamp import RouterSessionFactory
session_factory = RouterSessionFactory(router_factory)
session_factory.session = MyRouterSession
## start an embedded application component ..
##
session_factory.add(TimeService())
## create a WAMP-over-WebSocket transport server factory
##
from autobahn.twisted.websocket import WampWebSocketServerFactory
transport_factory = WampWebSocketServerFactory(session_factory, args.wsurl, debug_wamp = args.debug)
transport_factory.protocol = ServerProtocol
transport_factory._cookies = {}
transport_factory.setProtocolOptions(failByDrop = False)
from twisted.web.server import Site
from twisted.web.static import File
from autobahn.twisted.resource import WebSocketResource
## we serve static files under "/" ..
root = File(".")
## .. and our WebSocket server under "/ws"
resource = WebSocketResource(transport_factory)
root.putChild("ws", resource)
## run both under one Twisted Web Site
site = Site(root)
## start the WebSocket server from an endpoint
##
server = serverFromString(reactor, args.websocket)
server.listen(site)
## now enter the Twisted reactor loop
##
reactor.run()
| apache-2.0 | -6,196,339,740,024,204,000 | 29.901361 | 141 | 0.628398 | false |
JamesShaeffer/QGIS | python/plugins/db_manager/dlg_export_vector.py | 62 | 8183 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS
Date : Oct 13, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : brush.tyler@gmail.com
The content of this file is based on
- PG_Manager by Martin Dobias (GPLv2 license)
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from builtins import str
from qgis.PyQt.QtCore import Qt, QFileInfo
from qgis.PyQt.QtWidgets import QDialog, QFileDialog, QMessageBox, QApplication
from qgis.PyQt.QtGui import QCursor
from qgis.core import (QgsVectorFileWriter,
QgsVectorDataProvider,
QgsCoordinateReferenceSystem,
QgsVectorLayerExporter,
QgsSettings)
from qgis.utils import OverrideCursor
from .ui.ui_DlgExportVector import Ui_DbManagerDlgExportVector as Ui_Dialog
class DlgExportVector(QDialog, Ui_Dialog):
def __init__(self, inLayer, inDb, parent=None):
QDialog.__init__(self, parent)
self.inLayer = inLayer
self.db = inDb
self.setupUi(self)
vectorFilterName = "lastVectorFileFilter" # "lastRasterFileFilter"
self.lastUsedVectorFilterSettingsKey = u"/UI/{0}".format(vectorFilterName)
self.lastUsedVectorDirSettingsKey = u"/UI/{0}Dir".format(vectorFilterName)
# update UI
self.setupWorkingMode()
self.populateFileFilters()
self.populateEncodings()
def setupWorkingMode(self):
# set default values
inCrs = self.inLayer.crs()
        srid = inCrs.postgisSrid() if inCrs.isValid() else 4326
self.editSourceSrid.setText("%s" % srid)
self.editTargetSrid.setText("%s" % srid)
self.btnChooseOutputFile.clicked.connect(self.chooseOutputFile)
self.checkSupports()
def checkSupports(self):
""" update options available for the current input layer """
allowSpatial = self.db.connector.hasSpatialSupport()
hasGeomType = self.inLayer and self.inLayer.isSpatial()
self.chkSourceSrid.setEnabled(allowSpatial and hasGeomType)
self.chkTargetSrid.setEnabled(allowSpatial and hasGeomType)
# self.chkSpatialIndex.setEnabled(allowSpatial and hasGeomType)
def chooseOutputFile(self):
# get last used dir
settings = QgsSettings()
lastUsedDir = settings.value(self.lastUsedVectorDirSettingsKey, ".")
# get selected filter
selected_driver = self.cboFileFormat.currentData()
selected_filter = QgsVectorFileWriter.filterForDriver(selected_driver)
# ask for a filename
filename, filter = QFileDialog.getSaveFileName(self, self.tr("Choose where to save the file"), lastUsedDir,
selected_filter)
if filename == "":
return
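        # derive the default extension from the selected filter string,
        # e.g. "GeoPackage (*.gpkg *.GPKG)" -> ".gpkg"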
ext = selected_filter[selected_filter.find('.'):]
ext = ext[:ext.find(' ')]
if not filename.lower().endswith(ext):
filename += ext
# store the last used dir
settings.setValue(self.lastUsedVectorDirSettingsKey, QFileInfo(filename).filePath())
self.editOutputFile.setText(filename)
def populateEncodings(self):
# populate the combo with supported encodings
self.cboEncoding.addItems(QgsVectorDataProvider.availableEncodings())
# set the last used encoding
enc = self.inLayer.dataProvider().encoding()
idx = self.cboEncoding.findText(enc)
if idx < 0:
self.cboEncoding.insertItem(0, enc)
idx = 0
self.cboEncoding.setCurrentIndex(idx)
def populateFileFilters(self):
# populate the combo with supported vector file formats
for driver in QgsVectorFileWriter.ogrDriverList():
self.cboFileFormat.addItem(driver.longName, driver.driverName)
# set the last used filter
settings = QgsSettings()
filt = settings.value(self.lastUsedVectorFilterSettingsKey, "GPKG")
idx = self.cboFileFormat.findText(filt)
if idx < 0:
idx = 0
self.cboFileFormat.setCurrentIndex(idx)
def accept(self):
# sanity checks
if self.editOutputFile.text() == "":
QMessageBox.information(self, self.tr("Export to file"), self.tr("Output file name is required"))
return
if self.chkSourceSrid.isEnabled() and self.chkSourceSrid.isChecked():
try:
sourceSrid = int(self.editSourceSrid.text())
except ValueError:
QMessageBox.information(self, self.tr("Export to file"),
self.tr("Invalid source srid: must be an integer"))
return
if self.chkTargetSrid.isEnabled() and self.chkTargetSrid.isChecked():
try:
targetSrid = int(self.editTargetSrid.text())
except ValueError:
QMessageBox.information(self, self.tr("Export to file"),
self.tr("Invalid target srid: must be an integer"))
return
with OverrideCursor(Qt.WaitCursor):
# store current input layer crs, so I can restore it later
prevInCrs = self.inLayer.crs()
try:
uri = self.editOutputFile.text()
providerName = "ogr"
options = {}
# set the OGR driver will be used
driverName = self.cboFileFormat.currentData()
options['driverName'] = driverName
# set the output file encoding
if self.chkEncoding.isEnabled() and self.chkEncoding.isChecked():
enc = self.cboEncoding.currentText()
options['fileEncoding'] = enc
if self.chkDropTable.isChecked():
options['overwrite'] = True
outCrs = QgsCoordinateReferenceSystem()
if self.chkTargetSrid.isEnabled() and self.chkTargetSrid.isChecked():
targetSrid = int(self.editTargetSrid.text())
outCrs = QgsCoordinateReferenceSystem(targetSrid)
# update input layer crs
if self.chkSourceSrid.isEnabled() and self.chkSourceSrid.isChecked():
sourceSrid = int(self.editSourceSrid.text())
inCrs = QgsCoordinateReferenceSystem(sourceSrid)
self.inLayer.setCrs(inCrs)
# do the export!
ret, errMsg = QgsVectorLayerExporter.exportLayer(self.inLayer, uri, providerName, outCrs,
False, options)
except Exception as e:
ret = -1
errMsg = str(e)
finally:
# restore input layer crs and encoding
self.inLayer.setCrs(prevInCrs)
if ret != 0:
QMessageBox.warning(self, self.tr("Export to file"), self.tr("Error {0}\n{1}").format(ret, errMsg))
return
# create spatial index
# if self.chkSpatialIndex.isEnabled() and self.chkSpatialIndex.isChecked():
# self.db.connector.createSpatialIndex( (schema, table), geom )
QMessageBox.information(self, self.tr("Export to file"), self.tr("Export finished."))
return QDialog.accept(self)
| gpl-2.0 | -2,010,138,386,699,219,700 | 39.711443 | 115 | 0.569351 | false |
mookaka/mywebblog | www/pymonitor.py | 1 | 1762 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'mookaka'
import os, sys, time, subprocess
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
command = ['echo', 'ok']
process = None
def log(s):
print('[Monitor] %s' % s)
class MyFileSystemEventHandler(FileSystemEventHandler):
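    """Watchdog event handler that invokes a restart callback whenever a
    Python source file changes."""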
def __init__(self,fn):
super(MyFileSystemEventHandler, self).__init__()
self.restart = fn
def on_any_event(self, event):
if event.src_path.endswith('.py'):
log('Python source file changed: %s' % event.src_path)
            self.restart()
def kill_process():
global process
if process:
log('Kill process [%s]...' % process.pid)
process.kill()
process.wait()
log('Process ended with code %s.' % process.returncode)
process = None
def start_process():
global process, command
    log('Start process %s...' % ' '.join(command))
process = subprocess.Popen(command, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr)
def restart_process():
kill_process()
start_process()
def start_watch(path, callback):
observer = Observer()
    observer.schedule(MyFileSystemEventHandler(callback or restart_process), path, recursive=True)
observer.start()
log('Watching directory %s...' % path)
start_process()
try:
while True:
time.sleep(0.5)
except KeyboardInterrupt:
observer.stop()
observer.join()
if __name__ == '__main__':
argv = sys.argv[1:]
if not argv:
print('Usage: ./pymonitor your-script.py')
exit(0)
if argv[0] != 'python3':
argv.insert(0, 'python3')
command = argv
path = os.path.abspath('.')
start_watch(path, None) | mit | 3,119,803,086,193,035,000 | 23.150685 | 94 | 0.616345 | false |
jpaalasm/pyglet | tests/window/WINDOW_SET_VSYNC.py | 29 | 2009 | #!/usr/bin/env python
'''Test that vsync can be set.
Expected behaviour:
A window will alternate between red and green fill.
- Press "v" to toggle vsync on/off. "Tearing" should only be visible
when vsync is off (as indicated at the terminal).
Not all video drivers support vsync. On Linux, check the output of
`tools/info.py`:
- If GLX_SGI_video_sync extension is present, should work as expected.
- If GLX_MESA_swap_control extension is present, should work as expected.
- If GLX_SGI_swap_control extension is present, vsync can be enabled,
but once enabled, it cannot be switched off (there will be no error
message).
- If none of these extensions are present, vsync is not supported by
your driver, but no error message or warning will be printed.
Close the window or press ESC to end the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import unittest
from pyglet import window
from pyglet.window import key
from pyglet.gl import *
class WINDOW_SET_VSYNC(unittest.TestCase):
colors = [(1, 0, 0, 1), (0, 1, 0, 1)]
color_index = 0
def open_window(self):
return window.Window(200, 200, vsync=False)
def on_key_press(self, symbol, modifiers):
if symbol == key.V:
vsync = not self.w1.vsync
self.w1.set_vsync(vsync)
print 'vsync is %r' % self.w1.vsync
def draw_window(self, window, colour):
window.switch_to()
glClearColor(*colour)
glClear(GL_COLOR_BUFFER_BIT)
window.flip()
def test_open_window(self):
self.w1 = self.open_window()
self.w1.push_handlers(self)
print 'vsync is %r' % self.w1.vsync
while not self.w1.has_exit:
self.color_index = 1 - self.color_index
self.draw_window(self.w1, self.colors[self.color_index])
self.w1.dispatch_events()
self.w1.close()
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -3,449,217,366,242,979,300 | 30.390625 | 79 | 0.632653 | false |
PatKayongo/patkayongo.github.io | node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/_phpbuiltins.py | 95 | 122088 | # -*- coding: utf-8 -*-
"""
pygments.lexers._phpbuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    This file loads the function names and their modules from the
    PHP website and generates itself.
    Do not alter the MODULES dict by hand!
    WARNING: the generation transfers quite a lot of data over your
             internet connection. Don't run it at home; use
             a server ;-)
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
MODULES = {'.NET': ['dotnet_load'],
'APC': ['apc_add',
'apc_bin_dump',
'apc_bin_dumpfile',
'apc_bin_load',
'apc_bin_loadfile',
'apc_cache_info',
'apc_cas',
'apc_clear_cache',
'apc_compile_file',
'apc_dec',
'apc_define_constants',
'apc_delete_file',
'apc_delete',
'apc_exists',
'apc_fetch',
'apc_inc',
'apc_load_constants',
'apc_sma_info',
'apc_store'],
'APD': ['apd_breakpoint',
'apd_callstack',
'apd_clunk',
'apd_continue',
'apd_croak',
'apd_dump_function_table',
'apd_dump_persistent_resources',
'apd_dump_regular_resources',
'apd_echo',
'apd_get_active_symbols',
'apd_set_pprof_trace',
'apd_set_session_trace_socket',
'apd_set_session_trace',
'apd_set_session',
'override_function',
'rename_function'],
'Aliases and deprecated Mysqli': ['mysqli_bind_param',
'mysqli_bind_result',
'mysqli_client_encoding',
'mysqli_connect',
'mysqli_disable_reads_from_master',
'mysqli_disable_rpl_parse',
'mysqli_enable_reads_from_master',
'mysqli_enable_rpl_parse',
'mysqli_escape_string',
'mysqli_execute',
'mysqli_fetch',
'mysqli_get_metadata',
'mysqli_master_query',
'mysqli_param_count',
'mysqli_report',
'mysqli_rpl_parse_enabled',
'mysqli_rpl_probe',
'mysqli_rpl_query_type',
'mysqli_send_long_data',
'mysqli_send_query',
'mysqli_set_opt',
'mysqli_slave_query'],
'Apache': ['apache_child_terminate',
'apache_get_modules',
'apache_get_version',
'apache_getenv',
'apache_lookup_uri',
'apache_note',
'apache_request_headers',
'apache_reset_timeout',
'apache_response_headers',
'apache_setenv',
'getallheaders',
'virtual'],
'Array': ['array_change_key_case',
'array_chunk',
'array_combine',
'array_count_values',
'array_diff_assoc',
'array_diff_key',
'array_diff_uassoc',
'array_diff_ukey',
'array_diff',
'array_fill_keys',
'array_fill',
'array_filter',
'array_flip',
'array_intersect_assoc',
'array_intersect_key',
'array_intersect_uassoc',
'array_intersect_ukey',
'array_intersect',
'array_key_exists',
'array_keys',
'array_map',
'array_merge_recursive',
'array_merge',
'array_multisort',
'array_pad',
'array_pop',
'array_product',
'array_push',
'array_rand',
'array_reduce',
'array_replace_recursive',
'array_replace',
'array_reverse',
'array_search',
'array_shift',
'array_slice',
'array_splice',
'array_sum',
'array_udiff_assoc',
'array_udiff_uassoc',
'array_udiff',
'array_uintersect_assoc',
'array_uintersect_uassoc',
'array_uintersect',
'array_unique',
'array_unshift',
'array_values',
'array_walk_recursive',
'array_walk',
'array',
'arsort',
'asort',
'compact',
'count',
'current',
'each',
'end',
'extract',
'in_array',
'key',
'krsort',
'ksort',
'list',
'natcasesort',
'natsort',
'next',
'pos',
'prev',
'range',
'reset',
'rsort',
'shuffle',
'sizeof',
'sort',
'uasort',
'uksort',
'usort'],
'BBCode': ['bbcode_add_element',
'bbcode_add_smiley',
'bbcode_create',
'bbcode_destroy',
'bbcode_parse',
'bbcode_set_arg_parser',
'bbcode_set_flags'],
'BC Math': ['bcadd',
'bccomp',
'bcdiv',
'bcmod',
'bcmul',
'bcpow',
'bcpowmod',
'bcscale',
'bcsqrt',
'bcsub'],
'Bzip2': ['bzclose',
'bzcompress',
'bzdecompress',
'bzerrno',
'bzerror',
'bzerrstr',
'bzflush',
'bzopen',
'bzread',
'bzwrite'],
'COM': ['com_addref',
'com_create_guid',
'com_event_sink',
'com_get_active_object',
'com_get',
'com_invoke',
'com_isenum',
'com_load_typelib',
'com_load',
'com_message_pump',
'com_print_typeinfo',
'com_propget',
'com_propput',
'com_propset',
'com_release',
'com_set',
'variant_abs',
'variant_add',
'variant_and',
'variant_cast',
'variant_cat',
'variant_cmp',
'variant_date_from_timestamp',
'variant_date_to_timestamp',
'variant_div',
'variant_eqv',
'variant_fix',
'variant_get_type',
'variant_idiv',
'variant_imp',
'variant_int',
'variant_mod',
'variant_mul',
'variant_neg',
'variant_not',
'variant_or',
'variant_pow',
'variant_round',
'variant_set_type',
'variant_set',
'variant_sub',
'variant_xor'],
'CUBRID': ['cubrid_affected_rows',
'cubrid_bind',
'cubrid_close_prepare',
'cubrid_close_request',
'cubrid_col_get',
'cubrid_col_size',
'cubrid_column_names',
'cubrid_column_types',
'cubrid_commit',
'cubrid_connect_with_url',
'cubrid_connect',
'cubrid_current_oid',
'cubrid_disconnect',
'cubrid_drop',
'cubrid_error_code_facility',
'cubrid_error_code',
'cubrid_error_msg',
'cubrid_execute',
'cubrid_fetch',
'cubrid_free_result',
'cubrid_get_charset',
'cubrid_get_class_name',
'cubrid_get_client_info',
'cubrid_get_db_parameter',
'cubrid_get_server_info',
'cubrid_get',
'cubrid_insert_id',
'cubrid_is_instance',
'cubrid_lob_close',
'cubrid_lob_export',
'cubrid_lob_get',
'cubrid_lob_send',
'cubrid_lob_size',
'cubrid_lock_read',
'cubrid_lock_write',
'cubrid_move_cursor',
'cubrid_num_cols',
'cubrid_num_rows',
'cubrid_prepare',
'cubrid_put',
'cubrid_rollback',
'cubrid_schema',
'cubrid_seq_drop',
'cubrid_seq_insert',
'cubrid_seq_put',
'cubrid_set_add',
'cubrid_set_drop',
'cubrid_version'],
'Cairo': ['cairo_create',
'cairo_font_face_get_type',
'cairo_font_face_status',
'cairo_font_options_create',
'cairo_font_options_equal',
'cairo_font_options_get_antialias',
'cairo_font_options_get_hint_metrics',
'cairo_font_options_get_hint_style',
'cairo_font_options_get_subpixel_order',
'cairo_font_options_hash',
'cairo_font_options_merge',
'cairo_font_options_set_antialias',
'cairo_font_options_set_hint_metrics',
'cairo_font_options_set_hint_style',
'cairo_font_options_set_subpixel_order',
'cairo_font_options_status',
'cairo_format_stride_for_width',
'cairo_image_surface_create_for_data',
'cairo_image_surface_create_from_png',
'cairo_image_surface_create',
'cairo_image_surface_get_data',
'cairo_image_surface_get_format',
'cairo_image_surface_get_height',
'cairo_image_surface_get_stride',
'cairo_image_surface_get_width',
'cairo_matrix_create_scale',
'cairo_matrix_create_translate',
'cairo_matrix_invert',
'cairo_matrix_multiply',
'cairo_matrix_rotate',
'cairo_matrix_transform_distance',
'cairo_matrix_transform_point',
'cairo_matrix_translate',
'cairo_pattern_add_color_stop_rgb',
'cairo_pattern_add_color_stop_rgba',
'cairo_pattern_create_for_surface',
'cairo_pattern_create_linear',
'cairo_pattern_create_radial',
'cairo_pattern_create_rgb',
'cairo_pattern_create_rgba',
'cairo_pattern_get_color_stop_count',
'cairo_pattern_get_color_stop_rgba',
'cairo_pattern_get_extend',
'cairo_pattern_get_filter',
'cairo_pattern_get_linear_points',
'cairo_pattern_get_matrix',
'cairo_pattern_get_radial_circles',
'cairo_pattern_get_rgba',
'cairo_pattern_get_surface',
'cairo_pattern_get_type',
'cairo_pattern_set_extend',
'cairo_pattern_set_filter',
'cairo_pattern_set_matrix',
'cairo_pattern_status',
'cairo_pdf_surface_create',
'cairo_pdf_surface_set_size',
'cairo_ps_get_levels',
'cairo_ps_level_to_string',
'cairo_ps_surface_create',
'cairo_ps_surface_dsc_begin_page_setup',
'cairo_ps_surface_dsc_begin_setup',
'cairo_ps_surface_dsc_comment',
'cairo_ps_surface_get_eps',
'cairo_ps_surface_restrict_to_level',
'cairo_ps_surface_set_eps',
'cairo_ps_surface_set_size',
'cairo_scaled_font_create',
'cairo_scaled_font_extents',
'cairo_scaled_font_get_ctm',
'cairo_scaled_font_get_font_face',
'cairo_scaled_font_get_font_matrix',
'cairo_scaled_font_get_font_options',
'cairo_scaled_font_get_scale_matrix',
'cairo_scaled_font_get_type',
'cairo_scaled_font_glyph_extents',
'cairo_scaled_font_status',
'cairo_scaled_font_text_extents',
'cairo_surface_copy_page',
'cairo_surface_create_similar',
'cairo_surface_finish',
'cairo_surface_flush',
'cairo_surface_get_content',
'cairo_surface_get_device_offset',
'cairo_surface_get_font_options',
'cairo_surface_get_type',
'cairo_surface_mark_dirty_rectangle',
'cairo_surface_mark_dirty',
'cairo_surface_set_device_offset',
'cairo_surface_set_fallback_resolution',
'cairo_surface_show_page',
'cairo_surface_status',
'cairo_surface_write_to_png',
'cairo_svg_surface_create',
'cairo_svg_surface_restrict_to_version',
'cairo_svg_version_to_string'],
'Calendar': ['cal_days_in_month',
'cal_from_jd',
'cal_info',
'cal_to_jd',
'easter_date',
'easter_days',
'FrenchToJD',
'GregorianToJD',
'JDDayOfWeek',
'JDMonthName',
'JDToFrench',
'JDToGregorian',
'jdtojewish',
'JDToJulian',
'jdtounix',
'JewishToJD',
'JulianToJD',
'unixtojd'],
'Classes/Object': ['call_user_method_array',
'call_user_method',
'class_alias',
'class_exists',
'get_called_class',
'get_class_methods',
'get_class_vars',
'get_class',
'get_declared_classes',
'get_declared_interfaces',
'get_object_vars',
'get_parent_class',
'interface_exists',
'is_a',
'is_subclass_of',
'method_exists',
'property_exists'],
'Classkit': ['classkit_import',
'classkit_method_add',
'classkit_method_copy',
'classkit_method_redefine',
'classkit_method_remove',
'classkit_method_rename'],
'Crack': ['crack_check',
'crack_closedict',
'crack_getlastmessage',
'crack_opendict'],
'Ctype': ['ctype_alnum',
'ctype_alpha',
'ctype_cntrl',
'ctype_digit',
'ctype_graph',
'ctype_lower',
'ctype_print',
'ctype_punct'],
'Cyrus': ['cyrus_authenticate',
'cyrus_bind',
'cyrus_close',
'cyrus_connect',
'cyrus_query',
'cyrus_unbind'],
'DB++': ['dbplus_add',
'dbplus_aql',
'dbplus_chdir',
'dbplus_close',
'dbplus_curr',
'dbplus_errcode',
'dbplus_errno',
'dbplus_find',
'dbplus_first',
'dbplus_flush',
'dbplus_freealllocks',
'dbplus_freelock',
'dbplus_freerlocks',
'dbplus_getlock',
'dbplus_getunique',
'dbplus_info',
'dbplus_last',
'dbplus_lockrel',
'dbplus_next',
'dbplus_open',
'dbplus_prev',
'dbplus_rchperm',
'dbplus_rcreate',
'dbplus_rcrtexact',
'dbplus_rcrtlike',
'dbplus_resolve',
'dbplus_restorepos',
'dbplus_rkeys',
'dbplus_ropen',
'dbplus_rquery',
'dbplus_rrename',
'dbplus_rsecindex',
'dbplus_runlink',
'dbplus_rzap',
'dbplus_savepos',
'dbplus_setindex',
'dbplus_setindexbynumber',
'dbplus_sql',
'dbplus_tcl',
'dbplus_tremove',
'dbplus_undo',
'dbplus_undoprepare',
'dbplus_unlockrel',
'dbplus_unselect',
'dbplus_update',
'dbplus_xlockrel',
'dbplus_xunlockrel'],
'DBA': ['dba_close',
'dba_delete',
'dba_exists',
'dba_fetch',
'dba_firstkey',
'dba_handlers',
'dba_insert',
'dba_key_split',
'dba_list',
'dba_nextkey',
'dba_open',
'dba_optimize',
'dba_popen',
'dba_replace',
'dba_sync'],
'DOM': ['dom_import_simplexml'],
'DOM XML (PHP 4)': ['domxml_new_doc',
'domxml_open_file',
'domxml_open_mem',
'domxml_version',
'domxml_xmltree',
'domxml_xslt_stylesheet_doc',
'domxml_xslt_stylesheet_file',
'domxml_xslt_stylesheet',
'domxml_xslt_version',
'xpath_eval_expression',
'xpath_eval',
'xpath_new_context',
'xpath_register_ns_auto',
'xpath_register_ns',
'xptr_eval',
'xptr_new_context'],
'Date/Time': ['checkdate',
'date_add',
'date_create_from_format',
'date_create',
'date_date_set',
'date_default_timezone_get',
'date_default_timezone_set',
'date_diff',
'date_format',
'date_get_last_errors',
'date_interval_create_from_date_string',
'date_interval_format',
'date_isodate_set',
'date_modify',
'date_offset_get',
'date_parse_from_format',
'date_parse',
'date_sub',
'date_sun_info',
'date_sunrise',
'date_sunset',
'date_time_set',
'date_timestamp_get',
'date_timestamp_set',
'date_timezone_get',
'date_timezone_set',
'date',
'getdate',
'gettimeofday',
'gmdate',
'gmmktime',
'gmstrftime',
'idate',
'localtime',
'microtime',
'mktime',
'strftime',
'strptime',
'strtotime',
'time',
'timezone_abbreviations_list',
'timezone_identifiers_list',
'timezone_location_get',
'timezone_name_from_abbr',
'timezone_name_get',
'timezone_offset_get',
'timezone_open',
'timezone_transitions_get',
'timezone_version_get'],
'Direct IO': ['dio_close', 'dio_fcntl', 'dio_open'],
'Directory': ['chdir',
'chroot',
'closedir',
'getcwd',
'opendir',
'readdir',
'rewinddir',
'scandir'],
'Enchant': ['enchant_broker_describe',
'enchant_broker_dict_exists',
'enchant_broker_free_dict',
'enchant_broker_free',
'enchant_broker_get_error',
'enchant_broker_init',
'enchant_broker_list_dicts',
'enchant_broker_request_dict',
'enchant_broker_request_pwl_dict',
'enchant_broker_set_ordering',
'enchant_dict_add_to_personal',
'enchant_dict_add_to_session',
'enchant_dict_check',
'enchant_dict_describe',
'enchant_dict_get_error',
'enchant_dict_is_in_session',
'enchant_dict_quick_check',
'enchant_dict_store_replacement',
'enchant_dict_suggest'],
'Error Handling': ['debug_backtrace',
'debug_print_backtrace',
'error_get_last',
'error_log',
'error_reporting',
'restore_error_handler',
'restore_exception_handler',
'set_error_handler',
'set_exception_handler',
'trigger_error',
'user_error'],
'Exif': ['exif_imagetype',
'exif_read_data',
'exif_tagname',
'exif_thumbnail',
'read_exif_data'],
'Expect': ['expect_expectl'],
'FAM': ['fam_cancel_monitor',
'fam_close',
'fam_monitor_collection',
'fam_monitor_directory',
'fam_monitor_file',
'fam_next_event',
'fam_open',
'fam_pending',
'fam_resume_monitor',
'fam_suspend_monitor'],
'FDF': ['fdf_add_doc_javascript',
'fdf_add_template',
'fdf_close',
'fdf_create',
'fdf_enum_values',
'fdf_errno',
'fdf_error',
'fdf_get_ap',
'fdf_get_attachment',
'fdf_get_encoding',
'fdf_get_file',
'fdf_get_flags',
'fdf_get_opt',
'fdf_get_status',
'fdf_get_value',
'fdf_get_version',
'fdf_header',
'fdf_next_field_name',
'fdf_open_string',
'fdf_open',
'fdf_remove_item',
'fdf_save_string',
'fdf_save',
'fdf_set_ap',
'fdf_set_encoding',
'fdf_set_file',
'fdf_set_flags',
'fdf_set_javascript_action',
'fdf_set_on_import_javascript',
'fdf_set_opt',
'fdf_set_status',
'fdf_set_submit_form_action',
'fdf_set_target_frame',
'fdf_set_value',
'fdf_set_version'],
'FTP': ['ftp_alloc',
'ftp_cdup',
'ftp_chdir',
'ftp_chmod',
'ftp_close',
'ftp_connect',
'ftp_delete',
'ftp_exec',
'ftp_fget',
'ftp_fput',
'ftp_get_option',
'ftp_get',
'ftp_login',
'ftp_mdtm',
'ftp_mkdir',
'ftp_nb_continue',
'ftp_nb_fget',
'ftp_nb_fput',
'ftp_nb_get',
'ftp_nb_put',
'ftp_nlist',
'ftp_pasv',
'ftp_put',
'ftp_pwd',
'ftp_quit',
'ftp_raw',
'ftp_rawlist',
'ftp_rename',
'ftp_rmdir',
'ftp_set_option',
'ftp_site',
'ftp_size',
'ftp_ssl_connect',
'ftp_systype'],
'Fileinfo': ['finfo_buffer',
'finfo_close',
'finfo_file',
'finfo_open',
'finfo_set_flags',
'mime_content_type'],
'Filesystem': ['basename',
'chgrp',
'chmod',
'chown',
'clearstatcache',
'copy',
'dirname',
'disk_free_space',
'disk_total_space',
'diskfreespace',
'fclose',
'feof',
'fflush',
'fgetc',
'fgetcsv',
'fgets',
'fgetss',
'file_exists',
'file_get_contents',
'file_put_contents',
'file',
'fileatime',
'filectime',
'filegroup',
'fileinode',
'filemtime',
'fileowner',
'fileperms',
'filesize',
'filetype',
'flock',
'fnmatch',
'fopen',
'fpassthru',
'fputcsv',
'fputs',
'fread',
'fscanf',
'fseek',
'fstat',
'ftell',
'ftruncate',
'fwrite',
'glob',
'is_dir',
'is_executable',
'is_file',
'is_link',
'is_readable',
'is_uploaded_file',
'is_writable',
'is_writeable',
'lchgrp',
'lchown',
'link',
'linkinfo',
'lstat',
'mkdir',
'move_uploaded_file',
'parse_ini_file',
'parse_ini_string',
'pathinfo',
'pclose',
'popen',
'readfile',
'readlink',
'realpath_cache_get',
'realpath_cache_size',
'realpath',
'rename',
'rewind',
'rmdir',
'set_file_buffer',
'stat',
'symlink',
'tempnam',
'tmpfile',
'touch',
'umask',
'unlink'],
'Filter': ['filter_has_var',
'filter_id',
'filter_input_array',
'filter_input',
'filter_list',
'filter_var_array',
'filter_var'],
'Firebird/InterBase': ['ibase_add_user',
'ibase_affected_rows',
'ibase_backup',
'ibase_blob_add',
'ibase_blob_cancel',
'ibase_blob_close',
'ibase_blob_create',
'ibase_blob_echo',
'ibase_blob_get',
'ibase_blob_import',
'ibase_blob_info',
'ibase_blob_open',
'ibase_close',
'ibase_commit_ret',
'ibase_commit',
'ibase_connect',
'ibase_db_info',
'ibase_delete_user',
'ibase_drop_db',
'ibase_errcode',
'ibase_errmsg',
'ibase_execute',
'ibase_fetch_assoc',
'ibase_fetch_object',
'ibase_fetch_row',
'ibase_field_info',
'ibase_free_event_handler',
'ibase_free_query',
'ibase_free_result',
'ibase_gen_id',
'ibase_maintain_db',
'ibase_modify_user',
'ibase_name_result',
'ibase_num_fields',
'ibase_num_params',
'ibase_param_info',
'ibase_pconnect',
'ibase_prepare',
'ibase_query',
'ibase_restore',
'ibase_rollback_ret',
'ibase_rollback',
'ibase_server_info',
'ibase_service_attach',
'ibase_service_detach',
'ibase_set_event_handler',
'ibase_timefmt',
'ibase_trans',
'ibase_wait_event'],
'FriBiDi': ['fribidi_log2vis'],
'FrontBase': ['fbsql_affected_rows',
'fbsql_autocommit',
'fbsql_blob_size',
'fbsql_change_user',
'fbsql_clob_size',
'fbsql_close',
'fbsql_commit',
'fbsql_connect',
'fbsql_create_blob',
'fbsql_create_clob',
'fbsql_create_db',
'fbsql_data_seek',
'fbsql_database_password',
'fbsql_database',
'fbsql_db_query',
'fbsql_db_status',
'fbsql_drop_db',
'fbsql_errno',
'fbsql_error',
'fbsql_fetch_array',
'fbsql_fetch_assoc',
'fbsql_fetch_field',
'fbsql_fetch_lengths',
'fbsql_fetch_object',
'fbsql_fetch_row',
'fbsql_field_flags',
'fbsql_field_len',
'fbsql_field_name',
'fbsql_field_seek',
'fbsql_field_table',
'fbsql_field_type',
'fbsql_free_result',
'fbsql_get_autostart_info',
'fbsql_hostname',
'fbsql_insert_id',
'fbsql_list_dbs',
'fbsql_list_fields',
'fbsql_list_tables',
'fbsql_next_result',
'fbsql_num_fields',
'fbsql_num_rows',
'fbsql_password',
'fbsql_pconnect',
'fbsql_query',
'fbsql_read_blob',
'fbsql_read_clob',
'fbsql_result',
'fbsql_rollback',
'fbsql_rows_fetched',
'fbsql_select_db',
'fbsql_set_characterset',
'fbsql_set_lob_mode',
'fbsql_set_password',
'fbsql_set_transaction',
'fbsql_start_db',
'fbsql_stop_db',
'fbsql_table_name',
'fbsql_tablename',
'fbsql_username',
'fbsql_warnings'],
'Function handling': ['call_user_func_array',
'call_user_func',
'create_function',
'forward_static_call_array',
'forward_static_call',
'func_get_arg',
'func_get_args',
'func_num_args',
'function_exists',
'get_defined_functions',
'register_shutdown_function',
'register_tick_function',
'unregister_tick_function'],
'GD and Image': ['gd_info',
'getimagesize',
'image_type_to_extension',
'image_type_to_mime_type'],
'GMP': ['gmp_abs',
'gmp_add',
'gmp_and',
'gmp_clrbit',
'gmp_cmp',
'gmp_com',
'gmp_div_q',
'gmp_div_qr',
'gmp_div_r',
'gmp_div',
'gmp_divexact',
'gmp_fact',
'gmp_gcd',
'gmp_gcdext',
'gmp_hamdist',
'gmp_init',
'gmp_intval',
'gmp_invert',
'gmp_jacobi',
'gmp_legendre',
'gmp_mod',
'gmp_mul',
'gmp_neg',
'gmp_nextprime',
'gmp_or',
'gmp_perfect_square',
'gmp_popcount',
'gmp_pow',
'gmp_powm',
'gmp_prob_prime',
'gmp_random',
'gmp_scan0',
'gmp_scan1',
'gmp_setbit',
'gmp_sign',
'gmp_sqrt',
'gmp_sqrtrem',
'gmp_strval',
'gmp_sub',
'gmp_testbit',
'gmp_xor'],
'GeoIP': ['geoip_continent_code_by_name',
'geoip_country_code_by_name',
'geoip_country_code3_by_name',
'geoip_country_name_by_name',
'geoip_database_info',
'geoip_db_avail',
'geoip_db_filename',
'geoip_db_get_all_info',
'geoip_id_by_name',
'geoip_isp_by_name',
'geoip_org_by_name',
'geoip_record_by_name',
'geoip_region_by_name',
'geoip_region_name_by_code',
'geoip_time_zone_by_country_and_region'],
'Gettext': ['bind_textdomain_codeset',
'bindtextdomain',
'dcgettext',
'dcngettext',
'dgettext',
'dngettext',
'gettext',
'ngettext',
'textdomain'],
'GnuPG': ['gnupg_adddecryptkey',
'gnupg_addencryptkey',
'gnupg_addsignkey',
'gnupg_cleardecryptkeys',
'gnupg_clearencryptkeys',
'gnupg_clearsignkeys',
'gnupg_decrypt',
'gnupg_decryptverify',
'gnupg_encrypt',
'gnupg_encryptsign',
'gnupg_export',
'gnupg_geterror',
'gnupg_getprotocol',
'gnupg_import',
'gnupg_init',
'gnupg_keyinfo',
'gnupg_setarmor',
'gnupg_seterrormode',
'gnupg_setsignmode',
'gnupg_sign',
'gnupg_verify'],
'Gopher': ['gopher_parsedir'],
'Grapheme': ['grapheme_extract',
'grapheme_stripos',
'grapheme_stristr',
'grapheme_strlen',
'grapheme_strpos',
'grapheme_strripos',
'grapheme_strrpos',
'grapheme_strstr',
'grapheme_substr'],
'Gupnp': ['gupnp_context_get_host_ip',
'gupnp_context_get_port',
'gupnp_context_get_subscription_timeout',
'gupnp_context_host_path',
'gupnp_context_new',
'gupnp_context_set_subscription_timeout',
'gupnp_context_timeout_add',
'gupnp_context_unhost_path',
'gupnp_control_point_browse_start',
'gupnp_control_point_browse_stop',
'gupnp_control_point_callback_set',
'gupnp_control_point_new',
'gupnp_device_action_callback_set',
'gupnp_device_info_get_service',
'gupnp_device_info_get',
'gupnp_root_device_get_available',
'gupnp_root_device_get_relative_location',
'gupnp_root_device_new',
'gupnp_root_device_set_available',
'gupnp_root_device_start',
'gupnp_root_device_stop',
'gupnp_service_action_get',
'gupnp_service_action_return_error',
'gupnp_service_action_return',
'gupnp_service_action_set',
'gupnp_service_freeze_notify',
'gupnp_service_info_get_introspection',
'gupnp_service_info_get',
'gupnp_service_introspection_get_state_variable',
'gupnp_service_notify',
'gupnp_service_proxy_action_get',
'gupnp_service_proxy_action_set',
'gupnp_service_proxy_add_notify',
'gupnp_service_proxy_callback_set',
'gupnp_service_proxy_get_subscribed',
'gupnp_service_proxy_remove_notify',
'gupnp_service_proxy_set_subscribed',
'gupnp_service_thaw_notify'],
'HTTP': ['http_cache_etag',
'http_cache_last_modified',
'http_chunked_decode',
'http_deflate',
'http_inflate',
'http_build_cookie',
'http_date',
'http_get_request_body_stream',
'http_get_request_body',
'http_get_request_headers',
'http_match_etag',
'http_match_modified',
'http_match_request_header',
'http_support',
'http_negotiate_charset',
'http_negotiate_content_type',
'http_negotiate_language',
'ob_deflatehandler',
'ob_etaghandler',
'ob_inflatehandler',
'http_parse_cookie',
'http_parse_headers',
'http_parse_message',
'http_parse_params',
'http_persistent_handles_clean',
'http_persistent_handles_count',
'http_persistent_handles_ident',
'http_get',
'http_head',
'http_post_data',
'http_post_fields',
'http_put_data',
'http_put_file',
'http_put_stream',
'http_request_body_encode',
'http_request_method_exists',
'http_request_method_name',
'http_request_method_register',
'http_request_method_unregister',
'http_request',
'http_redirect',
'http_send_content_disposition',
'http_send_content_type',
'http_send_data',
'http_send_file',
'http_send_last_modified',
'http_send_status',
'http_send_stream',
'http_throttle',
'http_build_str',
'http_build_url'],
'Hash': ['hash_algos',
'hash_copy',
'hash_file',
'hash_final',
'hash_hmac_file',
'hash_hmac',
'hash_init',
'hash_update_file',
'hash_update_stream',
'hash_update',
'hash'],
'Hyperwave': ['hw_Array2Objrec',
'hw_changeobject',
'hw_Children',
'hw_ChildrenObj',
'hw_Close',
'hw_Connect',
'hw_connection_info',
'hw_cp',
'hw_Deleteobject',
'hw_DocByAnchor',
'hw_DocByAnchorObj',
'hw_Document_Attributes',
'hw_Document_BodyTag',
'hw_Document_Content',
'hw_Document_SetContent',
'hw_Document_Size',
'hw_dummy',
'hw_EditText',
'hw_Error',
'hw_ErrorMsg',
'hw_Free_Document',
'hw_GetAnchors',
'hw_GetAnchorsObj',
'hw_GetAndLock',
'hw_GetChildColl',
'hw_GetChildCollObj',
'hw_GetChildDocColl',
'hw_GetChildDocCollObj',
'hw_GetObject',
'hw_GetObjectByQuery',
'hw_GetObjectByQueryColl',
'hw_GetObjectByQueryCollObj',
'hw_GetObjectByQueryObj',
'hw_GetParents',
'hw_GetParentsObj',
'hw_getrellink',
'hw_GetRemote',
'hw_getremotechildren',
'hw_GetSrcByDestObj',
'hw_GetText',
'hw_getusername',
'hw_Identify',
'hw_InCollections',
'hw_Info',
'hw_InsColl',
'hw_InsDoc',
'hw_insertanchors',
'hw_InsertDocument',
'hw_InsertObject',
'hw_mapid',
'hw_Modifyobject',
'hw_mv',
'hw_New_Document',
'hw_objrec2array',
'hw_Output_Document',
'hw_pConnect',
'hw_PipeDocument',
'hw_Root',
'hw_setlinkroot',
'hw_stat',
'hw_Unlock',
'hw_Who'],
'Hyperwave API': ['hw_api_attribute',
'hwapi_hgcsp',
'hw_api_content',
'hw_api_object'],
'IBM DB2': ['db2_autocommit',
'db2_bind_param',
'db2_client_info',
'db2_close',
'db2_column_privileges',
'db2_columns',
'db2_commit',
'db2_conn_error',
'db2_conn_errormsg',
'db2_connect',
'db2_cursor_type',
'db2_escape_string',
'db2_exec',
'db2_execute',
'db2_fetch_array',
'db2_fetch_assoc',
'db2_fetch_both',
'db2_fetch_object',
'db2_fetch_row',
'db2_field_display_size',
'db2_field_name',
'db2_field_num',
'db2_field_precision',
'db2_field_scale',
'db2_field_type',
'db2_field_width',
'db2_foreign_keys',
'db2_free_result',
'db2_free_stmt',
'db2_get_option',
'db2_last_insert_id'],
'ID3': ['id3_get_frame_long_name',
'id3_get_frame_short_name',
'id3_get_genre_id',
'id3_get_genre_list',
'id3_get_genre_name',
'id3_get_tag',
'id3_get_version',
'id3_remove_tag',
'id3_set_tag'],
'IDN': ['idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'],
'IIS': ['iis_add_server',
'iis_get_dir_security',
'iis_get_script_map',
'iis_get_server_by_comment',
'iis_get_server_by_path',
'iis_get_server_rights',
'iis_get_service_state',
'iis_remove_server',
'iis_set_app_settings',
'iis_set_dir_security',
'iis_set_script_map',
'iis_set_server_rights',
'iis_start_server',
'iis_start_service',
'iis_stop_server',
'iis_stop_service'],
'IMAP': ['imap_8bit',
'imap_alerts',
'imap_append',
'imap_base64',
'imap_binary',
'imap_body',
'imap_bodystruct',
'imap_check',
'imap_clearflag_full',
'imap_close',
'imap_createmailbox',
'imap_delete',
'imap_deletemailbox',
'imap_errors',
'imap_expunge',
'imap_fetch_overview',
'imap_fetchbody',
'imap_fetchheader',
'imap_fetchmime',
'imap_fetchstructure',
'imap_gc',
'imap_get_quota',
'imap_get_quotaroot',
'imap_getacl',
'imap_getmailboxes',
'imap_getsubscribed',
'imap_header',
'imap_headerinfo',
'imap_headers',
'imap_last_error',
'imap_list',
'imap_listmailbox',
'imap_listscan',
'imap_listsubscribed',
'imap_lsub',
'imap_mail_compose',
'imap_mail_copy',
'imap_mail_move',
'imap_mail',
'imap_mailboxmsginfo',
'imap_mime_header_decode',
'imap_msgno',
'imap_num_msg',
'imap_num_recent',
'imap_open',
'imap_ping',
'imap_qprint',
'imap_renamemailbox',
'imap_reopen',
'imap_rfc822_parse_adrlist',
'imap_rfc822_parse_headers',
'imap_rfc822_write_address',
'imap_savebody',
'imap_scanmailbox',
'imap_search',
'imap_set_quota',
'imap_setacl',
'imap_setflag_full',
'imap_sort',
'imap_status',
'imap_subscribe',
'imap_thread',
'imap_timeout',
'imap_uid',
'imap_undelete',
'imap_unsubscribe',
'imap_utf7_decode',
'imap_utf7_encode',
'imap_utf8'],
'Informix': ['ifx_affected_rows',
'ifx_blobinfile_mode',
'ifx_byteasvarchar',
'ifx_close',
'ifx_connect',
'ifx_copy_blob',
'ifx_create_blob',
'ifx_create_char',
'ifx_do',
'ifx_error',
'ifx_errormsg',
'ifx_fetch_row',
'ifx_fieldproperties',
'ifx_fieldtypes',
'ifx_free_blob',
'ifx_free_char',
'ifx_free_result',
'ifx_get_blob',
'ifx_get_char',
'ifx_getsqlca',
'ifx_htmltbl_result',
'ifx_nullformat',
'ifx_num_fields',
'ifx_num_rows',
'ifx_pconnect',
'ifx_prepare',
'ifx_query',
'ifx_textasvarchar',
'ifx_update_blob',
'ifx_update_char',
'ifxus_close_slob',
'ifxus_create_slob',
'ifxus_free_slob',
'ifxus_open_slob',
'ifxus_read_slob',
'ifxus_seek_slob',
'ifxus_tell_slob',
'ifxus_write_slob'],
'Ingres': ['ingres_autocommit_state',
'ingres_autocommit',
'ingres_charset',
'ingres_close',
'ingres_commit',
'ingres_connect',
'ingres_cursor',
'ingres_errno',
'ingres_error',
'ingres_errsqlstate',
'ingres_escape_string',
'ingres_execute',
'ingres_fetch_array',
'ingres_fetch_assoc',
'ingres_fetch_object',
'ingres_fetch_proc_return',
'ingres_fetch_row',
'ingres_field_length',
'ingres_field_name',
'ingres_field_nullable',
'ingres_field_precision',
'ingres_field_scale',
'ingres_field_type',
'ingres_free_result',
'ingres_next_error',
'ingres_num_fields',
'ingres_num_rows',
'ingres_pconnect',
'ingres_prepare',
'ingres_query',
'ingres_result_seek',
'ingres_rollback',
'ingres_set_environment',
'ingres_unbuffered_query'],
'Inotify': ['inotify_add_watch',
'inotify_init',
'inotify_queue_len',
'inotify_read',
'inotify_rm_watch'],
'JSON': ['json_decode', 'json_encode', 'json_last_error'],
'Java': ['java_last_exception_clear', 'java_last_exception_get'],
'Judy': ['judy_type', 'judy_version'],
'KADM5': ['kadm5_chpass_principal',
'kadm5_create_principal',
'kadm5_delete_principal',
'kadm5_destroy',
'kadm5_flush',
'kadm5_get_policies',
'kadm5_get_principal',
'kadm5_get_principals',
'kadm5_init_with_password',
'kadm5_modify_principal'],
'LDAP': ['ldap_8859_to_t61',
'ldap_add',
'ldap_bind',
'ldap_close',
'ldap_compare',
'ldap_connect',
'ldap_count_entries',
'ldap_delete',
'ldap_dn2ufn',
'ldap_err2str',
'ldap_errno',
'ldap_error',
'ldap_explode_dn',
'ldap_first_attribute',
'ldap_first_entry',
'ldap_first_reference',
'ldap_free_result',
'ldap_get_attributes',
'ldap_get_dn',
'ldap_get_entries',
'ldap_get_option',
'ldap_get_values_len',
'ldap_get_values',
'ldap_list',
'ldap_mod_add',
'ldap_mod_del',
'ldap_mod_replace',
'ldap_modify',
'ldap_next_attribute',
'ldap_next_entry',
'ldap_next_reference',
'ldap_parse_reference',
'ldap_parse_result',
'ldap_read',
'ldap_rename',
'ldap_sasl_bind',
'ldap_search',
'ldap_set_option',
'ldap_set_rebind_proc',
'ldap_sort',
'ldap_start_tls',
'ldap_t61_to_8859',
'ldap_unbind'],
'LZF': ['lzf_compress', 'lzf_decompress', 'lzf_optimized_for'],
'Libevent': ['event_add',
'event_base_free',
'event_base_loop',
'event_base_loopbreak',
'event_base_loopexit',
'event_base_new',
'event_base_priority_init',
'event_base_set',
'event_buffer_base_set',
'event_buffer_disable',
'event_buffer_enable',
'event_buffer_fd_set',
'event_buffer_free',
'event_buffer_new',
'event_buffer_priority_set',
'event_buffer_read',
'event_buffer_set_callback',
'event_buffer_timeout_set',
'event_buffer_watermark_set',
'event_buffer_write',
'event_del',
'event_free',
'event_new',
'event_set'],
'Lotus Notes': ['notes_body',
'notes_copy_db',
'notes_create_db',
'notes_create_note',
'notes_drop_db',
'notes_find_note',
'notes_header_info',
'notes_list_msgs',
'notes_mark_read',
'notes_mark_unread',
'notes_nav_create',
'notes_search',
'notes_unread',
'notes_version'],
'MCVE': ['m_checkstatus',
'm_completeauthorizations',
'm_connect',
'm_connectionerror',
'm_deletetrans',
'm_destroyconn',
'm_destroyengine',
'm_getcell',
'm_getcellbynum',
'm_getcommadelimited',
'm_getheader',
'm_initconn',
'm_initengine',
'm_iscommadelimited',
'm_maxconntimeout',
'm_monitor',
'm_numcolumns',
'm_numrows',
'm_parsecommadelimited',
'm_responsekeys'],
'Mail': ['ezmlm_hash', 'mail'],
'Mailparse': ['mailparse_determine_best_xfer_encoding',
'mailparse_msg_create',
'mailparse_msg_extract_part_file',
'mailparse_msg_extract_part',
'mailparse_msg_extract_whole_part_file',
'mailparse_msg_free',
'mailparse_msg_get_part_data',
'mailparse_msg_get_part',
'mailparse_msg_get_structure',
'mailparse_msg_parse_file',
'mailparse_msg_parse',
'mailparse_rfc822_parse_addresses',
'mailparse_stream_encode',
'mailparse_uudecode_all'],
'Math': ['abs',
'acos',
'acosh',
'asin',
'asinh',
'atan2',
'atan',
'atanh',
'base_convert',
'bindec',
'ceil',
'cos',
'cosh',
'decbin',
'dechex',
'decoct',
'deg2rad',
'exp',
'expm1'],
'MaxDB': ['maxdb_affected_rows',
'maxdb_autocommit',
'maxdb_bind_param',
'maxdb_bind_result',
'maxdb_change_user',
'maxdb_character_set_name',
'maxdb_client_encoding',
'maxdb_close_long_data',
'maxdb_close',
'maxdb_commit',
'maxdb_connect_errno',
'maxdb_connect_error',
'maxdb_connect',
'maxdb_data_seek',
'maxdb_debug',
'maxdb_disable_reads_from_master',
'maxdb_disable_rpl_parse',
'maxdb_dump_debug_info',
'maxdb_embedded_connect',
'maxdb_enable_reads_from_master',
'maxdb_enable_rpl_parse',
'maxdb_errno',
'maxdb_error',
'maxdb_escape_string',
'maxdb_execute',
'maxdb_fetch_array',
'maxdb_fetch_assoc',
'maxdb_fetch_field_direct',
'maxdb_fetch_field',
'maxdb_fetch_fields',
'maxdb_fetch_lengths',
'maxdb_fetch_object',
'maxdb_fetch_row',
'maxdb_fetch',
'maxdb_field_count',
'maxdb_field_seek',
'maxdb_field_tell',
'maxdb_free_result',
'maxdb_get_client_info',
'maxdb_get_client_version',
'maxdb_get_host_info',
'maxdb_get_metadata',
'maxdb_get_proto_info',
'maxdb_get_server_info',
'maxdb_get_server_version',
'maxdb_info',
'maxdb_init',
'maxdb_insert_id',
'maxdb_kill',
'maxdb_master_query',
'maxdb_more_results',
'maxdb_multi_query',
'maxdb_next_result',
'maxdb_num_fields',
'maxdb_num_rows',
'maxdb_options',
'maxdb_param_count',
'maxdb_ping',
'maxdb_prepare',
'maxdb_query',
'maxdb_real_connect',
'maxdb_real_escape_string',
'maxdb_real_query',
'maxdb_report',
'maxdb_rollback',
'maxdb_rpl_parse_enabled',
'maxdb_rpl_probe',
'maxdb_rpl_query_type',
'maxdb_select_db',
'maxdb_send_long_data',
'maxdb_send_query',
'maxdb_server_end',
'maxdb_server_init',
'maxdb_set_opt',
'maxdb_sqlstate',
'maxdb_ssl_set',
'maxdb_stat',
'maxdb_stmt_affected_rows'],
'Mcrypt': ['mcrypt_cbc',
'mcrypt_cfb',
'mcrypt_create_iv',
'mcrypt_decrypt',
'mcrypt_ecb',
'mcrypt_enc_get_algorithms_name',
'mcrypt_enc_get_block_size',
'mcrypt_enc_get_iv_size',
'mcrypt_enc_get_key_size',
'mcrypt_enc_get_modes_name',
'mcrypt_enc_get_supported_key_sizes',
'mcrypt_enc_is_block_algorithm_mode',
'mcrypt_enc_is_block_algorithm',
'mcrypt_enc_is_block_mode',
'mcrypt_enc_self_test',
'mcrypt_encrypt',
'mcrypt_generic_deinit',
'mcrypt_generic_end',
'mcrypt_generic_init',
'mcrypt_generic',
'mcrypt_get_block_size',
'mcrypt_get_cipher_name',
'mcrypt_get_iv_size',
'mcrypt_get_key_size',
'mcrypt_list_algorithms',
'mcrypt_list_modes',
'mcrypt_module_close',
'mcrypt_module_get_algo_block_size',
'mcrypt_module_get_algo_key_size',
'mcrypt_module_get_supported_key_sizes',
'mcrypt_module_is_block_algorithm_mode',
'mcrypt_module_is_block_algorithm',
'mcrypt_module_is_block_mode',
'mcrypt_module_open',
'mcrypt_module_self_test',
'mcrypt_ofb',
'mdecrypt_generic'],
'Memcache': ['memcache_debug'],
'Mhash': ['mhash_count',
'mhash_get_block_size',
'mhash_get_hash_name',
'mhash_keygen_s2k',
'mhash'],
'Ming': ['ming_keypress',
'ming_setcubicthreshold',
'ming_setscale',
'ming_setswfcompression',
'ming_useconstants',
'ming_useswfversion'],
'Misc.': ['connection_aborted',
'connection_status',
'connection_timeout',
'constant',
'define',
'defined',
'die',
'eval',
'exit',
'get_browser',
'__halt_compiler',
'highlight_file',
'highlight_string',
'ignore_user_abort',
'pack',
'php_check_syntax',
'php_strip_whitespace',
'show_source',
'sleep',
'sys_getloadavg',
'time_nanosleep',
'time_sleep_until',
'uniqid',
'unpack',
'usleep'],
'Mongo': ['bson_decode', 'bson_encode'],
'Msession': ['msession_connect',
'msession_count',
'msession_create',
'msession_destroy',
'msession_disconnect',
'msession_find',
'msession_get_array',
'msession_get_data',
'msession_get',
'msession_inc',
'msession_list',
'msession_listvar',
'msession_lock',
'msession_plugin',
'msession_randstr',
'msession_set_array',
'msession_set_data',
'msession_set',
'msession_timeout',
'msession_uniq',
'msession_unlock'],
'Mssql': ['mssql_bind',
'mssql_close',
'mssql_connect',
'mssql_data_seek',
'mssql_execute',
'mssql_fetch_array',
'mssql_fetch_assoc',
'mssql_fetch_batch',
'mssql_fetch_field',
'mssql_fetch_object',
'mssql_fetch_row',
'mssql_field_length',
'mssql_field_name',
'mssql_field_seek',
'mssql_field_type',
'mssql_free_result',
'mssql_free_statement',
'mssql_get_last_message',
'mssql_guid_string',
'mssql_init',
'mssql_min_error_severity',
'mssql_min_message_severity',
'mssql_next_result',
'mssql_num_fields',
'mssql_num_rows',
'mssql_pconnect',
'mssql_query',
'mssql_result',
'mssql_rows_affected',
'mssql_select_db'],
'Multibyte String': ['mb_check_encoding',
'mb_convert_case',
'mb_convert_encoding',
'mb_convert_kana',
'mb_convert_variables',
'mb_decode_mimeheader',
'mb_decode_numericentity',
'mb_detect_encoding',
'mb_detect_order',
'mb_encode_mimeheader',
'mb_encode_numericentity',
'mb_encoding_aliases',
'mb_ereg_match',
'mb_ereg_replace',
'mb_ereg_search_getpos',
'mb_ereg_search_getregs',
'mb_ereg_search_init',
'mb_ereg_search_pos',
'mb_ereg_search_regs',
'mb_ereg_search_setpos',
'mb_ereg_search',
'mb_ereg',
'mb_eregi_replace',
'mb_eregi',
'mb_get_info',
'mb_http_input',
'mb_http_output',
'mb_internal_encoding',
'mb_language',
'mb_list_encodings',
'mb_output_handler',
'mb_parse_str',
'mb_preferred_mime_name',
'mb_regex_encoding',
'mb_regex_set_options',
'mb_send_mail',
'mb_split',
'mb_strcut',
'mb_strimwidth',
'mb_stripos',
'mb_stristr',
'mb_strlen',
'mb_strpos',
'mb_strrchr',
'mb_strrichr',
'mb_strripos',
'mb_strrpos',
'mb_strstr',
'mb_strtolower',
'mb_strtoupper',
'mb_strwidth',
'mb_substitute_character',
'mb_substr_count',
'mb_substr'],
'MySQL': ['mysql_affected_rows',
'mysql_client_encoding',
'mysql_close',
'mysql_connect',
'mysql_create_db',
'mysql_data_seek',
'mysql_db_name',
'mysql_db_query',
'mysql_drop_db',
'mysql_errno',
'mysql_error',
'mysql_escape_string',
'mysql_fetch_array',
'mysql_fetch_assoc',
'mysql_fetch_field',
'mysql_fetch_lengths',
'mysql_fetch_object',
'mysql_fetch_row',
'mysql_field_flags',
'mysql_field_len',
'mysql_field_name',
'mysql_field_seek',
'mysql_field_table',
'mysql_field_type',
'mysql_free_result',
'mysql_get_client_info',
'mysql_get_host_info',
'mysql_get_proto_info',
'mysql_get_server_info',
'mysql_info',
'mysql_insert_id',
'mysql_list_dbs',
'mysql_list_fields',
'mysql_list_processes',
'mysql_list_tables',
'mysql_num_fields',
'mysql_num_rows',
'mysql_pconnect',
'mysql_ping',
'mysql_query',
'mysql_real_escape_string',
'mysql_result',
'mysql_select_db',
'mysql_set_charset',
'mysql_stat',
'mysql_tablename',
'mysql_thread_id',
'mysql_unbuffered_query'],
'NSAPI': ['nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'],
'Ncurses': ['ncurses_addch',
'ncurses_addchnstr',
'ncurses_addchstr',
'ncurses_addnstr',
'ncurses_addstr',
'ncurses_assume_default_colors',
'ncurses_attroff',
'ncurses_attron',
'ncurses_attrset',
'ncurses_baudrate',
'ncurses_beep',
'ncurses_bkgd',
'ncurses_bkgdset',
'ncurses_border',
'ncurses_bottom_panel',
'ncurses_can_change_color',
'ncurses_cbreak',
'ncurses_clear',
'ncurses_clrtobot',
'ncurses_clrtoeol',
'ncurses_color_content',
'ncurses_color_set',
'ncurses_curs_set',
'ncurses_def_prog_mode',
'ncurses_def_shell_mode',
'ncurses_define_key',
'ncurses_del_panel',
'ncurses_delay_output',
'ncurses_delch',
'ncurses_deleteln',
'ncurses_delwin',
'ncurses_doupdate',
'ncurses_echo',
'ncurses_echochar',
'ncurses_end',
'ncurses_erase',
'ncurses_erasechar',
'ncurses_filter',
'ncurses_flash',
'ncurses_flushinp',
'ncurses_getch',
'ncurses_getmaxyx',
'ncurses_getmouse',
'ncurses_getyx',
'ncurses_halfdelay',
'ncurses_has_colors',
'ncurses_has_ic',
'ncurses_has_il',
'ncurses_has_key',
'ncurses_hide_panel',
'ncurses_hline',
'ncurses_inch',
'ncurses_init_color',
'ncurses_init_pair',
'ncurses_init',
'ncurses_insch',
'ncurses_insdelln',
'ncurses_insertln',
'ncurses_insstr',
'ncurses_instr',
'ncurses_isendwin',
'ncurses_keyok',
'ncurses_keypad',
'ncurses_killchar',
'ncurses_longname',
'ncurses_meta',
'ncurses_mouse_trafo',
'ncurses_mouseinterval',
'ncurses_mousemask',
'ncurses_move_panel',
'ncurses_move',
'ncurses_mvaddch',
'ncurses_mvaddchnstr',
'ncurses_mvaddchstr',
'ncurses_mvaddnstr',
'ncurses_mvaddstr',
'ncurses_mvcur',
'ncurses_mvdelch',
'ncurses_mvgetch',
'ncurses_mvhline',
'ncurses_mvinch',
'ncurses_mvvline',
'ncurses_mvwaddstr',
'ncurses_napms',
'ncurses_new_panel',
'ncurses_newpad',
'ncurses_newwin',
'ncurses_nl',
'ncurses_nocbreak',
'ncurses_noecho',
'ncurses_nonl',
'ncurses_noqiflush',
'ncurses_noraw',
'ncurses_pair_content',
'ncurses_panel_above',
'ncurses_panel_below',
'ncurses_panel_window',
'ncurses_pnoutrefresh',
'ncurses_prefresh',
'ncurses_putp',
'ncurses_qiflush',
'ncurses_raw',
'ncurses_refresh',
'ncurses_replace_panel',
'ncurses_reset_prog_mode',
'ncurses_reset_shell_mode',
'ncurses_resetty',
'ncurses_savetty',
'ncurses_scr_dump',
'ncurses_scr_init',
'ncurses_scr_restore',
'ncurses_scr_set',
'ncurses_scrl',
'ncurses_show_panel',
'ncurses_slk_attr',
'ncurses_slk_attroff',
'ncurses_slk_attron',
'ncurses_slk_attrset',
'ncurses_slk_clear',
'ncurses_slk_color',
'ncurses_slk_init',
'ncurses_slk_noutrefresh',
'ncurses_slk_refresh',
'ncurses_slk_restore',
'ncurses_slk_set',
'ncurses_slk_touch',
'ncurses_standend',
'ncurses_standout',
'ncurses_start_color',
'ncurses_termattrs',
'ncurses_termname',
'ncurses_timeout',
'ncurses_top_panel',
'ncurses_typeahead',
'ncurses_ungetch',
'ncurses_ungetmouse',
'ncurses_update_panels',
'ncurses_use_default_colors',
'ncurses_use_env',
'ncurses_use_extended_names',
'ncurses_vidattr',
'ncurses_vline',
'ncurses_waddch',
'ncurses_waddstr',
'ncurses_wattroff',
'ncurses_wattron',
'ncurses_wattrset',
'ncurses_wborder',
'ncurses_wclear',
'ncurses_wcolor_set',
'ncurses_werase',
'ncurses_wgetch',
'ncurses_whline',
'ncurses_wmouse_trafo',
'ncurses_wmove',
'ncurses_wnoutrefresh',
'ncurses_wrefresh',
'ncurses_wstandend',
'ncurses_wstandout',
'ncurses_wvline'],
'Network': ['checkdnsrr',
'closelog',
'define_syslog_variables',
'dns_check_record',
'dns_get_mx',
'dns_get_record',
'fsockopen',
'gethostbyaddr',
'gethostbyname',
'gethostbynamel'],
'Newt': ['newt_bell',
'newt_button_bar',
'newt_button',
'newt_centered_window',
'newt_checkbox_get_value',
'newt_checkbox_set_flags',
'newt_checkbox_set_value',
'newt_checkbox_tree_add_item',
'newt_checkbox_tree_find_item',
'newt_checkbox_tree_get_current',
'newt_checkbox_tree_get_entry_value',
'newt_checkbox_tree_get_multi_selection',
'newt_checkbox_tree_get_selection',
'newt_checkbox_tree_multi',
'newt_checkbox_tree_set_current',
'newt_checkbox_tree_set_entry_value',
'newt_checkbox_tree_set_entry',
'newt_checkbox_tree_set_width',
'newt_checkbox_tree',
'newt_checkbox',
'newt_clear_key_buffer'],
'OAuth': ['oauth_get_sbs', 'oauth_urlencode'],
'OCI8': ['oci_bind_array_by_name',
'oci_bind_by_name',
'oci_cancel',
'oci_close',
'oci_commit',
'oci_connect',
'oci_define_by_name',
'oci_error',
'oci_execute',
'oci_fetch_all',
'oci_fetch_array',
'oci_fetch_assoc',
'oci_fetch_object',
'oci_fetch_row',
'oci_fetch',
'oci_field_is_null',
'oci_field_name',
'oci_field_precision',
'oci_field_scale',
'oci_field_size',
'oci_field_type_raw',
'oci_field_type',
'oci_free_statement',
'oci_internal_debug',
'oci_lob_copy',
'oci_lob_is_equal',
'oci_new_collection',
'oci_new_connect',
'oci_new_cursor',
'oci_new_descriptor',
'oci_num_fields',
'oci_num_rows',
'oci_parse',
'oci_password_change',
'oci_pconnect',
'oci_result',
'oci_rollback',
'oci_server_version',
'oci_set_action',
'oci_set_client_identifier',
'oci_set_client_info',
'oci_set_edition',
'oci_set_module_name',
'oci_set_prefetch',
'oci_statement_type'],
'ODBC': ['odbc_autocommit',
'odbc_binmode',
'odbc_close_all',
'odbc_close',
'odbc_columnprivileges',
'odbc_columns',
'odbc_commit',
'odbc_connect',
'odbc_cursor',
'odbc_data_source',
'odbc_do',
'odbc_error',
'odbc_errormsg',
'odbc_exec',
'odbc_execute',
'odbc_fetch_array',
'odbc_fetch_into',
'odbc_fetch_object',
'odbc_fetch_row',
'odbc_field_len',
'odbc_field_name',
'odbc_field_num',
'odbc_field_precision',
'odbc_field_scale',
'odbc_field_type',
'odbc_foreignkeys',
'odbc_free_result',
'odbc_gettypeinfo',
'odbc_longreadlen',
'odbc_next_result',
'odbc_num_fields',
'odbc_num_rows',
'odbc_pconnect',
'odbc_prepare',
'odbc_primarykeys',
'odbc_procedurecolumns',
'odbc_procedures',
'odbc_result_all',
'odbc_result',
'odbc_rollback',
'odbc_setoption',
'odbc_specialcolumns',
'odbc_statistics',
'odbc_tableprivileges',
'odbc_tables'],
'Object Aggregation': ['aggregate_info',
'aggregate_methods_by_list',
'aggregate_methods_by_regexp'],
'Object overloading': ['overload'],
'OpenAL': ['openal_buffer_create',
'openal_buffer_data',
'openal_buffer_destroy',
'openal_buffer_get',
'openal_buffer_loadwav',
'openal_context_create',
'openal_context_current',
'openal_context_destroy',
'openal_context_process',
'openal_context_suspend',
'openal_device_close',
'openal_device_open',
'openal_listener_get',
'openal_listener_set',
'openal_source_create',
'openal_source_destroy',
'openal_source_get',
'openal_source_pause',
'openal_source_play',
'openal_source_rewind',
'openal_source_set',
'openal_source_stop',
'openal_stream'],
'OpenSSL': ['openssl_csr_export_to_file',
'openssl_csr_export',
'openssl_csr_get_public_key',
'openssl_csr_get_subject',
'openssl_csr_new',
'openssl_csr_sign',
'openssl_decrypt',
'openssl_dh_compute_key',
'openssl_digest',
'openssl_encrypt',
'openssl_error_string',
'openssl_free_key',
'openssl_get_cipher_methods',
'openssl_get_md_methods',
'openssl_get_privatekey',
'openssl_get_publickey',
'openssl_open',
'openssl_pkcs12_export_to_file',
'openssl_pkcs12_export',
'openssl_pkcs12_read',
'openssl_pkcs7_decrypt',
'openssl_pkcs7_encrypt',
'openssl_pkcs7_sign',
'openssl_pkcs7_verify',
'openssl_pkey_export_to_file',
'openssl_pkey_export',
'openssl_pkey_free',
'openssl_pkey_get_details',
'openssl_pkey_get_private',
'openssl_pkey_get_public',
'openssl_pkey_new',
'openssl_private_decrypt',
'openssl_private_encrypt',
'openssl_public_decrypt',
'openssl_public_encrypt',
'openssl_random_pseudo_bytes',
'openssl_seal',
'openssl_sign',
'openssl_verify',
'openssl_x509_check_private_key',
'openssl_x509_checkpurpose',
'openssl_x509_export_to_file',
'openssl_x509_export',
'openssl_x509_free',
'openssl_x509_parse',
'openssl_x509_read'],
'Output Control': ['flush',
'ob_clean',
'ob_end_clean',
'ob_end_flush',
'ob_flush',
'ob_get_clean',
'ob_get_contents',
'ob_get_flush',
'ob_get_length',
'ob_get_level',
'ob_get_status',
'ob_gzhandler',
'ob_implicit_flush',
'ob_list_handlers',
'ob_start',
'output_add_rewrite_var',
'output_reset_rewrite_vars'],
'Ovrimos SQL': ['ovrimos_close',
'ovrimos_commit',
'ovrimos_connect',
'ovrimos_cursor',
'ovrimos_exec',
'ovrimos_execute',
'ovrimos_fetch_into',
'ovrimos_fetch_row',
'ovrimos_field_len',
'ovrimos_field_name',
'ovrimos_field_num',
'ovrimos_field_type',
'ovrimos_free_result',
'ovrimos_longreadlen',
'ovrimos_num_fields',
'ovrimos_num_rows',
'ovrimos_prepare',
'ovrimos_result_all',
'ovrimos_result',
'ovrimos_rollback'],
'PCNTL': ['pcntl_alarm',
'pcntl_exec',
'pcntl_fork',
'pcntl_getpriority',
'pcntl_setpriority',
'pcntl_signal_dispatch',
'pcntl_signal',
'pcntl_sigprocmask',
'pcntl_sigtimedwait',
'pcntl_sigwaitinfo',
'pcntl_wait',
'pcntl_waitpid',
'pcntl_wexitstatus',
'pcntl_wifexited',
'pcntl_wifsignaled',
'pcntl_wifstopped',
'pcntl_wstopsig',
'pcntl_wtermsig'],
'PCRE': ['preg_filter',
'preg_grep',
'preg_last_error',
'preg_match_all',
'preg_match',
'preg_quote',
'preg_replace_callback',
'preg_replace',
'preg_split'],
'PDF': ['PDF_activate_item',
'PDF_add_annotation',
'PDF_add_bookmark',
'PDF_add_launchlink',
'PDF_add_locallink',
'PDF_add_nameddest',
'PDF_add_note',
'PDF_add_outline',
'PDF_add_pdflink',
'PDF_add_table_cell',
'PDF_add_textflow',
'PDF_add_thumbnail',
'PDF_add_weblink',
'PDF_arc',
'PDF_arcn',
'PDF_attach_file',
'PDF_begin_document',
'PDF_begin_font',
'PDF_begin_glyph',
'PDF_begin_item',
'PDF_begin_layer',
'PDF_begin_page_ext',
'PDF_begin_page',
'PDF_begin_pattern',
'PDF_begin_template_ext',
'PDF_begin_template',
'PDF_circle',
'PDF_clip',
'PDF_close_image',
'PDF_close_pdi_page',
'PDF_close_pdi',
'PDF_close',
'PDF_closepath_fill_stroke',
'PDF_closepath_stroke',
'PDF_closepath',
'PDF_concat',
'PDF_continue_text',
'PDF_create_3dview',
'PDF_create_action',
'PDF_create_annotation',
'PDF_create_bookmark',
'PDF_create_field',
'PDF_create_fieldgroup',
'PDF_create_gstate',
'PDF_create_pvf',
'PDF_create_textflow',
'PDF_curveto',
'PDF_define_layer',
'PDF_delete_pvf',
'PDF_delete_table',
'PDF_delete_textflow',
'PDF_delete',
'PDF_encoding_set_char',
'PDF_end_document',
'PDF_end_font',
'PDF_end_glyph',
'PDF_end_item',
'PDF_end_layer',
'PDF_end_page_ext',
'PDF_end_page',
'PDF_end_pattern',
'PDF_end_template',
'PDF_endpath',
'PDF_fill_imageblock',
'PDF_fill_pdfblock',
'PDF_fill_stroke',
'PDF_fill_textblock',
'PDF_fill',
'PDF_findfont',
'PDF_fit_image',
'PDF_fit_pdi_page',
'PDF_fit_table',
'PDF_fit_textflow',
'PDF_fit_textline',
'PDF_get_apiname',
'PDF_get_buffer',
'PDF_get_errmsg',
'PDF_get_errnum',
'PDF_get_font',
'PDF_get_fontname',
'PDF_get_fontsize',
'PDF_get_image_height',
'PDF_get_image_width',
'PDF_get_majorversion',
'PDF_get_minorversion',
'PDF_get_parameter',
'PDF_get_pdi_parameter',
'PDF_get_pdi_value',
'PDF_get_value',
'PDF_info_font',
'PDF_info_matchbox',
'PDF_info_table',
'PDF_info_textflow',
'PDF_info_textline',
'PDF_initgraphics',
'PDF_lineto',
'PDF_load_3ddata',
'PDF_load_font',
'PDF_load_iccprofile',
'PDF_load_image',
'PDF_makespotcolor',
'PDF_moveto',
'PDF_new',
'PDF_open_ccitt',
'PDF_open_file',
'PDF_open_gif',
'PDF_open_image_file',
'PDF_open_image',
'PDF_open_jpeg',
'PDF_open_memory_image',
'PDF_open_pdi_document',
'PDF_open_pdi_page',
'PDF_open_pdi',
'PDF_open_tiff',
'PDF_pcos_get_number',
'PDF_pcos_get_stream',
'PDF_pcos_get_string',
'PDF_place_image',
'PDF_place_pdi_page',
'PDF_process_pdi',
'PDF_rect',
'PDF_restore',
'PDF_resume_page',
'PDF_rotate',
'PDF_save',
'PDF_scale',
'PDF_set_border_color',
'PDF_set_border_dash',
'PDF_set_border_style',
'PDF_set_char_spacing',
'PDF_set_duration',
'PDF_set_gstate',
'PDF_set_horiz_scaling',
'PDF_set_info_author',
'PDF_set_info_creator',
'PDF_set_info_keywords',
'PDF_set_info_subject',
'PDF_set_info_title',
'PDF_set_info',
'PDF_set_layer_dependency',
'PDF_set_leading',
'PDF_set_parameter',
'PDF_set_text_matrix',
'PDF_set_text_pos',
'PDF_set_text_rendering',
'PDF_set_text_rise',
'PDF_set_value',
'PDF_set_word_spacing',
'PDF_setcolor',
'PDF_setdash',
'PDF_setdashpattern',
'PDF_setflat',
'PDF_setfont',
'PDF_setgray_fill',
'PDF_setgray_stroke',
'PDF_setgray',
'PDF_setlinecap',
'PDF_setlinejoin',
'PDF_setlinewidth',
'PDF_setmatrix',
'PDF_setmiterlimit',
'PDF_setpolydash',
'PDF_setrgbcolor_fill',
'PDF_setrgbcolor_stroke',
'PDF_setrgbcolor',
'PDF_shading_pattern',
'PDF_shading',
'PDF_shfill',
'PDF_show_boxed',
'PDF_show_xy',
'PDF_show',
'PDF_skew',
'PDF_stringwidth',
'PDF_stroke',
'PDF_suspend_page',
'PDF_translate',
'PDF_utf16_to_utf8',
'PDF_utf32_to_utf16',
'PDF_utf8_to_utf16'],
'PHP Options/Info': ['assert_options',
'assert',
'dl',
'extension_loaded',
'gc_collect_cycles',
'gc_disable',
'gc_enable',
'gc_enabled',
'get_cfg_var',
'get_current_user',
'get_defined_constants',
'get_extension_funcs',
'get_include_path',
'get_included_files',
'get_loaded_extensions',
'get_magic_quotes_gpc',
'get_magic_quotes_runtime',
'get_required_files',
'getenv',
'getlastmod',
'getmygid',
'getmyinode',
'getmypid',
'getmyuid',
'getopt',
'getrusage',
'ini_alter',
'ini_get_all',
'ini_get',
'ini_restore',
'ini_set',
'magic_quotes_runtime',
'memory_get_peak_usage',
'memory_get_usage',
'php_ini_loaded_file',
'php_ini_scanned_files',
'php_logo_guid',
'php_sapi_name',
'php_uname',
'phpcredits',
'phpinfo',
'phpversion',
'putenv',
'restore_include_path',
'set_include_path',
'set_magic_quotes_runtime',
'set_time_limit',
'sys_get_temp_dir',
'version_compare',
'zend_logo_guid',
'zend_thread_id',
'zend_version'],
'POSIX': ['posix_access',
'posix_ctermid',
'posix_errno',
'posix_get_last_error',
'posix_getcwd',
'posix_getegid',
'posix_geteuid',
'posix_getgid',
'posix_getgrgid',
'posix_getgrnam',
'posix_getgroups',
'posix_getlogin',
'posix_getpgid',
'posix_getpgrp',
'posix_getpid',
'posix_getppid',
'posix_getpwnam',
'posix_getpwuid',
'posix_getrlimit',
'posix_getsid',
'posix_getuid',
'posix_initgroups',
'posix_isatty',
'posix_kill',
'posix_mkfifo',
'posix_mknod',
'posix_setegid',
'posix_seteuid',
'posix_setgid',
'posix_setpgid',
'posix_setsid',
'posix_setuid',
'posix_strerror',
'posix_times',
'posix_ttyname',
'posix_uname'],
'POSIX Regex': ['ereg_replace',
'ereg',
'eregi_replace',
'eregi',
'split',
'spliti',
'sql_regcase'],
'PS': ['ps_add_bookmark',
'ps_add_launchlink',
'ps_add_locallink',
'ps_add_note',
'ps_add_pdflink',
'ps_add_weblink',
'ps_arc',
'ps_arcn',
'ps_begin_page',
'ps_begin_pattern',
'ps_begin_template',
'ps_circle',
'ps_clip',
'ps_close_image',
'ps_close',
'ps_closepath_stroke',
'ps_closepath',
'ps_continue_text',
'ps_curveto',
'ps_delete',
'ps_end_page',
'ps_end_pattern',
'ps_end_template',
'ps_fill_stroke',
'ps_fill',
'ps_findfont',
'ps_get_buffer',
'ps_get_parameter',
'ps_get_value',
'ps_hyphenate',
'ps_include_file',
'ps_lineto',
'ps_makespotcolor',
'ps_moveto',
'ps_new',
'ps_open_file',
'ps_open_image_file',
'ps_open_image',
'ps_open_memory_image',
'ps_place_image',
'ps_rect',
'ps_restore',
'ps_rotate',
'ps_save',
'ps_scale',
'ps_set_border_color',
'ps_set_border_dash',
'ps_set_border_style',
'ps_set_info',
'ps_set_parameter',
'ps_set_text_pos',
'ps_set_value',
'ps_setcolor',
'ps_setdash',
'ps_setflat',
'ps_setfont',
'ps_setgray',
'ps_setlinecap',
'ps_setlinejoin',
'ps_setlinewidth',
'ps_setmiterlimit',
'ps_setoverprintmode',
'ps_setpolydash',
'ps_shading_pattern',
'ps_shading',
'ps_shfill',
'ps_show_boxed',
'ps_show_xy2',
'ps_show_xy',
'ps_show2',
'ps_show',
'ps_string_geometry',
'ps_stringwidth',
'ps_stroke',
'ps_symbol_name',
'ps_symbol_width',
'ps_symbol',
'ps_translate'],
'Paradox': ['px_close',
'px_create_fp',
'px_date2string',
'px_delete_record',
'px_delete',
'px_get_field',
'px_get_info',
'px_get_parameter',
'px_get_record',
'px_get_schema',
'px_get_value',
'px_insert_record',
'px_new',
'px_numfields',
'px_numrecords',
'px_open_fp',
'px_put_record',
'px_retrieve_record',
'px_set_blob_file',
'px_set_parameter',
'px_set_tablename',
'px_set_targetencoding',
'px_set_value',
'px_timestamp2string',
'px_update_record'],
'Parsekit': ['parsekit_compile_file',
'parsekit_compile_string',
'parsekit_func_arginfo'],
'PostgreSQL': ['pg_affected_rows',
'pg_cancel_query',
'pg_client_encoding',
'pg_close',
'pg_connect',
'pg_connection_busy',
'pg_connection_reset',
'pg_connection_status',
'pg_convert',
'pg_copy_from',
'pg_copy_to',
'pg_dbname',
'pg_delete',
'pg_end_copy',
'pg_escape_bytea',
'pg_escape_string',
'pg_execute',
'pg_fetch_all_columns',
'pg_fetch_all',
'pg_fetch_array',
'pg_fetch_assoc',
'pg_fetch_object',
'pg_fetch_result',
'pg_fetch_row',
'pg_field_is_null',
'pg_field_name',
'pg_field_num',
'pg_field_prtlen',
'pg_field_size',
'pg_field_table',
'pg_field_type_oid',
'pg_field_type',
'pg_free_result',
'pg_get_notify',
'pg_get_pid',
'pg_get_result',
'pg_host',
'pg_insert',
'pg_last_error',
'pg_last_notice',
'pg_last_oid',
'pg_lo_close',
'pg_lo_create',
'pg_lo_export',
'pg_lo_import',
'pg_lo_open',
'pg_lo_read_all',
'pg_lo_read',
'pg_lo_seek',
'pg_lo_tell',
'pg_lo_unlink',
'pg_lo_write',
'pg_meta_data',
'pg_num_fields',
'pg_num_rows',
'pg_options',
'pg_parameter_status',
'pg_pconnect',
'pg_ping',
'pg_port',
'pg_prepare'],
'Printer': ['printer_abort',
'printer_close',
'printer_create_brush',
'printer_create_dc',
'printer_create_font',
'printer_create_pen',
'printer_delete_brush',
'printer_delete_dc',
'printer_delete_font',
'printer_delete_pen',
'printer_draw_bmp',
'printer_draw_chord',
'printer_draw_elipse',
'printer_draw_line',
'printer_draw_pie',
'printer_draw_rectangle',
'printer_draw_roundrect',
'printer_draw_text',
'printer_end_doc',
'printer_end_page',
'printer_get_option',
'printer_list',
'printer_logical_fontheight',
'printer_open',
'printer_select_brush',
'printer_select_font',
'printer_select_pen',
'printer_set_option',
'printer_start_doc',
'printer_start_page',
'printer_write'],
'Program execution': ['escapeshellarg',
'escapeshellcmd',
'exec',
'passthru',
'proc_close',
'proc_get_status',
'proc_nice',
'proc_open',
'proc_terminate',
'shell_exec',
'system'],
'Pspell': ['pspell_add_to_personal',
'pspell_add_to_session',
'pspell_check',
'pspell_clear_session',
'pspell_config_create',
'pspell_config_data_dir',
'pspell_config_dict_dir',
'pspell_config_ignore',
'pspell_config_mode',
'pspell_config_personal',
'pspell_config_repl',
'pspell_config_runtogether',
'pspell_config_save_repl'],
'RPM Reader': ['rpm_close',
'rpm_get_tag',
'rpm_is_valid',
'rpm_open',
'rpm_version'],
'RRD': ['rrd_create',
'rrd_error',
'rrd_fetch',
'rrd_first',
'rrd_graph',
'rrd_info',
'rrd_last',
'rrd_lastupdate',
'rrd_restore',
'rrd_tune',
'rrd_update',
'rrd_xport'],
'Radius': ['radius_acct_open',
'radius_add_server',
'radius_auth_open',
'radius_close',
'radius_config',
'radius_create_request',
'radius_cvt_addr',
'radius_cvt_int',
'radius_cvt_string',
'radius_demangle_mppe_key',
'radius_demangle',
'radius_get_attr',
'radius_get_vendor_attr',
'radius_put_addr',
'radius_put_attr',
'radius_put_int',
'radius_put_string',
'radius_put_vendor_addr',
'radius_put_vendor_attr',
'radius_put_vendor_int',
'radius_put_vendor_string',
'radius_request_authenticator',
'radius_send_request',
'radius_server_secret',
'radius_strerror'],
'Rar': ['rar_wrapper_cache_stats'],
'Readline': ['readline_add_history',
'readline_callback_handler_install',
'readline_callback_handler_remove',
'readline_callback_read_char',
'readline_clear_history',
'readline_completion_function',
'readline_info',
'readline_list_history',
'readline_on_new_line',
'readline_read_history',
'readline_redisplay',
'readline_write_history',
'readline'],
'Recode': ['recode_file', 'recode_string', 'recode'],
'SNMP': ['snmp_get_quick_print',
'snmp_get_valueretrieval',
'snmp_read_mib',
'snmp_set_enum_print',
'snmp_set_oid_numeric_print',
'snmp_set_oid_output_format',
'snmp_set_quick_print',
'snmp_set_valueretrieval',
'snmp2_get',
'snmp2_getnext',
'snmp2_real_walk',
'snmp2_set',
'snmp2_walk',
'snmp3_get',
'snmp3_getnext',
'snmp3_real_walk',
'snmp3_set',
'snmp3_walk',
'snmpget',
'snmpgetnext',
'snmprealwalk',
'snmpset',
'snmpwalk',
'snmpwalkoid'],
'SOAP': ['is_soap_fault', 'use_soap_error_handler'],
'SPL': ['class_implements',
'class_parents',
'iterator_apply',
'iterator_count',
'iterator_to_array',
'spl_autoload_call',
'spl_autoload_extensions',
'spl_autoload_functions',
'spl_autoload_register',
'spl_autoload_unregister',
'spl_autoload',
'spl_classes',
'spl_object_hash'],
'SPPLUS': ['calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'],
'SQLite': ['sqlite_array_query', 'sqlite_busy_timeout', 'sqlite_changes'],
'SSH2': ['ssh2_auth_hostbased_file',
'ssh2_auth_none',
'ssh2_auth_password',
'ssh2_auth_pubkey_file',
'ssh2_connect',
'ssh2_exec',
'ssh2_fetch_stream',
'ssh2_fingerprint',
'ssh2_methods_negotiated',
'ssh2_publickey_add',
'ssh2_publickey_init',
'ssh2_publickey_list',
'ssh2_publickey_remove',
'ssh2_scp_recv',
'ssh2_scp_send',
'ssh2_sftp_lstat',
'ssh2_sftp_mkdir',
'ssh2_sftp_readlink',
'ssh2_sftp_realpath',
'ssh2_sftp_rename',
'ssh2_sftp_rmdir',
'ssh2_sftp_stat',
'ssh2_sftp_symlink',
'ssh2_sftp_unlink',
'ssh2_sftp',
'ssh2_shell',
'ssh2_tunnel'],
'SVN': ['svn_add',
'svn_auth_get_parameter',
'svn_auth_set_parameter',
'svn_blame',
'svn_cat',
'svn_checkout',
'svn_cleanup',
'svn_client_version',
'svn_commit',
'svn_delete',
'svn_diff',
'svn_export',
'svn_fs_abort_txn',
'svn_fs_apply_text',
'svn_fs_begin_txn2',
'svn_fs_change_node_prop',
'svn_fs_check_path',
'svn_fs_contents_changed',
'svn_fs_copy',
'svn_fs_delete',
'svn_fs_dir_entries',
'svn_fs_file_contents',
'svn_fs_file_length',
'svn_fs_is_dir',
'svn_fs_is_file',
'svn_fs_make_dir',
'svn_fs_make_file',
'svn_fs_node_created_rev',
'svn_fs_node_prop',
'svn_fs_props_changed',
'svn_fs_revision_prop',
'svn_fs_revision_root',
'svn_fs_txn_root',
'svn_fs_youngest_rev',
'svn_import',
'svn_log',
'svn_ls',
'svn_mkdir',
'svn_repos_create',
'svn_repos_fs_begin_txn_for_commit',
'svn_repos_fs_commit_txn',
'svn_repos_fs',
'svn_repos_hotcopy',
'svn_repos_open',
'svn_repos_recover',
'svn_revert',
'svn_status',
'svn_update'],
'SWF': ['swf_actiongeturl',
'swf_actiongotoframe',
'swf_actiongotolabel',
'swf_actionnextframe',
'swf_actionplay',
'swf_actionprevframe',
'swf_actionsettarget',
'swf_actionstop',
'swf_actiontogglequality',
'swf_actionwaitforframe',
'swf_addbuttonrecord',
'swf_addcolor',
'swf_closefile',
'swf_definebitmap',
'swf_definefont',
'swf_defineline',
'swf_definepoly',
'swf_definerect',
'swf_definetext',
'swf_endbutton',
'swf_enddoaction',
'swf_endshape',
'swf_endsymbol',
'swf_fontsize',
'swf_fontslant',
'swf_fonttracking',
'swf_getbitmapinfo',
'swf_getfontinfo',
'swf_getframe',
'swf_labelframe',
'swf_lookat',
'swf_modifyobject',
'swf_mulcolor',
'swf_nextid',
'swf_oncondition',
'swf_openfile',
'swf_ortho2',
'swf_ortho',
'swf_perspective',
'swf_placeobject',
'swf_polarview',
'swf_popmatrix',
'swf_posround',
'swf_pushmatrix',
'swf_removeobject',
'swf_rotate',
'swf_scale',
'swf_setfont',
'swf_setframe',
'swf_shapearc',
'swf_shapecurveto3',
'swf_shapecurveto',
'swf_shapefillbitmapclip',
'swf_shapefillbitmaptile',
'swf_shapefilloff',
'swf_shapefillsolid',
'swf_shapelinesolid',
'swf_shapelineto',
'swf_shapemoveto',
'swf_showframe',
'swf_startbutton',
'swf_startdoaction',
'swf_startshape',
'swf_startsymbol',
'swf_textwidth',
'swf_translate',
'swf_viewport'],
'Semaphore': ['ftok',
'msg_get_queue',
'msg_queue_exists',
'msg_receive',
'msg_remove_queue',
'msg_send',
'msg_set_queue',
'msg_stat_queue',
'sem_acquire',
'sem_get',
'sem_release',
'sem_remove',
'shm_attach',
'shm_detach',
'shm_get_var',
'shm_has_var',
'shm_put_var',
'shm_remove_var',
'shm_remove'],
'Session': ['session_cache_expire',
'session_cache_limiter',
'session_commit',
'session_decode',
'session_destroy',
'session_encode',
'session_get_cookie_params',
'session_id',
'session_is_registered',
'session_module_name',
'session_name',
'session_regenerate_id',
'session_register',
'session_save_path',
'session_set_cookie_params',
'session_set_save_handler',
'session_start',
'session_unregister',
'session_unset',
'session_write_close'],
'Session PgSQL': ['session_pgsql_add_error',
'session_pgsql_get_error',
'session_pgsql_get_field',
'session_pgsql_reset',
'session_pgsql_set_field',
'session_pgsql_status'],
'Shared Memory': ['shmop_close',
'shmop_delete',
'shmop_open',
'shmop_read',
'shmop_size',
'shmop_write'],
'SimpleXML': ['simplexml_import_dom',
'simplexml_load_file',
'simplexml_load_string'],
'Socket': ['socket_accept',
'socket_bind',
'socket_clear_error',
'socket_close',
'socket_connect',
'socket_create_listen',
'socket_create_pair',
'socket_create',
'socket_get_option',
'socket_getpeername',
'socket_getsockname',
'socket_last_error',
'socket_listen',
'socket_read',
'socket_recv',
'socket_recvfrom',
'socket_select',
'socket_send',
'socket_sendto',
'socket_set_block',
'socket_set_nonblock',
'socket_set_option',
'socket_shutdown',
'socket_strerror',
'socket_write'],
'Solr': ['solr_get_version'],
'Statistic': ['stats_absolute_deviation',
'stats_cdf_beta',
'stats_cdf_binomial',
'stats_cdf_cauchy',
'stats_cdf_chisquare',
'stats_cdf_exponential',
'stats_cdf_f',
'stats_cdf_gamma',
'stats_cdf_laplace',
'stats_cdf_logistic',
'stats_cdf_negative_binomial',
'stats_cdf_noncentral_chisquare',
'stats_cdf_noncentral_f',
'stats_cdf_poisson',
'stats_cdf_t',
'stats_cdf_uniform',
'stats_cdf_weibull',
'stats_covariance',
'stats_den_uniform',
'stats_dens_beta',
'stats_dens_cauchy',
'stats_dens_chisquare',
'stats_dens_exponential',
'stats_dens_f',
'stats_dens_gamma',
'stats_dens_laplace',
'stats_dens_logistic',
'stats_dens_negative_binomial',
'stats_dens_normal',
'stats_dens_pmf_binomial',
'stats_dens_pmf_hypergeometric',
'stats_dens_pmf_poisson',
'stats_dens_t',
'stats_dens_weibull',
'stats_harmonic_mean',
'stats_kurtosis',
'stats_rand_gen_beta',
'stats_rand_gen_chisquare',
'stats_rand_gen_exponential',
'stats_rand_gen_f',
'stats_rand_gen_funiform',
'stats_rand_gen_gamma',
'stats_rand_gen_ibinomial_negative',
'stats_rand_gen_ibinomial',
'stats_rand_gen_int',
'stats_rand_gen_ipoisson',
'stats_rand_gen_iuniform',
'stats_rand_gen_noncenral_chisquare',
'stats_rand_gen_noncentral_f',
'stats_rand_gen_noncentral_t',
'stats_rand_gen_normal',
'stats_rand_gen_t',
'stats_rand_get_seeds',
'stats_rand_phrase_to_seeds',
'stats_rand_ranf',
'stats_rand_setall',
'stats_skew',
'stats_standard_deviation',
'stats_stat_binomial_coef',
'stats_stat_correlation',
'stats_stat_gennch',
'stats_stat_independent_t',
'stats_stat_innerproduct',
'stats_stat_noncentral_t',
'stats_stat_paired_t',
'stats_stat_percentile',
'stats_stat_powersum',
'stats_variance'],
'Stomp': ['stomp_connect_error', 'stomp_version'],
'Stream': ['set_socket_blocking',
'stream_bucket_append',
'stream_bucket_make_writeable',
'stream_bucket_new',
'stream_bucket_prepend',
'stream_context_create',
'stream_context_get_default',
'stream_context_get_options',
'stream_context_get_params',
'stream_context_set_default',
'stream_context_set_option',
'stream_context_set_params',
'stream_copy_to_stream',
'stream_encoding',
'stream_filter_append',
'stream_filter_prepend',
'stream_filter_register',
'stream_filter_remove',
'stream_get_contents',
'stream_get_filters',
'stream_get_line',
'stream_get_meta_data',
'stream_get_transports',
'stream_get_wrappers',
'stream_is_local',
'stream_notification_callback',
'stream_register_wrapper',
'stream_resolve_include_path',
'stream_select'],
'String': ['addcslashes',
'addslashes',
'bin2hex',
'chop',
'chr',
'chunk_split',
'convert_cyr_string',
'convert_uudecode',
'convert_uuencode',
'count_chars',
'crc32',
'crypt',
'echo',
'explode',
'fprintf',
'get_html_translation_table',
'hebrev',
'hebrevc',
'html_entity_decode',
'htmlentities',
'htmlspecialchars_decode',
'htmlspecialchars',
'implode',
'join',
'lcfirst',
'levenshtein',
'localeconv',
'ltrim',
'md5_file',
'md5',
'metaphone',
'money_format',
'nl_langinfo',
'nl2br',
'number_format',
'ord',
'parse_str',
'print',
'printf',
'quoted_printable_decode',
'quoted_printable_encode',
'quotemeta',
'rtrim',
'setlocale',
'sha1_file',
'sha1',
'similar_text',
'soundex',
'sprintf',
'sscanf',
'str_getcsv',
'str_ireplace',
'str_pad',
'str_repeat',
'str_replace',
'str_rot13',
'str_shuffle',
'str_split',
'str_word_count',
'strcasecmp',
'strchr',
'strcmp',
'strcoll',
'strcspn',
'strip_tags',
'stripcslashes',
'stripos',
'stripslashes',
'stristr',
'strlen',
'strnatcasecmp',
'strnatcmp',
'strncasecmp',
'strncmp',
'strpbrk',
'strpos',
'strrchr',
'strrev',
'strripos',
'strrpos',
'strspn'],
'Sybase': ['sybase_affected_rows',
'sybase_close',
'sybase_connect',
'sybase_data_seek',
'sybase_deadlock_retry_count',
'sybase_fetch_array',
'sybase_fetch_assoc',
'sybase_fetch_field',
'sybase_fetch_object',
'sybase_fetch_row',
'sybase_field_seek',
'sybase_free_result',
'sybase_get_last_message',
'sybase_min_client_severity',
'sybase_min_error_severity',
'sybase_min_message_severity',
'sybase_min_server_severity',
'sybase_num_fields',
'sybase_num_rows',
'sybase_pconnect',
'sybase_query',
'sybase_result',
'sybase_select_db',
'sybase_set_message_handler',
'sybase_unbuffered_query'],
'TCP': ['tcpwrap_check'],
'Tidy': ['ob_tidyhandler',
'tidy_access_count',
'tidy_config_count',
'tidy_error_count',
'tidy_get_error_buffer',
'tidy_get_output',
'tidy_load_config',
'tidy_reset_config',
'tidy_save_config',
'tidy_set_encoding',
'tidy_setopt',
'tidy_warning_count'],
'Tokenizer': ['token_get_all', 'token_name'],
'URL': ['base64_decode',
'base64_encode',
'get_headers',
'get_meta_tags',
'http_build_query',
'parse_url',
'rawurldecode',
'rawurlencode',
'urldecode',
'urlencode'],
'Variable handling': ['debug_zval_dump',
'doubleval',
'empty',
'floatval',
'get_defined_vars',
'get_resource_type',
'gettype',
'import_request_variables',
'intval',
'is_array',
'is_bool',
'is_callable',
'is_double',
'is_float',
'is_int',
'is_integer',
'is_long',
'is_null',
'is_numeric',
'is_object',
'is_real',
'is_resource',
'is_scalar',
'is_string',
'isset',
'print_r',
'serialize',
'settype',
'strval',
'unserialize',
'unset',
'var_dump',
'var_export'],
'W32api': ['w32api_deftype',
'w32api_init_dtype',
'w32api_invoke_function',
'w32api_register_function',
'w32api_set_call_method'],
'WDDX': ['wddx_add_vars',
'wddx_deserialize',
'wddx_packet_end',
'wddx_packet_start',
'wddx_serialize_value',
'wddx_serialize_vars',
'wddx_unserialize'],
'WinCache': ['wincache_fcache_fileinfo',
'wincache_fcache_meminfo',
'wincache_lock',
'wincache_ocache_fileinfo',
'wincache_ocache_meminfo',
'wincache_refresh_if_changed',
'wincache_rplist_fileinfo',
'wincache_rplist_meminfo',
'wincache_scache_info',
'wincache_scache_meminfo',
'wincache_ucache_add',
'wincache_ucache_cas',
'wincache_ucache_clear',
'wincache_ucache_dec',
'wincache_ucache_delete',
'wincache_ucache_exists',
'wincache_ucache_get',
'wincache_ucache_inc',
'wincache_ucache_info',
'wincache_ucache_meminfo',
'wincache_ucache_set',
'wincache_unlock'],
'XML Parser': ['utf8_decode'],
'XML-RPC': ['xmlrpc_decode_request',
'xmlrpc_decode',
'xmlrpc_encode_request',
'xmlrpc_encode',
'xmlrpc_get_type',
'xmlrpc_is_fault',
'xmlrpc_parse_method_descriptions',
'xmlrpc_server_add_introspection_data',
'xmlrpc_server_call_method',
'xmlrpc_server_create',
'xmlrpc_server_destroy',
'xmlrpc_server_register_introspection_callback',
'xmlrpc_server_register_method',
'xmlrpc_set_type'],
'XSLT (PHP4)': ['xslt_backend_info',
'xslt_backend_name',
'xslt_backend_version',
'xslt_create',
'xslt_errno',
'xslt_error',
'xslt_free',
'xslt_getopt',
'xslt_process',
'xslt_set_base',
'xslt_set_encoding',
'xslt_set_error_handler',
'xslt_set_log',
'xslt_set_object',
'xslt_set_sax_handler',
'xslt_set_sax_handlers',
'xslt_set_scheme_handler',
'xslt_set_scheme_handlers',
'xslt_setopt'],
'YAZ': ['yaz_addinfo',
'yaz_ccl_conf',
'yaz_ccl_parse',
'yaz_close',
'yaz_connect',
'yaz_database',
'yaz_element',
'yaz_errno',
'yaz_error',
'yaz_es_result',
'yaz_es',
'yaz_get_option',
'yaz_hits',
'yaz_itemorder',
'yaz_present',
'yaz_range',
'yaz_record',
'yaz_scan_result',
'yaz_scan',
'yaz_schema',
'yaz_search',
'yaz_set_option',
'yaz_sort',
'yaz_syntax',
'yaz_wait'],
'YP/NIS': ['yp_all',
'yp_cat',
'yp_err_string',
'yp_errno',
'yp_first',
'yp_get_default_domain',
'yp_master',
'yp_match',
'yp_next',
'yp_order'],
'Yaml': ['yaml_emit_file',
'yaml_emit',
'yaml_parse_file',
'yaml_parse_url',
'yaml_parse'],
'Zip': ['zip_close',
'zip_entry_close',
'zip_entry_compressedsize',
'zip_entry_compressionmethod',
'zip_entry_filesize',
'zip_entry_name',
'zip_entry_open',
'zip_entry_read',
'zip_open',
'zip_read'],
'Zlib': ['gzclose',
'gzcompress',
'gzdecode',
'gzdeflate',
'gzencode',
'gzeof',
'gzfile',
'gzgetc',
'gzgets',
'gzgetss',
'gzinflate',
'gzopen',
'gzpassthru',
'gzputs',
'gzread',
'gzrewind',
'gzseek',
'gztell',
'gzuncompress',
'gzwrite',
'readgzfile',
'zlib_get_coding_type'],
'bcompiler': ['bcompiler_load_exe',
'bcompiler_load',
'bcompiler_parse_class',
'bcompiler_read',
'bcompiler_write_class',
'bcompiler_write_constant',
'bcompiler_write_exe_footer',
'bcompiler_write_file',
'bcompiler_write_footer',
'bcompiler_write_function',
'bcompiler_write_functions_from_file',
'bcompiler_write_header',
'bcompiler_write_included_filename'],
'cURL': ['curl_close',
'curl_copy_handle',
'curl_errno',
'curl_error',
'curl_exec',
'curl_getinfo',
'curl_init',
'curl_multi_add_handle',
'curl_multi_close',
'curl_multi_exec',
'curl_multi_getcontent',
'curl_multi_info_read',
'curl_multi_init',
'curl_multi_remove_handle',
'curl_multi_select',
'curl_setopt_array',
'curl_setopt',
'curl_version'],
'chdb': ['chdb_create'],
'dBase': ['dbase_add_record',
'dbase_close',
'dbase_create',
'dbase_delete_record',
'dbase_get_header_info',
'dbase_get_record_with_names',
'dbase_get_record',
'dbase_numfields',
'dbase_numrecords',
'dbase_open',
'dbase_pack',
'dbase_replace_record'],
'dbx': ['dbx_close',
'dbx_compare',
'dbx_connect',
'dbx_error',
'dbx_escape_string',
'dbx_fetch_row'],
'filePro': ['filepro_fieldcount',
'filepro_fieldname',
'filepro_fieldtype',
'filepro_fieldwidth',
'filepro_retrieve',
'filepro_rowcount',
'filepro'],
'iconv': ['iconv_get_encoding',
'iconv_mime_decode_headers',
'iconv_mime_decode',
'iconv_mime_encode',
'iconv_set_encoding',
'iconv_strlen',
'iconv_strpos',
'iconv_strrpos',
'iconv_substr',
'iconv',
'ob_iconv_handler'],
'inclued': ['inclued_get_data'],
'intl': ['intl_error_name',
'intl_get_error_code',
'intl_get_error_message',
'intl_is_failure'],
'libxml': ['libxml_clear_errors',
'libxml_disable_entity_loader',
'libxml_get_errors',
'libxml_get_last_error',
'libxml_set_streams_context',
'libxml_use_internal_errors'],
'mSQL': ['msql_affected_rows',
'msql_close',
'msql_connect',
'msql_create_db',
'msql_createdb',
'msql_data_seek',
'msql_db_query',
'msql_dbname',
'msql_drop_db',
'msql_error',
'msql_fetch_array',
'msql_fetch_field',
'msql_fetch_object',
'msql_fetch_row',
'msql_field_flags',
'msql_field_len',
'msql_field_name',
'msql_field_seek',
'msql_field_table',
'msql_field_type',
'msql_fieldflags',
'msql_fieldlen',
'msql_fieldname',
'msql_fieldtable',
'msql_fieldtype',
'msql_free_result',
'msql_list_dbs',
'msql_list_fields',
'msql_list_tables',
'msql_num_fields',
'msql_num_rows',
'msql_numfields',
'msql_numrows',
'msql_pconnect',
'msql_query',
'msql_regcase',
'msql_result',
'msql_select_db',
'msql_tablename',
'msql'],
'mnoGoSearch': ['udm_add_search_limit',
'udm_alloc_agent_array',
'udm_alloc_agent',
'udm_api_version',
'udm_cat_list',
'udm_cat_path',
'udm_check_charset',
'udm_check_stored',
'udm_clear_search_limits',
'udm_close_stored',
'udm_crc32',
'udm_errno',
'udm_error',
'udm_find',
'udm_free_agent',
'udm_free_ispell_data',
'udm_free_res',
'udm_get_doc_count',
'udm_get_res_field',
'udm_get_res_param',
'udm_hash32',
'udm_load_ispell_data',
'udm_open_stored',
'udm_set_agent_param'],
'mqseries': ['mqseries_back',
'mqseries_begin',
'mqseries_close',
'mqseries_cmit',
'mqseries_conn',
'mqseries_connx',
'mqseries_disc',
'mqseries_get',
'mqseries_inq',
'mqseries_open',
'mqseries_put1',
'mqseries_put',
'mqseries_set',
'mqseries_strerror'],
'mysqlnd_qc': ['mysqlnd_qc_change_handler',
'mysqlnd_qc_clear_cache',
'mysqlnd_qc_get_cache_info',
'mysqlnd_qc_get_core_stats',
'mysqlnd_qc_get_handler',
'mysqlnd_qc_get_query_trace_log',
'mysqlnd_qc_set_user_handlers'],
'qtdom': ['qdom_error', 'qdom_tree'],
'runkit': ['runkit_class_adopt',
'runkit_class_emancipate',
'runkit_constant_add',
'runkit_constant_redefine',
'runkit_constant_remove',
'runkit_function_add',
'runkit_function_copy',
'runkit_function_redefine',
'runkit_function_remove',
'runkit_function_rename',
'runkit_import',
'runkit_lint_file',
'runkit_lint',
'runkit_method_add',
'runkit_method_copy',
'runkit_method_redefine',
'runkit_method_remove',
'runkit_method_rename',
'runkit_return_value_used',
'runkit_sandbox_output_handler',
'runkit_superglobals'],
'ssdeep': ['ssdeep_fuzzy_compare',
'ssdeep_fuzzy_hash_filename',
'ssdeep_fuzzy_hash'],
'vpopmail': ['vpopmail_add_alias_domain_ex',
'vpopmail_add_alias_domain',
'vpopmail_add_domain_ex',
'vpopmail_add_domain',
'vpopmail_add_user',
'vpopmail_alias_add',
'vpopmail_alias_del_domain',
'vpopmail_alias_del',
'vpopmail_alias_get_all',
'vpopmail_alias_get',
'vpopmail_auth_user',
'vpopmail_del_domain_ex',
'vpopmail_del_domain',
'vpopmail_del_user',
'vpopmail_error',
'vpopmail_passwd',
'vpopmail_set_user_quota'],
'win32ps': ['win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'],
'win32service': ['win32_continue_service',
'win32_create_service',
'win32_delete_service',
'win32_get_last_control_message',
'win32_pause_service',
'win32_query_service_status',
'win32_set_service_status',
'win32_start_service_ctrl_dispatcher',
'win32_start_service',
'win32_stop_service'],
'xattr': ['xattr_get',
'xattr_list',
'xattr_remove',
'xattr_set',
'xattr_supported'],
'xdiff': ['xdiff_file_bdiff_size',
'xdiff_file_bdiff',
'xdiff_file_bpatch',
'xdiff_file_diff_binary',
'xdiff_file_diff',
'xdiff_file_merge3',
'xdiff_file_patch_binary',
'xdiff_file_patch',
'xdiff_file_rabdiff',
'xdiff_string_bdiff_size',
'xdiff_string_bdiff',
'xdiff_string_bpatch',
'xdiff_string_diff_binary',
'xdiff_string_diff',
'xdiff_string_merge3',
'xdiff_string_patch_binary',
'xdiff_string_patch',
'xdiff_string_rabdiff']}
if __name__ == '__main__':
import glob
import os
import pprint
import re
import shutil
import tarfile
import urllib.request, urllib.parse, urllib.error
PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
PHP_MANUAL_DIR = './php-chunked-xhtml/'
PHP_REFERENCE_GLOB = 'ref.*'
    PHP_FUNCTION_RE = r'<a href="function\..*?\.html">(.*?)</a>'
PHP_MODULE_RE = '<title>(.*?) Functions</title>'
def get_php_functions():
function_re = re.compile(PHP_FUNCTION_RE)
module_re = re.compile(PHP_MODULE_RE)
modules = {}
for file in get_php_references():
module = ''
for line in open(file):
if not module:
search = module_re.search(line)
if search:
module = search.group(1)
modules[module] = []
elif '<h2>Table of Contents</h2>' in line:
for match in function_re.finditer(line):
fn = match.group(1)
if '->' not in fn and '::' not in fn:
modules[module].append(fn)
# These are dummy manual pages, not actual functions
if module == 'PHP Options/Info':
modules[module].remove('main')
elif module == 'Filesystem':
modules[module].remove('delete')
if not modules[module]:
del modules[module]
break
return modules
def get_php_references():
download = urllib.request.urlretrieve(PHP_MANUAL_URL)
tar = tarfile.open(download[0])
tar.extractall()
tar.close()
for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
yield file
os.remove(download[0])
def regenerate(filename, modules):
f = open(filename)
try:
content = f.read()
finally:
f.close()
header = content[:content.find('MODULES = {')]
footer = content[content.find("if __name__ == '__main__':"):]
f = open(filename, 'w')
f.write(header)
f.write('MODULES = %s\n\n' % pprint.pformat(modules))
f.write(footer)
f.close()
def run():
print('>> Downloading Function Index')
modules = get_php_functions()
total = sum(len(v) for v in modules.values())
print('%d functions found' % total)
regenerate(__file__, modules)
shutil.rmtree(PHP_MANUAL_DIR)
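    # Running this file directly re-downloads the PHP manual and rewrites the
    # MODULES dict above in place: regenerate() splices the new mapping between
    # the module header and this __main__ block.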
run()
| mit | 5,825,247,256,443,771,000 | 31.238711 | 87 | 0.441067 | false |
mkennedy04/knodj | env/Lib/site-packages/django/contrib/auth/decorators.py | 117 | 3021 | from functools import wraps
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import PermissionDenied
from django.shortcuts import resolve_url
from django.utils.decorators import available_attrs
from django.utils.six.moves.urllib.parse import urlparse
def user_passes_test(test_func, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
path = request.build_absolute_uri()
resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
# If the login url is the same scheme and net location then just
# use the path as the "next" url.
login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
current_scheme, current_netloc = urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
path, resolved_login_url, redirect_field_name)
return _wrapped_view
return decorator
def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log-in page if necessary.
"""
actual_decorator = user_passes_test(
lambda u: u.is_authenticated(),
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
def permission_required(perm, login_url=None, raise_exception=False):
"""
Decorator for views that checks whether a user has a particular permission
enabled, redirecting to the log-in page if necessary.
If the raise_exception parameter is given the PermissionDenied exception
is raised.
"""
def check_perms(user):
if not isinstance(perm, (list, tuple)):
perms = (perm, )
else:
perms = perm
# First check if the user has the permission (even anon users)
if user.has_perms(perms):
return True
# In case the 403 handler should be called raise the exception
if raise_exception:
raise PermissionDenied
# As the last resort, show the login form
return False
return user_passes_test(check_perms, login_url=login_url)
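
# Illustrative usage sketch (not part of this module; the view functions and
# the "polls.can_vote" permission below are hypothetical):
#
#     from django.contrib.auth.decorators import (
#         login_required, permission_required, user_passes_test)
#
#     @login_required
#     def profile(request):
#         ...
#
#     @permission_required('polls.can_vote', raise_exception=True)
#     def vote(request):
#         ...
#
#     @user_passes_test(lambda u: u.email.endswith('@example.com'))
#     def company_only(request):
#         ...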
| mit | -4,808,921,252,019,324,000 | 39.824324 | 91 | 0.66435 | false |
ihipi/Sick-Beard | sickbeard/name_cache.py | 14 | 2259 | # Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from sickbeard import db
from sickbeard.helpers import sanitizeSceneName
def addNameToCache(name, tvdb_id):
"""
Adds the show & tvdb id to the scene_names table in cache.db.
name: The show name to cache
tvdb_id: The tvdb id that this show should be cached with (can be None/0 for unknown)
"""
# standardize the name we're using to account for small differences in providers
name = sanitizeSceneName(name)
if not tvdb_id:
tvdb_id = 0
cacheDB = db.DBConnection('cache.db')
cacheDB.action("INSERT INTO scene_names (tvdb_id, name) VALUES (?, ?)", [tvdb_id, name])
def retrieveNameFromCache(name):
"""
Looks up the given name in the scene_names table in cache.db.
name: The show name to look up.
Returns: the tvdb id that resulted from the cache lookup or None if the show wasn't found in the cache
"""
# standardize the name we're using to account for small differences in providers
name = sanitizeSceneName(name)
cacheDB = db.DBConnection('cache.db')
cache_results = cacheDB.select("SELECT * FROM scene_names WHERE name = ?", [name])
if not cache_results:
return None
return int(cache_results[0]["tvdb_id"])
def clearCache():
"""
Deletes all "unknown" entries from the cache (names with tvdb_id of 0).
"""
cacheDB = db.DBConnection('cache.db')
cacheDB.action("DELETE FROM scene_names WHERE tvdb_id = ?", [0])
| gpl-3.0 | 7,818,124,436,004,360,000 | 31.716418 | 106 | 0.679062 | false |
roy2220/srs | trunk/research/code-statistic/csr.py | 5 | 3514 | #!/usr/bin/python
'''
The MIT License (MIT)
Copyright (c) 2013-2016 SRS(ossrs)
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
#################################################################################
# to stat the code and comment lines
#################################################################################
import sys, os, cs
from cs import info, trace
if __name__ != "__main__":
print "donot support lib"
sys.exit(-1)
filters="*.*pp,*.h,*.c,*.cc"
except_filters="utest,doc"
if len(sys.argv) <= 1:
print "to stat the code and comments lines"
print "Usage: python %s <dir> [filters] [except_filters]"%(sys.argv[0])
print " dir: the dir contains the files to stat"
print " filters: the file filters, default: *.*pp,*.h,*.c,*.cc"
print " except_filters: the except file filters, default: utest,doc"
print "Example:"
print " python %s src"%(sys.argv[0])
print " python %s src *.*pp,*.cc utest,doc"%(sys.argv[0])
sys.exit(-1)
dir = sys.argv[1]
if len(sys.argv) > 2:
filters = sys.argv[2]
if len(sys.argv) > 3:
except_filters = sys.argv[3]
info("stat dir:%s, filters:%s, except_filters:%s"%(dir, filters, except_filters))
# filters to array
filters = filters.split(",")
except_filters = except_filters.split(",")
# find src -name "*.*pp"|grep -v utest
(totals, stat_codes, commentss, stat_block_commentss, stat_line_commentss) = (0, 0, 0, 0, 0)
for filter in filters:
cmd = 'find %s -name "%s"'%(dir, filter)
for ef in except_filters:
cmd = '%s|%s'%(cmd, 'grep -v "%s"'%(ef))
cmd = "%s 2>&1"%(cmd)
info("scan dir, cmd:%s"%cmd)
pipe = os.popen(cmd)
files = pipe.read()
info("scan dir, files:%s"%files)
pipe.close()
files = files.split("\n")
for file in files:
file = file.strip()
if len(file) == 0:
continue;
info("start stat file:%s"%file)
(code, total, stat_code, comments, stat_block_comments, stat_line_comments, code_file) = cs.do_stat(file)
if code != 0:
continue;
totals += total
stat_codes += stat_code
commentss += comments
stat_block_commentss += stat_block_comments
stat_line_commentss += stat_line_comments
if totals == 0:
trace("no code or comments found.")
else:
trace("total:%s code:%s comments:%s(%.2f%%) block:%s line:%s"%(
totals, stat_codes, commentss, commentss * 100.0 / totals, stat_block_commentss, stat_line_commentss
))
| mit | -9,024,249,489,790,281,000 | 36.784946 | 113 | 0.626636 | false |
hawk-lord/gnucash | src/optional/python-bindings/example_scripts/quotes_historic.py | 13 | 2473 | #!/usr/bin/env python
# quotes_historic.py -- Example Script to read historic quote data into gnucash
#
## @file
# @brief Example Script to read historic stock data into gnucash
# @author Peter Holtermann
# @date January 2011
# @ingroup python_bindings_examples
#
# Call the perl-script @code
# ./get_quotes.pl INTC
# @endcode first to achieve data into file INTC which can thereafter be imported to GnuCash using this script.
#
# For explanation of use have a look at the wiki:
# http://wiki.gnucash.org/wiki/Stocks/get_prices
#
from gnucash import Session, Account, Split
import gnucash
import datetime
from fractions import Fraction
from gnc_convenience import find_account
FILE = "./test.gnucash"
url = "xml://"+FILE
# Read data from file
f = open('INTC')
data = []
while 1:
tmp = f.readline()
    if len(tmp) < 2:
break
data.append(tmp)
f.close()
stock_date = []
stock_price = []
for i in range(1,len(data)):
year = int(data[i].rsplit(',')[1].rsplit('/')[0])
month = int(data[i].rsplit(',')[1].rsplit('/')[1])
day = int(data[i].rsplit(',')[1].rsplit('/')[2])
stock_date.append(datetime.datetime(year,month,day))
stock_price.append(float(data[i].rsplit(',')[5]))
# Initialize Gnucash session
session = Session(url, True, False, False)
root = session.book.get_root_account()
book = session.book
account = book.get_root_account()
pdb = book.get_price_db()
comm_table = book.get_table()
ac = find_account(account,'Intel')[0]
stock = ac.GetCommodity()
# Add the prices
if len(ac.GetSplitList())<1:
print 'Need at least one Split to get currency info ... '
raise SystemExit
cur = ac.GetSplitList()[0].GetParent().GetCurrency()
# Get stock data
pl = pdb.get_prices(stock,cur)
if len(pl)<1:
print 'Need at least one database entry to clone ...'
raise SystemExit
pl0 = pl[0]
for i in range(1,len(pl)):
pdb.remove_price(pl[i])
for i in range(0,len(stock_date)):
p_new = pl0.clone(book)
p_new = gnucash.GncPrice(instance=p_new)
print 'Adding',i,stock_date[i],stock_price[i]
p_new.set_time(stock_date[i])
v = p_new.get_value()
v.num = int(Fraction.from_float(stock_price[i]).limit_denominator(100000).numerator)
v.denom = int(Fraction.from_float(stock_price[i]).limit_denominator(100000).denominator)
p_new.set_value(v)
p_new.set_source("Finance::Quotes::Historic")
pdb.add_price(p_new)
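# Note: each float close is stored as a rational via Fraction.limit_denominator,
# e.g. 31.47 becomes num=3147, denom=100 (the closest fraction with a
# denominator <= 100000).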
# Clean up
session.save()
session.end()
session.destroy()
| gpl-2.0 | 1,794,412,660,025,033,700 | 25.880435 | 112 | 0.680954 | false |
PopCap/GameIdea | Engine/Source/ThirdParty/HTML5/emsdk/Win64/python/2.7.5.3_64bit/Lib/ctypes/test/test_cast.py | 81 | 3212 | from ctypes import *
import unittest
import sys
class Test(unittest.TestCase):
def test_array2pointer(self):
array = (c_int * 3)(42, 17, 2)
# casting an array to a pointer works.
ptr = cast(array, POINTER(c_int))
self.assertEqual([ptr[i] for i in range(3)], [42, 17, 2])
if 2*sizeof(c_short) == sizeof(c_int):
ptr = cast(array, POINTER(c_short))
if sys.byteorder == "little":
self.assertEqual([ptr[i] for i in range(6)],
[42, 0, 17, 0, 2, 0])
else:
self.assertEqual([ptr[i] for i in range(6)],
[0, 42, 0, 17, 0, 2])
def test_address2pointer(self):
array = (c_int * 3)(42, 17, 2)
address = addressof(array)
ptr = cast(c_void_p(address), POINTER(c_int))
self.assertEqual([ptr[i] for i in range(3)], [42, 17, 2])
ptr = cast(address, POINTER(c_int))
self.assertEqual([ptr[i] for i in range(3)], [42, 17, 2])
def test_p2a_objects(self):
array = (c_char_p * 5)()
self.assertEqual(array._objects, None)
array[0] = "foo bar"
self.assertEqual(array._objects, {'0': "foo bar"})
p = cast(array, POINTER(c_char_p))
# array and p share a common _objects attribute
self.assertTrue(p._objects is array._objects)
self.assertEqual(array._objects, {'0': "foo bar", id(array): array})
p[0] = "spam spam"
self.assertEqual(p._objects, {'0': "spam spam", id(array): array})
self.assertTrue(array._objects is p._objects)
p[1] = "foo bar"
self.assertEqual(p._objects, {'1': 'foo bar', '0': "spam spam", id(array): array})
self.assertTrue(array._objects is p._objects)
def test_other(self):
p = cast((c_int * 4)(1, 2, 3, 4), POINTER(c_int))
self.assertEqual(p[:4], [1,2, 3, 4])
self.assertEqual(p[:4:], [1, 2, 3, 4])
self.assertEqual(p[3:-1:-1], [4, 3, 2, 1])
self.assertEqual(p[:4:3], [1, 4])
c_int()
self.assertEqual(p[:4], [1, 2, 3, 4])
self.assertEqual(p[:4:], [1, 2, 3, 4])
self.assertEqual(p[3:-1:-1], [4, 3, 2, 1])
self.assertEqual(p[:4:3], [1, 4])
p[2] = 96
self.assertEqual(p[:4], [1, 2, 96, 4])
self.assertEqual(p[:4:], [1, 2, 96, 4])
self.assertEqual(p[3:-1:-1], [4, 96, 2, 1])
self.assertEqual(p[:4:3], [1, 4])
c_int()
self.assertEqual(p[:4], [1, 2, 96, 4])
self.assertEqual(p[:4:], [1, 2, 96, 4])
self.assertEqual(p[3:-1:-1], [4, 96, 2, 1])
self.assertEqual(p[:4:3], [1, 4])
def test_char_p(self):
# This didn't work: bad argument to internal function
s = c_char_p("hiho")
self.assertEqual(cast(cast(s, c_void_p), c_char_p).value,
"hiho")
try:
c_wchar_p
except NameError:
pass
else:
def test_wchar_p(self):
s = c_wchar_p("hiho")
self.assertEqual(cast(cast(s, c_void_p), c_wchar_p).value,
"hiho")
if __name__ == "__main__":
unittest.main()
| bsd-2-clause | -3,888,873,864,979,485,700 | 35.089888 | 90 | 0.502179 | false |
zuku1985/scikit-learn | sklearn/utils/tests/test_multiclass.py | 58 | 14316 |
from __future__ import division
import numpy as np
import scipy.sparse as sp
from itertools import product
from sklearn.externals.six.moves import xrange
from sklearn.externals.six import iteritems
from scipy.sparse import issparse
from scipy.sparse import csc_matrix
from scipy.sparse import csr_matrix
from scipy.sparse import coo_matrix
from scipy.sparse import dok_matrix
from scipy.sparse import lil_matrix
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raises_regex
from sklearn.utils.multiclass import unique_labels
from sklearn.utils.multiclass import is_multilabel
from sklearn.utils.multiclass import type_of_target
from sklearn.utils.multiclass import class_distribution
from sklearn.utils.multiclass import check_classification_targets
from sklearn.utils.metaestimators import _safe_split
from sklearn.model_selection import ShuffleSplit
from sklearn.svm import SVC
from sklearn import datasets
class NotAnArray(object):
"""An object that is convertable to an array. This is useful to
simulate a Pandas timeseries."""
def __init__(self, data):
self.data = data
def __array__(self, dtype=None):
return self.data
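# e.g. np.asarray(NotAnArray(np.array([1, 2, 3]))) returns the wrapped array,
# exercising the same __array__ protocol a pandas Series relies on.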
EXAMPLES = {
'multilabel-indicator': [
# valid when the data is formatted as sparse or dense, identified
# by CSR format when the testing takes place
csr_matrix(np.random.RandomState(42).randint(2, size=(10, 10))),
csr_matrix(np.array([[0, 1], [1, 0]])),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.bool)),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.int8)),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.uint8)),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.float)),
csr_matrix(np.array([[0, 1], [1, 0]], dtype=np.float32)),
csr_matrix(np.array([[0, 0], [0, 0]])),
csr_matrix(np.array([[0, 1]])),
# Only valid when data is dense
np.array([[-1, 1], [1, -1]]),
np.array([[-3, 3], [3, -3]]),
NotAnArray(np.array([[-3, 3], [3, -3]])),
],
'multiclass': [
[1, 0, 2, 2, 1, 4, 2, 4, 4, 4],
np.array([1, 0, 2]),
np.array([1, 0, 2], dtype=np.int8),
np.array([1, 0, 2], dtype=np.uint8),
np.array([1, 0, 2], dtype=np.float),
np.array([1, 0, 2], dtype=np.float32),
np.array([[1], [0], [2]]),
NotAnArray(np.array([1, 0, 2])),
[0, 1, 2],
['a', 'b', 'c'],
np.array([u'a', u'b', u'c']),
np.array([u'a', u'b', u'c'], dtype=object),
np.array(['a', 'b', 'c'], dtype=object),
],
'multiclass-multioutput': [
np.array([[1, 0, 2, 2], [1, 4, 2, 4]]),
np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.int8),
np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.uint8),
np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.float),
np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.float32),
np.array([['a', 'b'], ['c', 'd']]),
np.array([[u'a', u'b'], [u'c', u'd']]),
np.array([[u'a', u'b'], [u'c', u'd']], dtype=object),
np.array([[1, 0, 2]]),
NotAnArray(np.array([[1, 0, 2]])),
],
'binary': [
[0, 1],
[1, 1],
[],
[0],
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1]),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.bool),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.int8),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.uint8),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.float),
np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.float32),
np.array([[0], [1]]),
NotAnArray(np.array([[0], [1]])),
[1, -1],
[3, 5],
['a'],
['a', 'b'],
['abc', 'def'],
np.array(['abc', 'def']),
[u'a', u'b'],
np.array(['abc', 'def'], dtype=object),
],
'continuous': [
[1e-5],
[0, .5],
np.array([[0], [.5]]),
np.array([[0], [.5]], dtype=np.float32),
],
'continuous-multioutput': [
np.array([[0, .5], [.5, 0]]),
np.array([[0, .5], [.5, 0]], dtype=np.float32),
np.array([[0, .5]]),
],
'unknown': [
[[]],
[()],
# sequence of sequences that weren't supported even before deprecation
np.array([np.array([]), np.array([1, 2, 3])], dtype=object),
[np.array([]), np.array([1, 2, 3])],
[set([1, 2, 3]), set([1, 2])],
[frozenset([1, 2, 3]), frozenset([1, 2])],
# and also confusable as sequences of sequences
[{0: 'a', 1: 'b'}, {0: 'a'}],
# empty second dimension
np.array([[], []]),
# 3d
np.array([[[0, 1], [2, 3]], [[4, 5], [6, 7]]]),
]
}
NON_ARRAY_LIKE_EXAMPLES = [
set([1, 2, 3]),
{0: 'a', 1: 'b'},
{0: [5], 1: [5]},
'abc',
frozenset([1, 2, 3]),
None,
]
MULTILABEL_SEQUENCES = [
[[1], [2], [0, 1]],
[(), (2), (0, 1)],
np.array([[], [1, 2]], dtype='object'),
NotAnArray(np.array([[], [1, 2]], dtype='object'))
]
def test_unique_labels():
# Empty iterable
assert_raises(ValueError, unique_labels)
# Multiclass problem
assert_array_equal(unique_labels(xrange(10)), np.arange(10))
assert_array_equal(unique_labels(np.arange(10)), np.arange(10))
assert_array_equal(unique_labels([4, 0, 2]), np.array([0, 2, 4]))
# Multilabel indicator
assert_array_equal(unique_labels(np.array([[0, 0, 1],
[1, 0, 1],
[0, 0, 0]])),
np.arange(3))
assert_array_equal(unique_labels(np.array([[0, 0, 1],
[0, 0, 0]])),
np.arange(3))
# Several arrays passed
assert_array_equal(unique_labels([4, 0, 2], xrange(5)),
np.arange(5))
assert_array_equal(unique_labels((0, 1, 2), (0,), (2, 1)),
np.arange(3))
# Border line case with binary indicator matrix
assert_raises(ValueError, unique_labels, [4, 0, 2], np.ones((5, 5)))
assert_raises(ValueError, unique_labels, np.ones((5, 4)), np.ones((5, 5)))
assert_array_equal(unique_labels(np.ones((4, 5)), np.ones((5, 5))),
np.arange(5))
def test_unique_labels_non_specific():
# Test unique_labels with a variety of collected examples
    # Smoke test for all supported formats
for format in ["binary", "multiclass", "multilabel-indicator"]:
for y in EXAMPLES[format]:
unique_labels(y)
    # We don't support those formats at the moment
for example in NON_ARRAY_LIKE_EXAMPLES:
assert_raises(ValueError, unique_labels, example)
for y_type in ["unknown", "continuous", 'continuous-multioutput',
'multiclass-multioutput']:
for example in EXAMPLES[y_type]:
assert_raises(ValueError, unique_labels, example)
def test_unique_labels_mixed_types():
# Mix with binary or multiclass and multilabel
mix_clf_format = product(EXAMPLES["multilabel-indicator"],
EXAMPLES["multiclass"] +
EXAMPLES["binary"])
for y_multilabel, y_multiclass in mix_clf_format:
assert_raises(ValueError, unique_labels, y_multiclass, y_multilabel)
assert_raises(ValueError, unique_labels, y_multilabel, y_multiclass)
assert_raises(ValueError, unique_labels, [[1, 2]], [["a", "d"]])
assert_raises(ValueError, unique_labels, ["1", 2])
assert_raises(ValueError, unique_labels, [["1", 2], [1, 3]])
assert_raises(ValueError, unique_labels, [["1", "2"], [2, 3]])
def test_is_multilabel():
for group, group_examples in iteritems(EXAMPLES):
if group in ['multilabel-indicator']:
dense_assert_, dense_exp = assert_true, 'True'
else:
dense_assert_, dense_exp = assert_false, 'False'
for example in group_examples:
# Only mark explicitly defined sparse examples as valid sparse
# multilabel-indicators
if group == 'multilabel-indicator' and issparse(example):
sparse_assert_, sparse_exp = assert_true, 'True'
else:
sparse_assert_, sparse_exp = assert_false, 'False'
if (issparse(example) or
(hasattr(example, '__array__') and
np.asarray(example).ndim == 2 and
np.asarray(example).dtype.kind in 'biuf' and
np.asarray(example).shape[1] > 0)):
examples_sparse = [sparse_matrix(example)
for sparse_matrix in [coo_matrix,
csc_matrix,
csr_matrix,
dok_matrix,
lil_matrix]]
for exmpl_sparse in examples_sparse:
sparse_assert_(is_multilabel(exmpl_sparse),
msg=('is_multilabel(%r)'
' should be %s')
% (exmpl_sparse, sparse_exp))
# Densify sparse examples before testing
if issparse(example):
example = example.toarray()
dense_assert_(is_multilabel(example),
msg='is_multilabel(%r) should be %s'
% (example, dense_exp))
def test_check_classification_targets():
for y_type in EXAMPLES.keys():
if y_type in ["unknown", "continuous", 'continuous-multioutput']:
for example in EXAMPLES[y_type]:
msg = 'Unknown label type: '
assert_raises_regex(ValueError, msg,
check_classification_targets, example)
else:
for example in EXAMPLES[y_type]:
check_classification_targets(example)
# @ignore_warnings
def test_type_of_target():
for group, group_examples in iteritems(EXAMPLES):
for example in group_examples:
assert_equal(type_of_target(example), group,
msg=('type_of_target(%r) should be %r, got %r'
% (example, group, type_of_target(example))))
for example in NON_ARRAY_LIKE_EXAMPLES:
        msg_regex = r'Expected array-like \(array or non-string sequence\).*'
assert_raises_regex(ValueError, msg_regex, type_of_target, example)
for example in MULTILABEL_SEQUENCES:
msg = ('You appear to be using a legacy multi-label data '
'representation. Sequence of sequences are no longer supported;'
' use a binary array or sparse matrix instead.')
assert_raises_regex(ValueError, msg, type_of_target, example)
def test_class_distribution():
y = np.array([[1, 0, 0, 1],
[2, 2, 0, 1],
[1, 3, 0, 1],
[4, 2, 0, 1],
[2, 0, 0, 1],
[1, 3, 0, 1]])
# Define the sparse matrix with a mix of implicit and explicit zeros
data = np.array([1, 2, 1, 4, 2, 1, 0, 2, 3, 2, 3, 1, 1, 1, 1, 1, 1])
indices = np.array([0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 5, 0, 1, 2, 3, 4, 5])
indptr = np.array([0, 6, 11, 11, 17])
y_sp = sp.csc_matrix((data, indices, indptr), shape=(6, 4))
classes, n_classes, class_prior = class_distribution(y)
classes_sp, n_classes_sp, class_prior_sp = class_distribution(y_sp)
classes_expected = [[1, 2, 4],
[0, 2, 3],
[0],
[1]]
n_classes_expected = [3, 3, 1, 1]
class_prior_expected = [[3/6, 2/6, 1/6],
[1/3, 1/3, 1/3],
[1.0],
[1.0]]
for k in range(y.shape[1]):
assert_array_almost_equal(classes[k], classes_expected[k])
assert_array_almost_equal(n_classes[k], n_classes_expected[k])
assert_array_almost_equal(class_prior[k], class_prior_expected[k])
assert_array_almost_equal(classes_sp[k], classes_expected[k])
assert_array_almost_equal(n_classes_sp[k], n_classes_expected[k])
assert_array_almost_equal(class_prior_sp[k], class_prior_expected[k])
# Test again with explicit sample weights
(classes,
n_classes,
class_prior) = class_distribution(y, [1.0, 2.0, 1.0, 2.0, 1.0, 2.0])
(classes_sp,
n_classes_sp,
class_prior_sp) = class_distribution(y, [1.0, 2.0, 1.0, 2.0, 1.0, 2.0])
class_prior_expected = [[4/9, 3/9, 2/9],
[2/9, 4/9, 3/9],
[1.0],
[1.0]]
for k in range(y.shape[1]):
assert_array_almost_equal(classes[k], classes_expected[k])
assert_array_almost_equal(n_classes[k], n_classes_expected[k])
assert_array_almost_equal(class_prior[k], class_prior_expected[k])
assert_array_almost_equal(classes_sp[k], classes_expected[k])
assert_array_almost_equal(n_classes_sp[k], n_classes_expected[k])
assert_array_almost_equal(class_prior_sp[k], class_prior_expected[k])
def test_safe_split_with_precomputed_kernel():
clf = SVC()
clfp = SVC(kernel="precomputed")
iris = datasets.load_iris()
X, y = iris.data, iris.target
K = np.dot(X, X.T)
cv = ShuffleSplit(test_size=0.25, random_state=0)
train, test = list(cv.split(X))[0]
X_train, y_train = _safe_split(clf, X, y, train)
K_train, y_train2 = _safe_split(clfp, K, y, train)
assert_array_almost_equal(K_train, np.dot(X_train, X_train.T))
assert_array_almost_equal(y_train, y_train2)
X_test, y_test = _safe_split(clf, X, y, test, train)
K_test, y_test2 = _safe_split(clfp, K, y, test, train)
assert_array_almost_equal(K_test, np.dot(X_test, X_train.T))
assert_array_almost_equal(y_test, y_test2)
| bsd-3-clause | 511,293,348,379,041,200 | 37.278075 | 79 | 0.534088 | false |
bformet/django-admin-bootstrapped | django_admin_bootstrapped/renderers.py | 20 | 2302 | from __future__ import absolute_import
from django.contrib.auth.forms import ReadOnlyPasswordHashWidget
from django.contrib.admin.widgets import (AdminDateWidget, AdminTimeWidget,
AdminSplitDateTime, RelatedFieldWidgetWrapper)
from django.forms import (FileInput, CheckboxInput, RadioSelect, CheckboxSelectMultiple)
from bootstrap3 import renderers
try:
from bootstrap3.utils import add_css_class
except ImportError:
from bootstrap3.html import add_css_class
from bootstrap3.text import text_value
class BootstrapFieldRenderer(renderers.FieldRenderer):
"""
A django-bootstrap3 field renderer that renders just the field
"""
def render(self):
# Hidden input requires no special treatment
if self.field.is_hidden:
return text_value(self.field)
# Render the widget
self.add_widget_attrs()
html = self.field.as_widget(attrs=self.widget.attrs)
return html
def add_class_attrs(self, widget=None):
if not widget:
widget = self.widget
# for multiwidgets we recursively update classes for each sub-widget
if isinstance(widget, AdminSplitDateTime):
for w in widget.widgets:
self.add_class_attrs(w)
return
classes = widget.attrs.get('class', '')
if isinstance(widget, ReadOnlyPasswordHashWidget):
classes = add_css_class(classes, 'form-control-static', prepend=True)
elif isinstance(widget, (AdminDateWidget,
AdminTimeWidget,
RelatedFieldWidgetWrapper)):
# for some admin widgets we don't want the input to take full horizontal space
classes = add_css_class(classes, 'form-control form-control-inline', prepend=True)
elif not isinstance(widget, (CheckboxInput,
RadioSelect,
CheckboxSelectMultiple,
FileInput)):
classes = add_css_class(classes, 'form-control', prepend=True)
# For these widget types, add the size class here
classes = add_css_class(classes, self.get_size_class())
widget.attrs['class'] = classes
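
# Illustrative wiring sketch (assumes django-bootstrap3 reads a
# "field_renderers" mapping from its BOOTSTRAP3 settings dict; key names may
# differ between bootstrap3 versions):
#
#     # settings.py
#     BOOTSTRAP3 = {
#         'field_renderers': {
#             'default': 'django_admin_bootstrapped.renderers.BootstrapFieldRenderer',
#         },
#     }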
| apache-2.0 | -3,745,653,991,261,056,500 | 42.433962 | 94 | 0.629018 | false |
yuanzhao/gpdb | src/test/tinc/tincrepo/mpp/lib/mpp_tl.py | 12 | 1154 | """
Copyright (c) 2004-Present Pivotal Software, Inc.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mpp.models.mpp_tc import _MPPMetaClassType
class MPPTestLib(object):
# MPPTestLib class is of type MPPMetaClassType
    # MPPMetaClassType will take care of reconfiguring the bases of all the derived classes that have product-specific hidden libraries
__metaclass__ = _MPPMetaClassType
def __init__(self):
self.make_me_product_agnostic()
super(MPPTestLib, self).__init__()
class __gpdbMPPTestLib__(MPPTestLib):
pass
class __hawqMPPTestLib__(MPPTestLib):
pass
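
# Illustrative sketch of the naming convention suggested by the hidden classes
# above (the class and return values below are hypothetical):
#
#     class MyLib(MPPTestLib):
#         def flavor(self):
#             return 'generic'
#
#     class __gpdbMyLib__(MyLib):  # would back MyLib when running against GPDB
#         def flavor(self):
#             return 'gpdb'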
| apache-2.0 | -7,988,780,380,947,063,000 | 36.225806 | 130 | 0.759099 | false |
ryanolson/couchdb-python | perftest.py | 7 | 1440 | """
Simple performance tests.
"""
import sys
import time
import couchdb
def main():
print 'sys.version : %r' % (sys.version,)
print 'sys.platform : %r' % (sys.platform,)
tests = [create_doc, create_bulk_docs]
if len(sys.argv) > 1:
tests = [test for test in tests if test.__name__ in sys.argv[1:]]
server = couchdb.Server()
for test in tests:
_run(server, test)
def _run(server, func):
"""Run a test in a clean db and log its execution time."""
sys.stdout.write("* [%s] %s ... " % (func.__name__, func.__doc__.strip()))
sys.stdout.flush()
db_name = 'couchdb-python/perftest'
db = server.create(db_name)
try:
try:
start = time.time()
func(db)
stop = time.time()
sys.stdout.write("%0.2fs\n" % (stop - start,))
sys.stdout.flush()
except Exception, e:
sys.stdout.write("FAILED - %r\n" % (unicode(e),))
sys.stdout.flush()
finally:
server.delete(db_name)
def create_doc(db):
"""Create lots of docs, one at a time"""
for i in range(1000):
db.save({'_id': unicode(i)})
def create_bulk_docs(db):
"""Create lots of docs, lots at a time"""
batch_size = 100
num_batches = 1000
for i in range(num_batches):
db.update([{'_id': unicode((i * batch_size) + j)} for j in range(batch_size)])
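# To benchmark something else, define another function taking a db and add it
# to the ``tests`` list in main(), e.g. (put_attachment usage assumed from the
# couchdb-python client API):
#
#     def create_doc_with_attachment(db):
#         """Create docs that each carry a small text attachment"""
#         for i in range(100):
#             doc = {'_id': unicode(i)}
#             db.save(doc)
#             db.put_attachment(doc, 'some data', filename='note.txt')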
if __name__ == '__main__':
main()
| bsd-3-clause | -1,501,872,776,643,004,400 | 23 | 86 | 0.546528 | false |
kunalgrover05/Mediawiki-pagelang | maintenance/language/zhtable/Makefile.py | 63 | 14541 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author Philip
import tarfile as tf
import zipfile as zf
import os, re, shutil, sys, platform
pyversion = platform.python_version()
islinux = platform.system().lower() == 'linux'
if pyversion[:3] in ['2.6', '2.7']:
import urllib as urllib_request
import codecs
open = codecs.open
_unichr = unichr
if sys.maxunicode < 0x10000:
def unichr(i):
if i < 0x10000:
return _unichr(i)
else:
return _unichr( 0xD7C0 + ( i>>10 ) ) + _unichr( 0xDC00 + ( i & 0x3FF ) )
elif pyversion[:2] == '3.':
import urllib.request as urllib_request
unichr = chr
def unichr2( *args ):
return [unichr( int( i.split('<')[0][2:], 16 ) ) for i in args]
def unichr3( *args ):
return [unichr( int( i[2:7], 16 ) ) for i in args if i[2:7]]
# DEFINE
UNIHAN_VER = '6.3.0'
SF_MIRROR = 'dfn'
SCIM_TABLES_VER = '0.5.13'
SCIM_PINYIN_VER = '0.5.92'
LIBTABE_VER = '0.2.3'
# END OF DEFINE
def download( url, dest ):
if os.path.isfile( dest ):
print( 'File %s is up to date.' % dest )
return
global islinux
if islinux:
        # we use wget instead of urlretrieve under Linux,
# because wget could display details like download progress
os.system( 'wget %s -O %s' % ( url, dest ) )
else:
print( 'Downloading from [%s] ...' % url )
urllib_request.urlretrieve( url, dest )
print( 'Download complete.\n' )
return
def uncompress( fp, member, encoding = 'U8' ):
name = member.rsplit( '/', 1 )[-1]
print( 'Extracting %s ...' % name )
fp.extract( member )
shutil.move( member, name )
if '/' in member:
shutil.rmtree( member.split( '/', 1 )[0] )
if pyversion[:1] in ['2']:
fc = open( name, 'rb', encoding, 'ignore' )
else:
fc = open( name, 'r', encoding = encoding, errors = 'ignore' )
return fc
unzip = lambda path, member, encoding = 'U8': \
uncompress( zf.ZipFile( path ), member, encoding )
untargz = lambda path, member, encoding = 'U8': \
uncompress( tf.open( path, 'r:gz' ), member, encoding )
def parserCore( fp, pos, beginmark = None, endmark = None ):
if beginmark and endmark:
start = False
else: start = True
mlist = set()
for line in fp:
if beginmark and line.startswith( beginmark ):
start = True
continue
elif endmark and line.startswith( endmark ):
break
if start and not line.startswith( '#' ):
elems = line.split()
if len( elems ) < 2:
continue
elif len( elems[0] ) > 1 and \
len( elems[pos] ) > 1: # words only
mlist.add( elems[pos] )
return mlist
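# Behavior sketch for parserCore (illustrative toy input, not real table data):
#     lines = ['BEGIN_TABLE', 'aa XY 10', 'b Z 5', 'END_TABLE']
#     parserCore( lines, 1, 'BEGIN_TABLE', 'END_TABLE' ) # -> set(['XY'])
# 'b Z 5' is dropped: both the first column and the selected column must be
# longer than one character, so only multi-character words are collected.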
def tablesParser( path, name ):
""" Read file from scim-tables and parse it. """
global SCIM_TABLES_VER
src = 'scim-tables-%s/tables/zh/%s' % ( SCIM_TABLES_VER, name )
fp = untargz( path, src, 'U8' )
return parserCore( fp, 1, 'BEGIN_TABLE', 'END_TABLE' )
ezbigParser = lambda path: tablesParser( path, 'EZ-Big.txt.in' )
wubiParser = lambda path: tablesParser( path, 'Wubi.txt.in' )
zrmParser = lambda path: tablesParser( path, 'Ziranma.txt.in' )
def phraseParser( path ):
""" Read phrase_lib.txt and parse it. """
global SCIM_PINYIN_VER
src = 'scim-pinyin-%s/data/phrase_lib.txt' % SCIM_PINYIN_VER
dst = 'phrase_lib.txt'
fp = untargz( path, src, 'U8' )
return parserCore( fp, 0 )
def tsiParser( path ):
""" Read tsi.src and parse it. """
src = 'libtabe/tsi-src/tsi.src'
dst = 'tsi.src'
fp = untargz( path, src, 'big5hkscs' )
return parserCore( fp, 0 )
def unihanParser( path ):
""" Read Unihan_Variants.txt and parse it. """
fp = unzip( path, 'Unihan_Variants.txt', 'U8' )
t2s = dict()
s2t = dict()
for line in fp:
if line.startswith( '#' ):
continue
else:
elems = line.split()
if len( elems ) < 3:
continue
type = elems.pop( 1 )
elems = unichr2( *elems )
if type == 'kTraditionalVariant':
s2t[elems[0]] = elems[1:]
elif type == 'kSimplifiedVariant':
t2s[elems[0]] = elems[1:]
fp.close()
return ( t2s, s2t )
def applyExcludes( mlist, path ):
""" Apply exclude rules from path to mlist. """
if pyversion[:1] in ['2']:
excludes = open( path, 'rb', 'U8' ).read().split()
else:
excludes = open( path, 'r', encoding = 'U8' ).read().split()
excludes = [word.split( '#' )[0].strip() for word in excludes]
excludes = '|'.join( excludes )
excptn = re.compile( '.*(?:%s).*' % excludes )
diff = [mword for mword in mlist if excptn.search( mword )]
mlist.difference_update( diff )
return mlist
def charManualTable( path ):
fp = open( path, 'r', encoding = 'U8' )
ret = {}
for line in fp:
elems = line.split( '#' )[0].split( '|' )
elems = unichr3( *elems )
if len( elems ) > 1:
ret[elems[0]] = elems[1:]
return ret
def toManyRules( src_table ):
tomany = set()
if pyversion[:1] in ['2']:
for ( f, t ) in src_table.iteritems():
for i in range( 1, len( t ) ):
tomany.add( t[i] )
else:
for ( f, t ) in src_table.items():
for i in range( 1, len( t ) ):
tomany.add( t[i] )
return tomany
def removeRules( path, table ):
fp = open( path, 'r', encoding = 'U8' )
texc = list()
for line in fp:
elems = line.split( '=>' )
f = t = elems[0].strip()
if len( elems ) == 2:
t = elems[1].strip()
f = f.strip('"').strip("'")
t = t.strip('"').strip("'")
if f:
try:
table.pop( f )
except:
pass
if t:
texc.append( t )
texcptn = re.compile( '^(?:%s)$' % '|'.join( texc ) )
if pyversion[:1] in ['2']:
for (tmp_f, tmp_t) in table.copy().iteritems():
if texcptn.match( tmp_t ):
table.pop( tmp_f )
else:
for (tmp_f, tmp_t) in table.copy().items():
if texcptn.match( tmp_t ):
table.pop( tmp_f )
return table
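# Rule-file sketch for removeRules (illustrative): each line is either
#     'X'        -> drop key 'X' and also suppress mappings whose value is 'X'
#     'X' => 'Y' -> drop key 'X' and suppress mappings whose value is 'Y'
# where the surrounding quotes are optional and stripped before matching.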
def customRules( path ):
fp = open( path, 'r', encoding = 'U8' )
ret = dict()
for line in fp:
elems = line.split( '#' )[0].split()
if len( elems ) > 1:
ret[elems[0]] = elems[1]
return ret
def dictToSortedList( src_table, pos ):
return sorted( src_table.items(), key = lambda m: m[pos] )
def translate( text, conv_table ):
i = 0
while i < len( text ):
for j in range( len( text ) - i, 0, -1 ):
f = text[i:][:j]
t = conv_table.get( f )
if t:
text = text[:i] + t + text[i:][j:]
i += len(t) - 1
break
i += 1
return text
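# Behavior sketch for translate (illustrative toy table, not real data): the
# scan is greedy longest-match, so multi-character entries win over single
# characters.
#     translate( u'abc', { u'ab': u'X', u'a': u'Y' } ) # -> u'Xc'
#     translate( u'ac', { u'ab': u'X', u'a': u'Y' } ) # -> u'Yc'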
def manualWordsTable( path, conv_table, reconv_table ):
fp = open( path, 'r', encoding = 'U8' )
reconv_table = {}
wordlist = [line.split( '#' )[0].strip() for line in fp]
wordlist = list( set( wordlist ) )
wordlist.sort( key = len, reverse = True )
while wordlist:
word = wordlist.pop()
new_word = translate( word, conv_table )
rcv_word = translate( word, reconv_table )
if word != rcv_word:
reconv_table[word] = word
reconv_table[new_word] = word
return reconv_table
def defaultWordsTable( src_wordlist, src_tomany, char_conv_table, char_reconv_table ):
wordlist = list( src_wordlist )
wordlist.sort( key = len, reverse = True )
word_conv_table = {}
word_reconv_table = {}
conv_table = char_conv_table.copy()
reconv_table = char_reconv_table.copy()
tomanyptn = re.compile( '(?:%s)' % '|'.join( src_tomany ) )
while wordlist:
conv_table.update( word_conv_table )
reconv_table.update( word_reconv_table )
word = wordlist.pop()
new_word_len = word_len = len( word )
while new_word_len == word_len:
add = False
test_word = translate( word, reconv_table )
new_word = translate( word, conv_table )
if not reconv_table.get( new_word ) \
and ( test_word != word \
or ( tomanyptn.search( word ) \
and word != translate( new_word, reconv_table ) ) ):
word_conv_table[word] = new_word
word_reconv_table[new_word] = word
try:
word = wordlist.pop()
except IndexError:
break
new_word_len = len(word)
return word_reconv_table
def PHPArray( table ):
lines = ['\'%s\' => \'%s\',' % (f, t) for (f, t) in table if f and t]
return '\n'.join(lines)
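# Output sketch (illustrative pair): PHPArray( [( u'a', u'b' )] ) yields the
# single line "'a' => 'b',"; pairs with an empty key or value are skipped.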
def main():
#Get Unihan.zip:
url = 'http://www.unicode.org/Public/%s/ucd/Unihan.zip' % UNIHAN_VER
han_dest = 'Unihan.zip'
download( url, han_dest )
# Get scim-tables-$(SCIM_TABLES_VER).tar.gz:
url = 'http://%s.dl.sourceforge.net/sourceforge/scim/scim-tables-%s.tar.gz' % ( SF_MIRROR, SCIM_TABLES_VER )
tbe_dest = 'scim-tables-%s.tar.gz' % SCIM_TABLES_VER
download( url, tbe_dest )
# Get scim-pinyin-$(SCIM_PINYIN_VER).tar.gz:
url = 'http://%s.dl.sourceforge.net/sourceforge/scim/scim-pinyin-%s.tar.gz' % ( SF_MIRROR, SCIM_PINYIN_VER )
pyn_dest = 'scim-pinyin-%s.tar.gz' % SCIM_PINYIN_VER
download( url, pyn_dest )
# Get libtabe-$(LIBTABE_VER).tgz:
url = 'http://%s.dl.sourceforge.net/sourceforge/libtabe/libtabe-%s.tgz' % ( SF_MIRROR, LIBTABE_VER )
lbt_dest = 'libtabe-%s.tgz' % LIBTABE_VER
download( url, lbt_dest )
# Unihan.txt
( t2s_1tomany, s2t_1tomany ) = unihanParser( han_dest )
t2s_1tomany.update( charManualTable( 'trad2simp.manual' ) )
s2t_1tomany.update( charManualTable( 'simp2trad.manual' ) )
if pyversion[:1] in ['2']:
t2s_1to1 = dict( [( f, t[0] ) for ( f, t ) in t2s_1tomany.iteritems()] )
s2t_1to1 = dict( [( f, t[0] ) for ( f, t ) in s2t_1tomany.iteritems()] )
else:
t2s_1to1 = dict( [( f, t[0] ) for ( f, t ) in t2s_1tomany.items()] )
s2t_1to1 = dict( [( f, t[0] ) for ( f, t ) in s2t_1tomany.items()] )
s_tomany = toManyRules( t2s_1tomany )
t_tomany = toManyRules( s2t_1tomany )
# noconvert rules
t2s_1to1 = removeRules( 'trad2simp_noconvert.manual', t2s_1to1 )
s2t_1to1 = removeRules( 'simp2trad_noconvert.manual', s2t_1to1 )
    # the superset for word to word conversion
t2s_1to1_supp = t2s_1to1.copy()
s2t_1to1_supp = s2t_1to1.copy()
t2s_1to1_supp.update( customRules( 'trad2simp_supp_set.manual' ) )
s2t_1to1_supp.update( customRules( 'simp2trad_supp_set.manual' ) )
# word to word manual rules
t2s_word2word_manual = manualWordsTable( 'simpphrases.manual', s2t_1to1_supp, t2s_1to1_supp )
t2s_word2word_manual.update( customRules( 'toSimp.manual' ) )
s2t_word2word_manual = manualWordsTable( 'tradphrases.manual', t2s_1to1_supp, s2t_1to1_supp )
s2t_word2word_manual.update( customRules( 'toTrad.manual' ) )
# word to word rules from input methods
t_wordlist = set()
s_wordlist = set()
t_wordlist.update( ezbigParser( tbe_dest ),
tsiParser( lbt_dest ) )
s_wordlist.update( wubiParser( tbe_dest ),
zrmParser( tbe_dest ),
phraseParser( pyn_dest ) )
# exclude
s_wordlist = applyExcludes( s_wordlist, 'simpphrases_exclude.manual' )
t_wordlist = applyExcludes( t_wordlist, 'tradphrases_exclude.manual' )
s2t_supp = s2t_1to1_supp.copy()
s2t_supp.update( s2t_word2word_manual )
t2s_supp = t2s_1to1_supp.copy()
t2s_supp.update( t2s_word2word_manual )
# parse list to dict
t2s_word2word = defaultWordsTable( s_wordlist, s_tomany, s2t_1to1_supp, t2s_supp )
t2s_word2word.update( t2s_word2word_manual )
s2t_word2word = defaultWordsTable( t_wordlist, t_tomany, t2s_1to1_supp, s2t_supp )
s2t_word2word.update( s2t_word2word_manual )
# Final tables
# sorted list toHans
if pyversion[:1] in ['2']:
t2s_1to1 = dict( [( f, t ) for ( f, t ) in t2s_1to1.iteritems() if f != t] )
else:
t2s_1to1 = dict( [( f, t ) for ( f, t ) in t2s_1to1.items() if f != t] )
toHans = dictToSortedList( t2s_1to1, 0 ) + dictToSortedList( t2s_word2word, 1 )
# sorted list toHant
if pyversion[:1] in ['2']:
s2t_1to1 = dict( [( f, t ) for ( f, t ) in s2t_1to1.iteritems() if f != t] )
else:
s2t_1to1 = dict( [( f, t ) for ( f, t ) in s2t_1to1.items() if f != t] )
toHant = dictToSortedList( s2t_1to1, 0 ) + dictToSortedList( s2t_word2word, 1 )
# sorted list toCN
toCN = dictToSortedList( customRules( 'toCN.manual' ), 1 )
# sorted list toHK
toHK = dictToSortedList( customRules( 'toHK.manual' ), 1 )
# sorted list toSG
toSG = dictToSortedList( customRules( 'toSG.manual' ), 1 )
# sorted list toTW
toTW = dictToSortedList( customRules( 'toTW.manual' ), 1 )
# Get PHP Array
php = '''<?php
/**
* Simplified / Traditional Chinese conversion tables
*
* Automatically generated using code and data in maintenance/language/zhtable/
* Do not modify directly!
*
* @file
*/
$zh2Hant = array(\n'''
php += PHPArray( toHant ) \
+ '\n);\n\n$zh2Hans = array(\n' \
+ PHPArray( toHans ) \
+ '\n);\n\n$zh2TW = array(\n' \
+ PHPArray( toTW ) \
+ '\n);\n\n$zh2HK = array(\n' \
+ PHPArray( toHK ) \
+ '\n);\n\n$zh2CN = array(\n' \
+ PHPArray( toCN ) \
+ '\n);\n\n$zh2SG = array(\n' \
+ PHPArray( toSG ) \
+ '\n);\n'
if pyversion[:1] in ['2']:
f = open( os.path.join( '..', '..', '..', 'includes', 'ZhConversion.php' ), 'wb', encoding = 'utf8' )
else:
f = open( os.path.join( '..', '..', '..', 'includes', 'ZhConversion.php' ), 'w', buffering = 4096, encoding = 'utf8' )
print ('Writing ZhConversion.php ... ')
f.write( php )
f.close()
# Remove temporary files
print ('Deleting temporary files ... ')
os.remove('EZ-Big.txt.in')
os.remove('phrase_lib.txt')
os.remove('tsi.src')
os.remove('Unihan_Variants.txt')
os.remove('Wubi.txt.in')
os.remove('Ziranma.txt.in')
if __name__ == '__main__':
main()
| gpl-2.0 | -5,167,589,613,223,338,000 | 33.539192 | 126 | 0.553951 | false |
with-git/tensorflow | tensorflow/python/eager/benchmarks_test.py | 5 | 4874 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for low-level eager execution primitives.
Packaged as a test to ensure that this code is exercised by continuous
integration tests. To get numbers:
bazel build -c opt :benchmarks_test &&
./bazel-bin/tensorflow/python/eager/benchmarks_test --iters=0
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import contextlib
import sys
import time
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import backprop # pylint: disable=unused-import
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.eager import tensor
from tensorflow.python.eager import test
from tensorflow.python.framework import test_util
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
FLAGS = None
@contextlib.contextmanager
def timer(label, iters=30000):
start = time.time()
yield xrange(iters)
end = time.time()
t = (end - start) * 1e6 / iters
print("%-40s took %.2fus (%d iterations)" % (label, t, iters))
def benchmark_create_tensor(n):
"""Benchmark overheads of creating a Tensor object."""
def label(s):
return "{:20s}".format(s)
with timer(label("np.array([[3]])"), iters=n) as iters:
for _ in iters:
np.array([[3]])
with timer(label("Tensor([[3]])"), iters=n) as iters:
for _ in iters:
tensor.Tensor([[3]])
def benchmark_matmul(shape, n, use_gpu=False):
"""Benchmark for matrix multiplication using tf.matmul."""
transpose_b = (shape[0] != shape[1])
m = random_ops.random_uniform(shape)
if use_gpu:
m = m.as_gpu_tensor()
# Warm up the GPU - the very first kernel invocation
# seems to require a bunch of setup.
math_ops.matmul(m, m, transpose_b=transpose_b)
def label(s):
return "MatMul {}: {:30s}".format(shape, s)
if not use_gpu:
a = m.as_cpu_tensor().numpy()
b = a.T if transpose_b else a
with timer(label("np.dot"), iters=n) as iters:
for _ in iters:
np.dot(a, b)
with timer(label("tf.matmul"), iters=n) as iters:
for _ in iters:
math_ops.matmul(m, m, transpose_b=transpose_b)
with timer(label("gen_math_ops.mat_mul"), iters=n) as iters:
for _ in iters:
gen_math_ops._mat_mul(m, m, transpose_b=transpose_b)
# pylint: disable=protected-access
input_handles = [m._handle, m._handle]
ctx_handle = context.context()._handle
# pylint: enable=protected-access
attrs = ("transpose_a", False, "transpose_b", transpose_b, "T",
m.dtype.as_datatype_enum)
with timer(label("TFE_Py_Execute"), iters=n) as iters:
for _ in iters:
pywrap_tensorflow.TFE_DeleteTensorHandle(
pywrap_tensorflow.TFE_Py_Execute(ctx_handle, None, "MatMul",
input_handles, attrs, 1)[0])
f = function.defun(math_ops.matmul)
with timer(label("defun(tf.matmul)"), iters=n) as iters:
for _ in iters:
f(m, m, transpose_b=transpose_b)
class BenchmarksTest(test_util.TensorFlowTestCase):
def testBenchmarks(self):
# This isn't actually a test, but benchmarks packaged as a test
# so that continuous integration runs catch any breakages.
print(context.context())
benchmark_create_tensor(FLAGS.iters or 30000)
benchmark_matmul([2, 2], FLAGS.iters or 30000)
benchmark_matmul([100, 28 * 28], FLAGS.iters or 1000)
if context.context().num_gpus() > 0:
print("---- RUNNING ON GPU NOW ----")
benchmark_matmul([2, 2], FLAGS.iters or 30000, use_gpu=True)
benchmark_matmul([100, 28 * 28], FLAGS.iters or 1000, use_gpu=True)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
  # Default iterations to 1 to keep continuous integration test times low.
parser.add_argument(
"--iters",
type=int,
default=1,
help="Number of iterators for each test. None or 0 for auto-selection")
FLAGS, unparsed = parser.parse_known_args()
sys.argv = [sys.argv[0]] + unparsed
test.main()
| apache-2.0 | -3,579,026,650,850,242,600 | 32.613793 | 80 | 0.676652 | false |
rest-of/the-deck | lambda/lib/python2.7/site-packages/pip/_vendor/progress/counter.py | 510 | 1502 | # -*- coding: utf-8 -*-
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from . import Infinite, Progress
from .helpers import WriteMixin
class Counter(WriteMixin, Infinite):
message = ''
hide_cursor = True
def update(self):
self.write(str(self.index))
class Countdown(WriteMixin, Progress):
hide_cursor = True
def update(self):
self.write(str(self.remaining))
class Stack(WriteMixin, Progress):
phases = (u' ', u'▁', u'▂', u'▃', u'▄', u'▅', u'▆', u'▇', u'█')
hide_cursor = True
def update(self):
nphases = len(self.phases)
i = min(nphases - 1, int(self.progress * nphases))
self.write(self.phases[i])
class Pie(Stack):
phases = (u'○', u'◔', u'◑', u'◕', u'●')
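# Minimal usage sketch (illustrative; relies on the next() and finish()
# methods provided by the Infinite/Progress base classes):
#     from progress.counter import Counter
#     counter = Counter('Counting: ')
#     for _ in range(10):
#         counter.next()
#     counter.finish()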
| mit | 6,100,494,108,667,746,000 | 30.404255 | 74 | 0.684959 | false |
hzy001/ansible | lib/ansible/plugins/callback/hipchat.py | 101 | 5867 | # (C) 2014, Matt Martz <matt@sivel.net>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import urllib
import urllib2
try:
import prettytable
HAS_PRETTYTABLE = True
except ImportError:
HAS_PRETTYTABLE = False
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
"""This is an example ansible callback plugin that sends status
updates to a HipChat channel during playbook execution.
This plugin makes use of the following environment variables:
HIPCHAT_TOKEN (required): HipChat API token
HIPCHAT_ROOM (optional): HipChat room to post in. Default: ansible
HIPCHAT_FROM (optional): Name to post as. Default: ansible
HIPCHAT_NOTIFY (optional): Add notify flag to important messages ("true" or "false"). Default: true
Requires:
prettytable
"""
    CALLBACK_VERSION = 2.0
CALLBACK_NAME = 'hipchat'
def __init__(self, display):
super(CallbackModule, self).__init__(display)
if not HAS_PRETTYTABLE:
self.disabled = True
self.display.warning('The `prettytable` python module is not installed. '
'Disabling the HipChat callback plugin.')
self.msg_uri = 'https://api.hipchat.com/v1/rooms/message'
self.token = os.getenv('HIPCHAT_TOKEN')
self.room = os.getenv('HIPCHAT_ROOM', 'ansible')
self.from_name = os.getenv('HIPCHAT_FROM', 'ansible')
self.allow_notify = (os.getenv('HIPCHAT_NOTIFY') != 'false')
if self.token is None:
self.disabled = True
self.display.warning('HipChat token could not be loaded. The HipChat '
'token can be provided using the `HIPCHAT_TOKEN` '
'environment variable.')
self.printed_playbook = False
self.playbook_name = None
def send_msg(self, msg, msg_format='text', color='yellow', notify=False):
"""Method for sending a message to HipChat"""
params = {}
params['room_id'] = self.room
params['from'] = self.from_name[:15] # max length is 15
params['message'] = msg
params['message_format'] = msg_format
params['color'] = color
params['notify'] = int(self.allow_notify and notify)
url = ('%s?auth_token=%s' % (self.msg_uri, self.token))
try:
response = urllib2.urlopen(url, urllib.urlencode(params))
return response.read()
except:
self.display.warning('Could not submit message to hipchat')
def playbook_on_play_start(self, name):
"""Display Playbook and play start messages"""
# This block sends information about a playbook when it starts
# The playbook object is not immediately available at
# playbook_on_start so we grab it via the play
#
# Displays info about playbook being started by a person on an
# inventory, as well as Tags, Skip Tags and Limits
if not self.printed_playbook:
self.playbook_name, _ = os.path.splitext(
os.path.basename(self.play.playbook.filename))
host_list = self.play.playbook.inventory.host_list
inventory = os.path.basename(os.path.realpath(host_list))
self.send_msg("%s: Playbook initiated by %s against %s" %
(self.playbook_name,
self.play.playbook.remote_user,
inventory), notify=True)
self.printed_playbook = True
subset = self.play.playbook.inventory._subset
skip_tags = self.play.playbook.skip_tags
self.send_msg("%s:\nTags: %s\nSkip Tags: %s\nLimit: %s" %
(self.playbook_name,
', '.join(self.play.playbook.only_tags),
', '.join(skip_tags) if skip_tags else None,
', '.join(subset) if subset else subset))
# This is where we actually say we are starting a play
self.send_msg("%s: Starting play: %s" %
(self.playbook_name, name))
def playbook_on_stats(self, stats):
"""Display info about playbook statistics"""
hosts = sorted(stats.processed.keys())
t = prettytable.PrettyTable(['Host', 'Ok', 'Changed', 'Unreachable',
'Failures'])
failures = False
unreachable = False
for h in hosts:
s = stats.summarize(h)
if s['failures'] > 0:
failures = True
if s['unreachable'] > 0:
unreachable = True
t.add_row([h] + [s[k] for k in ['ok', 'changed', 'unreachable',
'failures']])
self.send_msg("%s: Playbook complete" % self.playbook_name,
notify=True)
if failures or unreachable:
color = 'red'
self.send_msg("%s: Failures detected" % self.playbook_name,
color=color, notify=True)
else:
color = 'green'
self.send_msg("/code %s:\n%s" % (self.playbook_name, t), color=color)
| gpl-3.0 | -6,617,036,219,651,750,000 | 37.346405 | 107 | 0.591785 | false |
tianyang-li/de-novo-rna-seq-quant-1 | util/trinity_0.py | 1 | 3040 | #!/usr/bin/env python
# Copyright (C) 2012 Tianyang Li
# tmy1018@gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
handles Trinity's output
"""
from __future__ import division
from collections import defaultdict
import re
from Bio import SeqIO
from fasta_0 import FastaSeq
class TrinityContig(FastaSeq):
__slots__ = ['nodes']
def __init__(self, rec_id, seq, nodes):
"""
nodes
a list of
(node, low, high)
describing nodes in splice graph
"""
super(TrinityContig, self).__init__(rec_id, seq)
self.nodes = nodes
def find_start(self, start):
"""
find the node of a psl.tStart is in
it is assumed that _start_ can always
find a node to fit in
"""
l = 0
h = len(self.nodes) - 1
x = int((l + h) / 2)
while (start < self.nodes[x][1]
or start >= self.nodes[x][2]):
if start < self.nodes[x][1]:
h = x - 1
else:
l = x + 1
x = int((l + h) / 2)
return x
def find_end(self, end):
"""
find the node a psl.tEnd is in
it is assumed that _end_ can always find
a node to fit in
"""
l = 0
h = len(self.nodes) - 1
x = int((l + h) / 2)
while (end <= self.nodes[x][1]
or end > self.nodes[x][2]):
if end <= self.nodes[x][1]:
h = x - 1
else:
l = x + 1
x = int((l + h) / 2)
return x
path_re = re.compile(r'path=\[(.*)\]')
node_re = re.compile(r'(\w*):(\d*)-(\d*)')
def convert_node_re(n):
return (n[0], int(n[1]), int(n[2]) + 1)
def get_contig_nodes(rec_decription):
global path_re, node_re
nodes = path_re.search(rec_decription).group(1).split(" ")
return map(lambda n: convert_node_re(node_re.search(n).groups()), nodes)
def get_contig_dict(trinity_out_file):
"""
return a defaultdict(dict)
where
contig_dict[graph][contig]
is _contig_ from _graph_
"""
contig_dict = defaultdict(dict)
for rec in SeqIO.parse(trinity_out_file, 'fasta'):
rec_id = rec.id
contig_dict[rec_id.split("_")[0]][rec_id] = TrinityContig(rec_id, str(rec.seq), get_contig_nodes(rec.description))
return contig_dict
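# Illustrative usage sketch ('Trinity.fasta' is a placeholder path, and the
# first splice-graph node is assumed to start at position 0):
#     contig_dict = get_contig_dict('Trinity.fasta')
#     for graph, contigs in contig_dict.items():
#         for contig in contigs.values():
#             first_node = contig.find_start(0)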
| gpl-3.0 | 3,560,122,842,697,105,000 | 24.123967 | 122 | 0.538158 | false |
sawenzel/root | interpreter/llvm/src/utils/lint/common_lint.py | 147 | 2589 | #!/usr/bin/python
#
# Common lint functions applicable to multiple types of files.
import re
def VerifyLineLength(filename, lines, max_length):
"""Checks to make sure the file has no lines with lines exceeding the length
limit.
Args:
filename: the file under consideration as string
lines: contents of the file as string array
max_length: maximum acceptable line length as number
Returns:
A list of tuples with format [(filename, line number, msg), ...] with any
violations found.
"""
lint = []
line_num = 1
for line in lines:
length = len(line.rstrip('\n'))
if length > max_length:
lint.append((filename, line_num,
'Line exceeds %d chars (%d)' % (max_length, length)))
line_num += 1
return lint
def VerifyTabs(filename, lines):
"""Checks to make sure the file has no tab characters.
Args:
filename: the file under consideration as string
lines: contents of the file as string array
Returns:
    A list of tuples with format [(filename, line number, msg), ...] with any
    violations found.
"""
lint = []
tab_re = re.compile(r'\t')
line_num = 1
for line in lines:
if tab_re.match(line.rstrip('\n')):
lint.append((filename, line_num, 'Tab found instead of whitespace'))
line_num += 1
return lint
def VerifyTrailingWhitespace(filename, lines):
"""Checks to make sure the file has no lines with trailing whitespace.
Args:
filename: the file under consideration as string
lines: contents of the file as string array
Returns:
A list of tuples with format [(filename, line number, msg), ...] with any
violations found.
"""
lint = []
trailing_whitespace_re = re.compile(r'\s+$')
line_num = 1
for line in lines:
if trailing_whitespace_re.match(line.rstrip('\n')):
lint.append((filename, line_num, 'Trailing whitespace'))
line_num += 1
return lint
class BaseLint:
  def RunOnFile(self, filename, lines):
raise Exception('RunOnFile() unimplemented')
def RunLintOverAllFiles(linter, filenames):
"""Runs linter over the contents of all files.
Args:
lint: subclass of BaseLint, implementing RunOnFile()
filenames: list of all files whose contents will be linted
Returns:
A list of tuples with format [(filename, line number, msg), ...] with any
violations found.
"""
lint = []
for filename in filenames:
file = open(filename, 'r')
if not file:
      print 'Could not open %s' % filename
continue
lines = file.readlines()
lint.extend(linter.RunOnFile(filename, lines))
return lint
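# Illustrative wiring of a concrete linter to the driver above (sketch only;
# 'foo.cpp' is a placeholder path):
#   class TabLint(BaseLint):
#     def RunOnFile(self, filename, lines):
#       return VerifyTabs(filename, lines)
#   for filename, line, msg in RunLintOverAllFiles(TabLint(), ['foo.cpp']):
#     print '%s:%d: %s' % (filename, line, msg)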
| lgpl-2.1 | 4,966,973,305,257,724,000 | 25.690722 | 78 | 0.668212 | false |
toshywoshy/ansible | lib/ansible/module_utils/aws/iam.py | 60 | 2118 | # Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import traceback
try:
from botocore.exceptions import ClientError, NoCredentialsError
except ImportError:
pass # caught by HAS_BOTO3
from ansible.module_utils._text import to_native
def get_aws_account_id(module):
""" Given AnsibleAWSModule instance, get the active AWS account ID
get_account_id tries too find out the account that we are working
on. It's not guaranteed that this will be easy so we try in
several different ways. Giving either IAM or STS privilages to
the account should be enough to permit this.
"""
account_id = None
try:
sts_client = module.client('sts')
account_id = sts_client.get_caller_identity().get('Account')
# non-STS sessions may also get NoCredentialsError from this STS call, so
# we must catch that too and try the IAM version
except (ClientError, NoCredentialsError):
try:
iam_client = module.client('iam')
account_id = iam_client.get_user()['User']['Arn'].split(':')[4]
except ClientError as e:
if (e.response['Error']['Code'] == 'AccessDenied'):
except_msg = to_native(e)
# don't match on `arn:aws` because of China region `arn:aws-cn` and similar
                match = re.search(r"arn:\w+:iam::([0-9]{12,32}):\w+/", except_msg)
                if match:
                    account_id = match.group(1)
if account_id is None:
module.fail_json_aws(e, msg="Could not get AWS account information")
except Exception as e:
module.fail_json(
msg="Failed to get AWS account information, Try allowing sts:GetCallerIdentity or iam:GetUser permissions.",
exception=traceback.format_exc()
)
if not account_id:
module.fail_json(msg="Failed while determining AWS account ID. Try allowing sts:GetCallerIdentity or iam:GetUser permissions.")
return to_native(account_id)
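# Illustrative call site (sketch; `module` is an AnsibleAWSModule instance
# constructed elsewhere in a real Ansible module):
#   account_id = get_aws_account_id(module)
#   module.exit_json(changed=False, aws_account_id=account_id)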
| gpl-3.0 | -5,774,802,890,351,353,000 | 42.22449 | 135 | 0.654391 | false |
GreenRecycleBin/servo | tests/wpt/web-platform-tests/tools/html5lib/setup.py | 418 | 1694 | from distutils.core import setup
import os
import codecs
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'
]
packages = ['html5lib'] + ['html5lib.'+name
for name in os.listdir(os.path.join('html5lib'))
if os.path.isdir(os.path.join('html5lib', name)) and
not name.startswith('.') and name != 'tests']
current_dir = os.path.dirname(__file__)
with codecs.open(os.path.join(current_dir, 'README.rst'), 'r', 'utf8') as readme_file:
with codecs.open(os.path.join(current_dir, 'CHANGES.rst'), 'r', 'utf8') as changes_file:
long_description = readme_file.read() + '\n' + changes_file.read()
setup(name='html5lib',
version='0.9999-dev',
url='https://github.com/html5lib/html5lib-python',
license="MIT License",
      description='HTML parser based on the WHATWG HTML specification',
long_description=long_description,
classifiers=classifiers,
maintainer='James Graham',
maintainer_email='james@hoppipolla.co.uk',
packages=packages,
install_requires=[
'six',
],
)
| mpl-2.0 | 4,715,814,492,737,516,000 | 37.5 | 92 | 0.615112 | false |
chadoe/xbmc | addons/service.xbmc.versioncheck/service.py | 58 | 3959 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Team-XBMC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import platform
import sys
import xbmc
import lib.common
from lib.common import log, dialog_yesno
from lib.common import upgrade_message as _upgrademessage
from lib.common import upgrade_message2 as _upgrademessage2
ADDON = lib.common.ADDON
ADDONVERSION = lib.common.ADDONVERSION
ADDONNAME = lib.common.ADDONNAME
ADDONPATH = lib.common.ADDONPATH
ICON = lib.common.ICON
oldversion = False
class Main:
def __init__(self):
linux = False
packages = []
xbmc.sleep(5000)
if xbmc.getCondVisibility('System.Platform.Linux') and ADDON.getSetting("upgrade_apt") == 'true':
packages = ['kodi']
_versionchecklinux(packages)
else:
oldversion, version_installed, version_available, version_stable = _versioncheck()
if oldversion:
_upgrademessage2( version_installed, version_available, version_stable, oldversion, False)
def _versioncheck():
# initial vars
from lib.jsoninterface import get_installedversion, get_versionfilelist
from lib.versions import compare_version
# retrieve versionlists from supplied version file
versionlist = get_versionfilelist()
# retrieve version installed
version_installed = get_installedversion()
# copmpare installed and available
oldversion, version_installed, version_available, version_stable = compare_version(version_installed, versionlist)
return oldversion, version_installed, version_available, version_stable
def _versionchecklinux(packages):
if platform.dist()[0].lower() in ['ubuntu', 'debian', 'linuxmint']:
handler = False
result = False
try:
# try aptdeamon first
from lib.aptdeamonhandler import AptdeamonHandler
handler = AptdeamonHandler()
except:
# fallback to shell
# since we need the user password, ask to check for new version first
from lib.shellhandlerapt import ShellHandlerApt
sudo = True
handler = ShellHandlerApt(sudo)
if dialog_yesno(32015):
pass
elif dialog_yesno(32009, 32010):
log("disabling addon by user request")
ADDON.setSetting("versioncheck_enable", 'false')
return
if handler:
if handler.check_upgrade_available(packages[0]):
if _upgrademessage(32012, oldversion, True):
if ADDON.getSetting("upgrade_system") == "false":
result = handler.upgrade_package(packages[0])
else:
result = handler.upgrade_system()
if result:
from lib.common import message_upgrade_success, message_restart
message_upgrade_success()
message_restart()
else:
log("Error during upgrade")
else:
log("Error: no handler found")
else:
log("Unsupported platform %s" %platform.dist()[0])
sys.exit(0)
if (__name__ == "__main__"):
log('Version %s started' % ADDONVERSION)
Main()
| gpl-2.0 | 3,708,791,664,689,183,000 | 37.067308 | 118 | 0.626421 | false |
mancoast/CPythonPyc_test | fail/300_test_httpservers.py | 3 | 11640 | """Unittests for the various HTTPServer modules.
Written by Cody A.W. Somerville <cody-somerville@ubuntu.com>,
Josip Dzolonga, and Michael Otteneder for the 2007/08 GHOP contest.
"""
from http.server import BaseHTTPRequestHandler, HTTPServer, \
SimpleHTTPRequestHandler, CGIHTTPRequestHandler
import os
import sys
import base64
import shutil
import urllib.parse
import http.client
import tempfile
import threading
import unittest
from test import support
class NoLogRequestHandler:
def log_message(self, *args):
# don't write log messages to stderr
pass
def read(self, n=None):
return ''
class TestServerThread(threading.Thread):
def __init__(self, test_object, request_handler):
threading.Thread.__init__(self)
self.request_handler = request_handler
self.test_object = test_object
self.test_object.lock.acquire()
def run(self):
self.server = HTTPServer(('', 0), self.request_handler)
self.test_object.PORT = self.server.socket.getsockname()[1]
self.test_object.lock.release()
try:
self.server.serve_forever()
finally:
self.server.server_close()
def stop(self):
self.server.shutdown()
class BaseTestCase(unittest.TestCase):
def setUp(self):
self.lock = threading.Lock()
self.thread = TestServerThread(self, self.request_handler)
self.thread.start()
self.lock.acquire()
def tearDown(self):
self.lock.release()
self.thread.stop()
def request(self, uri, method='GET', body=None, headers={}):
self.connection = http.client.HTTPConnection('localhost', self.PORT)
self.connection.request(method, uri, body, headers)
return self.connection.getresponse()
class BaseHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, BaseHTTPRequestHandler):
protocol_version = 'HTTP/1.1'
default_request_version = 'HTTP/1.1'
def do_TEST(self):
self.send_response(204)
self.send_header('Content-Type', 'text/html')
self.send_header('Connection', 'close')
self.end_headers()
def do_KEEP(self):
self.send_response(204)
self.send_header('Content-Type', 'text/html')
self.send_header('Connection', 'keep-alive')
self.end_headers()
def do_KEYERROR(self):
self.send_error(999)
def do_CUSTOM(self):
self.send_response(999)
self.send_header('Content-Type', 'text/html')
self.send_header('Connection', 'close')
self.end_headers()
def setUp(self):
BaseTestCase.setUp(self)
self.con = http.client.HTTPConnection('localhost', self.PORT)
self.con.connect()
def test_command(self):
self.con.request('GET', '/')
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_request_line_trimming(self):
self.con._http_vsn_str = 'HTTP/1.1\n'
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_version_bogus(self):
self.con._http_vsn_str = 'FUBAR'
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 400)
def test_version_digits(self):
self.con._http_vsn_str = 'HTTP/9.9.9'
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 400)
def test_version_none_get(self):
self.con._http_vsn_str = ''
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_version_none(self):
self.con._http_vsn_str = ''
self.con.putrequest('PUT', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 400)
def test_version_invalid(self):
self.con._http_vsn = 99
self.con._http_vsn_str = 'HTTP/9.9'
self.con.putrequest('GET', '/')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 505)
def test_send_blank(self):
self.con._http_vsn_str = ''
self.con.putrequest('', '')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 400)
def test_header_close(self):
self.con.putrequest('GET', '/')
self.con.putheader('Connection', 'close')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_head_keep_alive(self):
self.con._http_vsn_str = 'HTTP/1.1'
self.con.putrequest('GET', '/')
self.con.putheader('Connection', 'keep-alive')
self.con.endheaders()
res = self.con.getresponse()
self.assertEquals(res.status, 501)
def test_handler(self):
self.con.request('TEST', '/')
res = self.con.getresponse()
self.assertEquals(res.status, 204)
def test_return_header_keep_alive(self):
self.con.request('KEEP', '/')
res = self.con.getresponse()
self.assertEquals(res.getheader('Connection'), 'keep-alive')
self.con.request('TEST', '/')
def test_internal_key_error(self):
self.con.request('KEYERROR', '/')
res = self.con.getresponse()
self.assertEquals(res.status, 999)
def test_return_custom_status(self):
self.con.request('CUSTOM', '/')
res = self.con.getresponse()
self.assertEquals(res.status, 999)
class SimpleHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, SimpleHTTPRequestHandler):
pass
def setUp(self):
BaseTestCase.setUp(self)
self.cwd = os.getcwd()
basetempdir = tempfile.gettempdir()
os.chdir(basetempdir)
self.data = b'We are the knights who say Ni!'
self.tempdir = tempfile.mkdtemp(dir=basetempdir)
self.tempdir_name = os.path.basename(self.tempdir)
temp = open(os.path.join(self.tempdir, 'test'), 'wb')
temp.write(self.data)
temp.close()
def tearDown(self):
try:
os.chdir(self.cwd)
try:
shutil.rmtree(self.tempdir)
except:
pass
finally:
BaseTestCase.tearDown(self)
def check_status_and_reason(self, response, status, data=None):
body = response.read()
self.assert_(response)
self.assertEquals(response.status, status)
self.assert_(response.reason != None)
if data:
self.assertEqual(data, body)
def test_get(self):
        # constructs the path relative to the root directory of the HTTPServer
response = self.request(self.tempdir_name + '/test')
self.check_status_and_reason(response, 200, data=self.data)
response = self.request(self.tempdir_name + '/')
self.check_status_and_reason(response, 200)
response = self.request(self.tempdir_name)
self.check_status_and_reason(response, 301)
response = self.request('/ThisDoesNotExist')
self.check_status_and_reason(response, 404)
response = self.request('/' + 'ThisDoesNotExist' + '/')
self.check_status_and_reason(response, 404)
        # create an index.html so the directory listing is replaced by the file
        open(os.path.join(self.tempdir_name, 'index.html'), 'w').close()
response = self.request('/' + self.tempdir_name + '/')
self.check_status_and_reason(response, 200)
if os.name == 'posix':
# chmod won't work as expected on Windows platforms
os.chmod(self.tempdir, 0)
response = self.request(self.tempdir_name + '/')
self.check_status_and_reason(response, 404)
os.chmod(self.tempdir, 0o755)
def test_head(self):
response = self.request(
self.tempdir_name + '/test', method='HEAD')
self.check_status_and_reason(response, 200)
self.assertEqual(response.getheader('content-length'),
str(len(self.data)))
self.assertEqual(response.getheader('content-type'),
'application/octet-stream')
def test_invalid_requests(self):
response = self.request('/', method='FOO')
self.check_status_and_reason(response, 501)
# requests must be case sensitive,so this should fail too
response = self.request('/', method='get')
self.check_status_and_reason(response, 501)
response = self.request('/', method='GETs')
self.check_status_and_reason(response, 501)
cgi_file1 = """\
#!%s
print("Content-type: text/html")
print()
print("Hello World")
"""
cgi_file2 = """\
#!%s
import cgi
print("Content-type: text/html")
print()
form = cgi.FieldStorage()
print("%%s, %%s, %%s" %% (form.getfirst("spam"), form.getfirst("eggs"),\
form.getfirst("bacon")))
"""
class CGIHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, CGIHTTPRequestHandler):
pass
def setUp(self):
BaseTestCase.setUp(self)
self.parent_dir = tempfile.mkdtemp()
self.cgi_dir = os.path.join(self.parent_dir, 'cgi-bin')
os.mkdir(self.cgi_dir)
self.file1_path = os.path.join(self.cgi_dir, 'file1.py')
with open(self.file1_path, 'w') as file1:
file1.write(cgi_file1 % sys.executable)
os.chmod(self.file1_path, 0o777)
self.file2_path = os.path.join(self.cgi_dir, 'file2.py')
with open(self.file2_path, 'w') as file2:
file2.write(cgi_file2 % sys.executable)
os.chmod(self.file2_path, 0o777)
self.cwd = os.getcwd()
os.chdir(self.parent_dir)
def tearDown(self):
try:
os.chdir(self.cwd)
os.remove(self.file1_path)
os.remove(self.file2_path)
os.rmdir(self.cgi_dir)
os.rmdir(self.parent_dir)
finally:
BaseTestCase.tearDown(self)
def test_headers_and_content(self):
res = self.request('/cgi-bin/file1.py')
self.assertEquals((b'Hello World\n', 'text/html', 200), \
(res.read(), res.getheader('Content-type'), res.status))
def test_post(self):
params = urllib.parse.urlencode(
{'spam' : 1, 'eggs' : 'python', 'bacon' : 123456})
headers = {'Content-type' : 'application/x-www-form-urlencoded'}
res = self.request('/cgi-bin/file2.py', 'POST', params, headers)
self.assertEquals(res.read(), b'1, python, 123456\n')
def test_invaliduri(self):
res = self.request('/cgi-bin/invalid')
res.read()
self.assertEquals(res.status, 404)
def test_authorization(self):
headers = {b'Authorization' : b'Basic ' +
base64.b64encode(b'username:pass')}
res = self.request('/cgi-bin/file1.py', 'GET', headers=headers)
self.assertEquals((b'Hello World\n', 'text/html', 200), \
(res.read(), res.getheader('Content-type'), res.status))
def test_main(verbose=None):
try:
cwd = os.getcwd()
support.run_unittest(BaseHTTPServerTestCase,
SimpleHTTPServerTestCase,
CGIHTTPServerTestCase
)
finally:
os.chdir(cwd)
if __name__ == '__main__':
test_main()
| gpl-3.0 | 1,020,503,780,278,987,000 | 31.696629 | 77 | 0.599914 | false |
mrquim/repository.mrquim | repo/script.module.pycryptodome/lib/Crypto/SelfTest/Protocol/test_KDF.py | 5 | 16508 | # -*- coding: utf-8 -*-
#
# SelfTest/Protocol/test_KDF.py: Self-test for key derivation functions
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
import unittest
from binascii import unhexlify
from Crypto.Util.py3compat import *
from Crypto.SelfTest.st_common import list_test_cases
from Crypto.Hash import SHA1, HMAC, SHA256
from Crypto.Cipher import AES, DES3
from Crypto.Protocol.KDF import PBKDF1, PBKDF2, _S2V, HKDF, scrypt
def t2b(t):
if t is None:
return None
t2 = t.replace(" ", "").replace("\n", "")
return unhexlify(b(t2))
class TestVector(object):
pass
class PBKDF1_Tests(unittest.TestCase):
# List of tuples with test data.
# Each tuple is made up by:
# Item #0: a pass phrase
# Item #1: salt (8 bytes encoded in hex)
# Item #2: output key length
# Item #3: iterations to use
# Item #4: expected result (encoded in hex)
_testData = (
# From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf
("password","78578E5A5D63CB06",16,1000,"DC19847E05C64D2FAF10EBFB4A3D2A20"),
)
def test1(self):
v = self._testData[0]
res = PBKDF1(v[0], t2b(v[1]), v[2], v[3], SHA1)
self.assertEqual(res, t2b(v[4]))
class PBKDF2_Tests(unittest.TestCase):
# List of tuples with test data.
# Each tuple is made up by:
# Item #0: a pass phrase
# Item #1: salt (encoded in hex)
# Item #2: output key length
# Item #3: iterations to use
# Item #4: expected result (encoded in hex)
_testData = (
# From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf
("password","78578E5A5D63CB06",24,2048,"BFDE6BE94DF7E11DD409BCE20A0255EC327CB936FFE93643"),
# From RFC 6050
("password","73616c74", 20, 1, "0c60c80f961f0e71f3a9b524af6012062fe037a6"),
("password","73616c74", 20, 2, "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"),
("password","73616c74", 20, 4096, "4b007901b765489abead49d926f721d065a429c1"),
("passwordPASSWORDpassword","73616c7453414c5473616c7453414c5473616c7453414c5473616c7453414c5473616c74",
25, 4096, "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"),
( 'pass\x00word',"7361006c74",16,4096, "56fa6aa75548099dcc37d7f03425e0c3"),
)
def test1(self):
# Test only for HMAC-SHA1 as PRF
def prf(p,s):
return HMAC.new(p,s,SHA1).digest()
for i in xrange(len(self._testData)):
v = self._testData[i]
res = PBKDF2(v[0], t2b(v[1]), v[2], v[3])
res2 = PBKDF2(v[0], t2b(v[1]), v[2], v[3], prf)
self.assertEqual(res, t2b(v[4]))
self.assertEqual(res, res2)
def test2(self):
"""From draft-josefsson-scrypt-kdf-01, Chapter 10"""
output_1 = t2b("""
55 ac 04 6e 56 e3 08 9f ec 16 91 c2 25 44 b6 05
f9 41 85 21 6d de 04 65 e6 8b 9d 57 c2 0d ac bc
49 ca 9c cc f1 79 b6 45 99 16 64 b3 9d 77 ef 31
7c 71 b8 45 b1 e3 0b d5 09 11 20 41 d3 a1 97 83
""")
output_2 = t2b("""
4d dc d8 f6 0b 98 be 21 83 0c ee 5e f2 27 01 f9
64 1a 44 18 d0 4c 04 14 ae ff 08 87 6b 34 ab 56
a1 d4 25 a1 22 58 33 54 9a db 84 1b 51 c9 b3 17
6a 27 2b de bb a1 d0 78 47 8f 62 b3 97 f3 3c 8d
""")
prf_hmac_sha256 = lambda p, s: HMAC.new(p, s, SHA256).digest()
output = PBKDF2(b("passwd"), b("salt"), 64, 1, prf=prf_hmac_sha256)
self.assertEqual(output, output_1)
output = PBKDF2(b("Password"), b("NaCl"), 64, 80000, prf=prf_hmac_sha256)
self.assertEqual(output, output_2)
class S2V_Tests(unittest.TestCase):
# Sequence of test vectors.
# Each test vector is made up by:
# Item #0: a tuple of strings
# Item #1: an AES key
# Item #2: the result
# Item #3: the cipher module S2V is based on
# Everything is hex encoded
_testData = [
# RFC5297, A.1
(
( '101112131415161718191a1b1c1d1e1f2021222324252627',
'112233445566778899aabbccddee' ),
'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0',
'85632d07c6e8f37f950acd320a2ecc93',
AES
),
# RFC5297, A.2
(
( '00112233445566778899aabbccddeeffdeaddadadeaddadaffeeddcc'+
'bbaa99887766554433221100',
'102030405060708090a0',
'09f911029d74e35bd84156c5635688c0',
'7468697320697320736f6d6520706c61'+
'696e7465787420746f20656e63727970'+
'74207573696e67205349562d414553'),
'7f7e7d7c7b7a79787776757473727170',
'7bdb6e3b432667eb06f4d14bff2fbd0f',
AES
),
]
def test1(self):
"""Verify correctness of test vector"""
for tv in self._testData:
s2v = _S2V.new(t2b(tv[1]), tv[3])
for s in tv[0]:
s2v.update(t2b(s))
result = s2v.derive()
self.assertEqual(result, t2b(tv[2]))
def test2(self):
"""Verify that no more than 127(AES) and 63(TDES)
components are accepted."""
key = bchr(0) * 8 + bchr(255) * 8
for module in (AES, DES3):
s2v = _S2V.new(key, module)
max_comps = module.block_size*8-1
for i in xrange(max_comps):
s2v.update(b("XX"))
self.assertRaises(TypeError, s2v.update, b("YY"))
class HKDF_Tests(unittest.TestCase):
# Test vectors from RFC5869, Appendix A
# Each tuple is made up by:
# Item #0: hash module
# Item #1: secret
# Item #2: salt
# Item #3: context
# Item #4: expected result
_test_vector = (
(
SHA256,
"0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b",
"000102030405060708090a0b0c",
"f0f1f2f3f4f5f6f7f8f9",
42,
"3cb25f25faacd57a90434f64d0362f2a" +
"2d2d0a90cf1a5a4c5db02d56ecc4c5bf" +
"34007208d5b887185865"
),
(
SHA256,
"000102030405060708090a0b0c0d0e0f" +
"101112131415161718191a1b1c1d1e1f" +
"202122232425262728292a2b2c2d2e2f" +
"303132333435363738393a3b3c3d3e3f" +
"404142434445464748494a4b4c4d4e4f",
"606162636465666768696a6b6c6d6e6f" +
"707172737475767778797a7b7c7d7e7f" +
"808182838485868788898a8b8c8d8e8f" +
"909192939495969798999a9b9c9d9e9f" +
"a0a1a2a3a4a5a6a7a8a9aaabacadaeaf",
"b0b1b2b3b4b5b6b7b8b9babbbcbdbebf" +
"c0c1c2c3c4c5c6c7c8c9cacbcccdcecf" +
"d0d1d2d3d4d5d6d7d8d9dadbdcdddedf" +
"e0e1e2e3e4e5e6e7e8e9eaebecedeeef" +
"f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
82,
"b11e398dc80327a1c8e7f78c596a4934" +
"4f012eda2d4efad8a050cc4c19afa97c" +
"59045a99cac7827271cb41c65e590e09" +
"da3275600c2f09b8367793a9aca3db71" +
"cc30c58179ec3e87c14c01d5c1f3434f" +
"1d87"
),
(
SHA256,
"0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b",
None,
None,
42,
"8da4e775a563c18f715f802a063c5a31" +
"b8a11f5c5ee1879ec3454e5f3c738d2d" +
"9d201395faa4b61a96c8"
),
(
SHA1,
"0b0b0b0b0b0b0b0b0b0b0b",
"000102030405060708090a0b0c",
"f0f1f2f3f4f5f6f7f8f9",
42,
"085a01ea1b10f36933068b56efa5ad81" +
"a4f14b822f5b091568a9cdd4f155fda2" +
"c22e422478d305f3f896"
),
(
SHA1,
"000102030405060708090a0b0c0d0e0f" +
"101112131415161718191a1b1c1d1e1f" +
"202122232425262728292a2b2c2d2e2f" +
"303132333435363738393a3b3c3d3e3f" +
"404142434445464748494a4b4c4d4e4f",
"606162636465666768696a6b6c6d6e6f" +
"707172737475767778797a7b7c7d7e7f" +
"808182838485868788898a8b8c8d8e8f" +
"909192939495969798999a9b9c9d9e9f" +
"a0a1a2a3a4a5a6a7a8a9aaabacadaeaf",
"b0b1b2b3b4b5b6b7b8b9babbbcbdbebf" +
"c0c1c2c3c4c5c6c7c8c9cacbcccdcecf" +
"d0d1d2d3d4d5d6d7d8d9dadbdcdddedf" +
"e0e1e2e3e4e5e6e7e8e9eaebecedeeef" +
"f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
82,
"0bd770a74d1160f7c9f12cd5912a06eb" +
"ff6adcae899d92191fe4305673ba2ffe" +
"8fa3f1a4e5ad79f3f334b3b202b2173c" +
"486ea37ce3d397ed034c7f9dfeb15c5e" +
"927336d0441f4c4300e2cff0d0900b52" +
"d3b4"
),
(
SHA1,
"0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b",
"",
"",
42,
"0ac1af7002b3d761d1e55298da9d0506" +
"b9ae52057220a306e07b6b87e8df21d0" +
"ea00033de03984d34918"
),
(
SHA1,
"0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c",
None,
"",
42,
"2c91117204d745f3500d636a62f64f0a" +
"b3bae548aa53d423b0d1f27ebba6f5e5" +
"673a081d70cce7acfc48"
)
)
def test1(self):
for tv in self._test_vector:
secret, salt, info, exp = [ t2b(tv[x]) for x in (1,2,3,5) ]
key_len, hashmod = [ tv[x] for x in (4,0) ]
output = HKDF(secret, key_len, salt, hashmod, 1, info)
self.assertEqual(output, exp)
def test2(self):
ref = HKDF(b("XXXXXX"), 12, b("YYYY"), SHA1)
# Same output, but this time split over 2 keys
key1, key2 = HKDF(b("XXXXXX"), 6, b("YYYY"), SHA1, 2)
self.assertEqual((ref[:6], ref[6:]), (key1, key2))
# Same output, but this time split over 3 keys
key1, key2, key3 = HKDF(b("XXXXXX"), 4, b("YYYY"), SHA1, 3)
self.assertEqual((ref[:4], ref[4:8], ref[8:]), (key1, key2, key3))
class scrypt_Tests(unittest.TestCase):
# Test vectors taken from
# http://tools.ietf.org/html/draft-josefsson-scrypt-kdf-00
data = (
(
"",
"",
16, # 2K
1,
1,
"""
77 d6 57 62 38 65 7b 20 3b 19 ca 42 c1 8a 04 97
f1 6b 48 44 e3 07 4a e8 df df fa 3f ed e2 14 42
fc d0 06 9d ed 09 48 f8 32 6a 75 3a 0f c8 1f 17
e8 d3 e0 fb 2e 0d 36 28 cf 35 e2 0c 38 d1 89 06
"""
),
(
"password",
"NaCl",
1024, # 1M
8,
16,
"""
fd ba be 1c 9d 34 72 00 78 56 e7 19 0d 01 e9 fe
7c 6a d7 cb c8 23 78 30 e7 73 76 63 4b 37 31 62
2e af 30 d9 2e 22 a3 88 6f f1 09 27 9d 98 30 da
c7 27 af b9 4a 83 ee 6d 83 60 cb df a2 cc 06 40
"""
),
(
"pleaseletmein",
"SodiumChloride",
16384, # 16M
8,
1,
"""
70 23 bd cb 3a fd 73 48 46 1c 06 cd 81 fd 38 eb
fd a8 fb ba 90 4f 8e 3e a9 b5 43 f6 54 5d a1 f2
d5 43 29 55 61 3f 0f cf 62 d4 97 05 24 2a 9a f9
e6 1e 85 dc 0d 65 1e 40 df cf 01 7b 45 57 58 87
"""
),
(
"pleaseletmein",
"SodiumChloride",
1048576, # 1G
8,
1,
"""
21 01 cb 9b 6a 51 1a ae ad db be 09 cf 70 f8 81
ec 56 8d 57 4a 2f fd 4d ab e5 ee 98 20 ad aa 47
8e 56 fd 8f 4b a5 d0 9f fa 1c 6d 92 7c 40 f4 c3
37 30 40 49 e8 a9 52 fb cb f4 5c 6f a7 7a 41 a4
"""
),
)
def setUp(self):
new_test_vectors = []
for tv in self.data:
new_tv = TestVector()
new_tv.P = b(tv[0])
new_tv.S = b(tv[1])
new_tv.N = tv[2]
new_tv.r = tv[3]
new_tv.p = tv[4]
new_tv.output = t2b(tv[5])
new_tv.dkLen = len(new_tv.output)
new_test_vectors.append(new_tv)
self.data = new_test_vectors
def _test1(self):
b_input = t2b("""
f7 ce 0b 65 3d 2d 72 a4 10 8c f5 ab e9 12 ff dd
77 76 16 db bb 27 a7 0e 82 04 f3 ae 2d 0f 6f ad
89 f6 8f 48 11 d1 e8 7b cc 3b d7 40 0a 9f fd 29
09 4f 01 84 63 95 74 f3 9a e5 a1 31 52 17 bc d7
89 49 91 44 72 13 bb 22 6c 25 b5 4d a8 63 70 fb
cd 98 43 80 37 46 66 bb 8f fc b5 bf 40 c2 54 b0
67 d2 7c 51 ce 4a d5 fe d8 29 c9 0b 50 5a 57 1b
7f 4d 1c ad 6a 52 3c da 77 0e 67 bc ea af 7e 89
""")
b_output = t2b("""
79 cc c1 93 62 9d eb ca 04 7f 0b 70 60 4b f6 b6
2c e3 dd 4a 96 26 e3 55 fa fc 61 98 e6 ea 2b 46
d5 84 13 67 3b 99 b0 29 d6 65 c3 57 60 1f b4 26
a0 b2 f4 bb a2 00 ee 9f 0a 43 d1 9b 57 1a 9c 71
ef 11 42 e6 5d 5a 26 6f dd ca 83 2c e5 9f aa 7c
ac 0b 9c f1 be 2b ff ca 30 0d 01 ee 38 76 19 c4
ae 12 fd 44 38 f2 03 a0 e4 e1 c4 7e c3 14 86 1f
4e 90 87 cb 33 39 6a 68 73 e8 f9 d2 53 9a 4b 8e
""")
from Crypto.Protocol.KDF import _scryptROMix
output = _scryptROMix(b_input, 16)
self.assertEqual(output, b_output)
def test2(self):
for tv in self.data:
# TODO: add runtime flag to enable test vectors
# with humongous memory usage
if tv.N > 100000:
continue
output = scrypt(tv.P, tv.S, tv.dkLen, tv.N, tv.r, tv.p)
self.assertEqual(output, tv.output)
def test3(self):
ref = scrypt(b("password"), b("salt"), 12, 16, 1, 1)
# Same output, but this time split over 2 keys
key1, key2 = scrypt(b("password"), b("salt"), 6, 16, 1, 1, 2)
self.assertEqual((ref[:6], ref[6:]), (key1, key2))
# Same output, but this time split over 3 keys
key1, key2, key3 = scrypt(b("password"), b("salt"), 4, 16, 1, 1, 3)
self.assertEqual((ref[:4], ref[4:8], ref[8:]), (key1, key2, key3))
def get_tests(config={}):
tests = []
tests += list_test_cases(PBKDF1_Tests)
tests += list_test_cases(PBKDF2_Tests)
tests += list_test_cases(S2V_Tests)
tests += list_test_cases(HKDF_Tests)
tests += list_test_cases(scrypt_Tests)
return tests
if __name__ == '__main__':
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4
| gpl-2.0 | -2,932,617,852,454,835,000 | 35.766147 | 115 | 0.525079 | false |
dllsf/odootest | addons/crm_partner_assign/wizard/crm_forward_to_partner.py | 377 | 10606 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class crm_lead_forward_to_partner(osv.TransientModel):
""" Forward info history to partners. """
_name = 'crm.lead.forward.to.partner'
def _convert_to_assignation_line(self, cr, uid, lead, partner, context=None):
lead_location = []
partner_location = []
if lead.country_id:
lead_location.append(lead.country_id.name)
if lead.city:
lead_location.append(lead.city)
if partner:
if partner.country_id:
partner_location.append(partner.country_id.name)
if partner.city:
partner_location.append(partner.city)
return {'lead_id': lead.id,
'lead_location': ", ".join(lead_location),
'partner_assigned_id': partner and partner.id or False,
'partner_location': ", ".join(partner_location),
'lead_link': self.get_lead_portal_url(cr, uid, lead.id, lead.type, context=context),
}
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
lead_obj = self.pool.get('crm.lead')
email_template_obj = self.pool.get('email.template')
try:
template_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'crm_partner_assign', 'email_template_lead_forward_mail')[1]
except ValueError:
template_id = False
res = super(crm_lead_forward_to_partner, self).default_get(cr, uid, fields, context=context)
active_ids = context.get('active_ids')
default_composition_mode = context.get('default_composition_mode')
res['assignation_lines'] = []
if template_id:
res['body'] = email_template_obj.get_email_template(cr, uid, template_id).body_html
if active_ids:
lead_ids = lead_obj.browse(cr, uid, active_ids, context=context)
if default_composition_mode == 'mass_mail':
partner_assigned_ids = lead_obj.search_geo_partner(cr, uid, active_ids, context=context)
else:
partner_assigned_ids = dict((lead.id, lead.partner_assigned_id and lead.partner_assigned_id.id or False) for lead in lead_ids)
res['partner_id'] = lead_ids[0].partner_assigned_id.id
for lead in lead_ids:
partner_id = partner_assigned_ids.get(lead.id) or False
partner = False
if partner_id:
partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
res['assignation_lines'].append(self._convert_to_assignation_line(cr, uid, lead, partner))
return res
def action_forward(self, cr, uid, ids, context=None):
lead_obj = self.pool.get('crm.lead')
record = self.browse(cr, uid, ids[0], context=context)
email_template_obj = self.pool.get('email.template')
try:
template_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'crm_partner_assign', 'email_template_lead_forward_mail')[1]
except ValueError:
raise osv.except_osv(_('Email Template Error'),
_('The Forward Email Template is not in the database'))
try:
portal_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'group_portal')[1]
except ValueError:
raise osv.except_osv(_('Portal Group Error'),
_('The Portal group cannot be found'))
local_context = context.copy()
if not (record.forward_type == 'single'):
no_email = set()
for lead in record.assignation_lines:
if lead.partner_assigned_id and not lead.partner_assigned_id.email:
no_email.add(lead.partner_assigned_id.name)
if no_email:
raise osv.except_osv(_('Email Error'),
('Set an email address for the partner(s): %s' % ", ".join(no_email)))
if record.forward_type == 'single' and not record.partner_id.email:
raise osv.except_osv(_('Email Error'),
('Set an email address for the partner %s' % record.partner_id.name))
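        # Group the selected leads by receiving partner so a single forward
        # email is sent per partner:
        # {partner_id: {'partner': browse_record, 'leads': [lead_details, ...]}}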
partners_leads = {}
for lead in record.assignation_lines:
partner = record.forward_type == 'single' and record.partner_id or lead.partner_assigned_id
lead_details = {
'lead_link': lead.lead_link,
'lead_id': lead.lead_id,
}
if partner:
partner_leads = partners_leads.get(partner.id)
if partner_leads:
partner_leads['leads'].append(lead_details)
else:
partners_leads[partner.id] = {'partner': partner, 'leads': [lead_details]}
stage_id = False
if record.assignation_lines and record.assignation_lines[0].lead_id.type == 'lead':
try:
stage_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'crm_partner_assign', 'stage_portal_lead_assigned')[1]
except ValueError:
pass
for partner_id, partner_leads in partners_leads.items():
in_portal = False
for contact in (partner.child_ids or [partner]):
if contact.user_ids:
in_portal = portal_id in [g.id for g in contact.user_ids[0].groups_id]
local_context['partner_id'] = partner_leads['partner']
local_context['partner_leads'] = partner_leads['leads']
local_context['partner_in_portal'] = in_portal
email_template_obj.send_mail(cr, uid, template_id, ids[0], context=local_context)
lead_ids = [lead['lead_id'].id for lead in partner_leads['leads']]
values = {'partner_assigned_id': partner_id, 'user_id': partner_leads['partner'].user_id.id}
if stage_id:
values['stage_id'] = stage_id
if partner_leads['partner'].user_id:
values['section_id'] = partner_leads['partner'].user_id.default_section_id.id
lead_obj.write(cr, uid, lead_ids, values)
self.pool.get('crm.lead').message_subscribe(cr, uid, lead_ids, [partner_id], context=context)
return True
def get_lead_portal_url(self, cr, uid, lead_id, type, context=None):
action = type == 'opportunity' and 'action_portal_opportunities' or 'action_portal_leads'
try:
action_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'crm_partner_assign', action)[1]
except ValueError:
action_id = False
portal_link = "%s/?db=%s#id=%s&action=%s&view_type=form" % (self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url'), cr.dbname, lead_id, action_id)
return portal_link
def get_portal_url(self, cr, uid, ids, context=None):
portal_link = "%s/?db=%s" % (self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url'), cr.dbname)
return portal_link
_columns = {
'forward_type': fields.selection([('single', 'a single partner: manual selection of partner'), ('assigned', "several partners: automatic assignation, using GPS coordinates and partner's grades"), ], 'Forward selected leads to'),
'partner_id': fields.many2one('res.partner', 'Forward Leads To'),
'assignation_lines': fields.one2many('crm.lead.assignation', 'forward_id', 'Partner Assignation'),
'body': fields.html('Contents', help='Automatically sanitized HTML contents'),
}
_defaults = {
'forward_type': lambda self, cr, uid, c: c.get('forward_type') or 'single',
}
class crm_lead_assignation (osv.TransientModel):
_name = 'crm.lead.assignation'
_columns = {
'forward_id': fields.many2one('crm.lead.forward.to.partner', 'Partner Assignation'),
'lead_id': fields.many2one('crm.lead', 'Lead'),
'lead_location': fields.char('Lead Location', size=128),
'partner_assigned_id': fields.many2one('res.partner', 'Assigned Partner'),
'partner_location': fields.char('Partner Location', size=128),
'lead_link': fields.char('Lead Single Links', size=128),
}
def on_change_lead_id(self, cr, uid, ids, lead_id, context=None):
if not context:
context = {}
if not lead_id:
return {'value': {'lead_location': False}}
lead = self.pool.get('crm.lead').browse(cr, uid, lead_id, context=context)
lead_location = []
if lead.country_id:
lead_location.append(lead.country_id.name)
if lead.city:
lead_location.append(lead.city)
return {'value': {'lead_location': ", ".join(lead_location)}}
def on_change_partner_assigned_id(self, cr, uid, ids, partner_assigned_id, context=None):
if not context:
context = {}
if not partner_assigned_id:
return {'value': {'lead_location': False}}
partner = self.pool.get('res.partner').browse(cr, uid, partner_assigned_id, context=context)
partner_location = []
if partner.country_id:
partner_location.append(partner.country_id.name)
if partner.city:
partner_location.append(partner.city)
return {'value': {'partner_location': ", ".join(partner_location)}}
# # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 6,313,116,168,425,577,000 | 49.504762 | 236 | 0.591175 | false |
rockyzhang/zhangyanhit-python-for-android-mips | python-modules/twisted/twisted/test/test_abstract.py | 61 | 2546 | # Copyright (c) 2007 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for generic file descriptor based reactor support code.
"""
from twisted.trial.unittest import TestCase
from twisted.internet.abstract import isIPAddress
class AddressTests(TestCase):
"""
Tests for address-related functionality.
"""
def test_decimalDotted(self):
"""
L{isIPAddress} should return C{True} for any decimal dotted
representation of an IPv4 address.
"""
self.assertTrue(isIPAddress('0.1.2.3'))
self.assertTrue(isIPAddress('252.253.254.255'))
def test_shortDecimalDotted(self):
"""
L{isIPAddress} should return C{False} for a dotted decimal
representation with fewer or more than four octets.
"""
self.assertFalse(isIPAddress('0'))
self.assertFalse(isIPAddress('0.1'))
self.assertFalse(isIPAddress('0.1.2'))
self.assertFalse(isIPAddress('0.1.2.3.4'))
def test_invalidLetters(self):
"""
L{isIPAddress} should return C{False} for any non-decimal dotted
representation including letters.
"""
self.assertFalse(isIPAddress('a.2.3.4'))
self.assertFalse(isIPAddress('1.b.3.4'))
def test_invalidPunctuation(self):
"""
L{isIPAddress} should return C{False} for a string containing
strange punctuation.
"""
self.assertFalse(isIPAddress(','))
self.assertFalse(isIPAddress('1,2'))
self.assertFalse(isIPAddress('1,2,3'))
self.assertFalse(isIPAddress('1.,.3,4'))
def test_emptyString(self):
"""
L{isIPAddress} should return C{False} for the empty string.
"""
self.assertFalse(isIPAddress(''))
def test_invalidNegative(self):
"""
L{isIPAddress} should return C{False} for negative decimal values.
"""
self.assertFalse(isIPAddress('-1'))
self.assertFalse(isIPAddress('1.-2'))
self.assertFalse(isIPAddress('1.2.-3'))
self.assertFalse(isIPAddress('1.2.-3.4'))
def test_invalidPositive(self):
"""
L{isIPAddress} should return C{False} for a string containing
positive decimal values greater than 255.
"""
self.assertFalse(isIPAddress('256.0.0.0'))
self.assertFalse(isIPAddress('0.256.0.0'))
self.assertFalse(isIPAddress('0.0.256.0'))
self.assertFalse(isIPAddress('0.0.0.256'))
self.assertFalse(isIPAddress('256.256.256.256'))
| apache-2.0 | -247,295,502,386,415,600 | 29.674699 | 74 | 0.626866 | false |
mylene-campana/hpp-rbprm-corba | script/tests/hrp2_city1_path.py | 1 | 9565 | #/usr/bin/env python
# author: Mylene Campana (mcampana@laas.fr)
# Script which goes with hpp-rbprm-corba package.
# The script launches a skeleton-robot and a groundcrouch environment.
# It defines init and final configs, and solve them with RBPRM.
# Range Of Motions are spheres linked to the 4 end-effectors
#blender/urdf_to_blender.py -p rbprmBuilder/ -i /local/mcampana/devel/hpp/src/animals_description/urdf/skeleton.urdf -o skeleton_blend.py
from hpp.corbaserver import Client
from hpp.corbaserver.rbprm.problem_solver import ProblemSolver
from hpp.corbaserver.rbprm.rbprmbuilder import Builder
from hpp.gepetto import Viewer, PathPlayer
import math
from viewer_library import *
rootJointType = 'freeflyer'
packageName = 'hpp-rbprm-corba'
meshPackageName = 'hpp-rbprm-corba'
urdfName = 'hrp2_trunk_flexible'
urdfNameRoms = ['hrp2_lleg_rom','hrp2_rleg_rom']
urdfSuffix = ""
srdfSuffix = ""
ecsSize = 4
rbprmBuilder = Builder () # RBPRM
rbprmBuilder.loadModel(urdfName, urdfNameRoms, rootJointType, meshPackageName, packageName, urdfSuffix, srdfSuffix)
#rbprmBuilder.setJointBounds ("base_joint_xyz", [-140, 120, -80, 65, 1, 170])
rbprmBuilder.setJointBounds ("base_joint_xyz", [-140, 120, -80, 65, 10, 170])
rbprmBuilder.boundSO3([-0.2,0.2,-3.14,3.14,-0.3,0.3])
rbprmBuilder.setContactSize (0.03,0.08)
rbprmBuilder.client.basic.robot.setDimensionExtraConfigSpace(ecsSize)
rbprmBuilder.client.basic.robot.setExtraConfigSpaceBounds([0,0,0,0,0,0,-3.14,3.14])
ps = ProblemSolver (rbprmBuilder)
ps.client.problem.selectPathValidation("RbprmPathValidation",0.05) # also configValidation
ps.selectPathPlanner("BallisticPlanner") # "PRMplanner"#rbprmBuilder.setFullOrientationMode(True) # RB-shooter follow obstacle-normal orientation
rbprmBuilder.setFrictionCoef(1.2)
rbprmBuilder.setMaxTakeoffVelocity(30)#(8)
rbprmBuilder.setMaxLandingVelocity(30)
ps.client.problem.selectConFigurationShooter("RbprmShooter")
ps.client.problem.selectSteeringMethod("SteeringParabola")
rbprmBuilder.setNumberFilterMatch(0)
r = Viewer (ps); gui = r.client.gui
r(rbprmBuilder.getCurrentConfig ())
pp = PathPlayer (rbprmBuilder.client.basic, r)
r.loadObstacleModel ("iai_maps", "buildings_reduced", "buildings_reduced")
addLight (r, [-3,0,8,1,0,0,0], "li");
# Configs : [x, y, z, q1, q2, q3, q4, dir.x, dir.y, dir.z, theta]
q11 = rbprmBuilder.getCurrentConfig ()
q11[(len(q11)-4):]=[0,0,1,0] # set normal for init / goal config
# q11[0:7] = [16,45,100, 1, 0, 0, 0]; r(q11)# toit en X
#q11[0:7] = [0,27,72.3, 1, 0, 0, 0]; r(q11) # first roof of big tower
#q11[0:7] = [-100,45,0.4, 1, 0, 0, 0]; r(q11) # on floor
#q11[0:7] = [-105,20,29.4, 1, 0, 0, 0]; r(q11) # roof of house
#q11[0:7] = [55,60,0.3, 1, 0, 0, 0]; r(q11) # floor, right side
q11[0:7] = [-11.8,38.2,120.9, 1, 0, 0, 0]; r(q11) # highest tower
rbprmBuilder.isConfigValid(q11)
q22 = q11[::]
#q22[0:7] = [55,60,0.3, 1, 0, 0, 0]; r(q22) # floor, right side
#q22[0:7] = [-11.6,38.5,120.8, 1, 0, 0, 0]; r(q22) # highest tower
q22[0:7] = [16,45,100.5, 1, 0, 0, 0]; r(q22) #toit en X
#q22[0:7] = [-110,20,29.2, 1, 0, 0, 0]; r(q22) #house on left side
#q22[0:7] = [90,40,20.5, 1, 0, 0, 0]; r(q22) #right house
rbprmBuilder.isConfigValid(q22)
ps.clearRoadmap();
ps.setInitialConfig (q11); ps.addGoalConfig (q22)
#r.solveAndDisplay("rm",1,1)
## manually add waypoints (faster computation for tests; works without them, but solving is slow (~1 minute))
"""
waypoints = [[20.075492263329966,
45.67270834760806,
100.0368335278786,
1,
0,
0,
0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0] ,
[4,
24,
72.36757488910698,
0.6025437481958323,
-0.014994289380592305,
0.36339178566529046,
-0.7103960957853586,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0],
[17.90089886471105,
20.51569231026736,
37.4,
0.9780744240181991,
-0.009709317338437355,
0.023538837001709934,
0.20669318660975794,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0]]
pbCl = rbprmBuilder.client.basic.problem
pbCl.addConfigToRoadmap (waypoints[0])
pbCl.addConfigToRoadmap (waypoints[1])
pbCl.addConfigToRoadmap (waypoints[2])
ps.directPath (q11, waypoints[0],True); pathIds0 = ps.numberPaths () - 1
ps.directPath (waypoints[0], waypoints[1],True); pathId01 = ps.numberPaths () - 1
ps.directPath (waypoints[1], waypoints[2],True); pathId12 = ps.numberPaths () - 1
ps.directPath (waypoints[2], q22,True); pathId2g = ps.numberPaths () - 1
pbCl.addEdgeToRoadmap (q11, waypoints[0], pathIds0, True)
pbCl.addEdgeToRoadmap (waypoints[0], waypoints[1], pathId01, True)
pbCl.addEdgeToRoadmap (waypoints[1], waypoints[2], pathId12, True)
pbCl.addEdgeToRoadmap (waypoints[2], q22, pathId2g, True)
##########
"""
t = ps.solve ()
solutionPathId = ps.numberPaths () - 1
pp.displayPath(solutionPathId, [0.0, 0.0, 0.8, 1.0])
rbprmBuilder.rotateAlongPath (solutionPathId,True)
orientedpathId = ps.numberPaths () - 1
#pp(orientedpathId)
r(pp.client.problem.configAtParam(orientedpathId,0))
V0list = rbprmBuilder.getsubPathsV0Vimp("V0",solutionPathId)
Vimplist = rbprmBuilder.getsubPathsV0Vimp("Vimp",solutionPathId)
print("Verify that all RB-waypoints are valid: ")
pathWaypoints = ps.getWaypoints(solutionPathId)
for i in range(1,len(pathWaypoints)-1):
if(not(rbprmBuilder.isConfigValid(pathWaypoints[i])[0])):
print('problem with waypoints number: ' + str(i))
#plotConeWaypoints (rbprmBuilder, solutionPathId, r, "cone_wp_group", "friction_cone_WP2")
#plotCone (q11, rbprmBuilder, r, "cone_11", "friction_cone2"); plotCone (q22, rbprmBuilder, r, "cone_21", "friction_cone2")
rob = rbprmBuilder.client.basic.robot
r(q11)
# Move RB-robot away in viewer
qAway = q11 [::]; qAway[2] = -5;
rbprmBuilder.setCurrentConfig (qAway); r(qAway)
## DEBUG tools ##
"""
cl.obstacle.getObstaclePosition('decor_base')
rbprmBuilder.isConfigValid(q1)
rbprmBuilder.setCurrentConfig(q1)
res=rbprmBuilder.distancesToCollision()
r( ps.configAtParam(0,5) )
ps.optimizePath (0)
ps.clearRoadmap ()
ps.resetGoalConfigs ()
from numpy import *
argmin(rbprmBuilder.distancesToCollision()[0])
rbprmBuilder.getJointNames ()
rbprmBuilder.getConfigSize ()
rbprmBuilder.client.rbprm.rbprm.isRbprmValid (q22)
r.client.gui.getNodeList()
rbprmBuilder.client.rbprm.rbprm.setRbShooter ()
q = rbprmBuilder.client.rbprm.rbprm.rbShoot ()
r(q)
rbprmBuilder.client.rbprm.rbprm.isRbprmValid (q)
rbprmBuilder.client.rbprm.rbprm.setRbShooter ()
r(rbprmBuilder.client.rbprm.rbprm.rbShoot ())
ps.client.problem.getResultValues ()
"""
## 3D viewer tools ##
"""
plotFrame (r, 'frame_group', [0,0,0], 0.6)
gui.removeFromGroup("path0",r.sceneName)
gui.getNodeList()
ps.numberNodes()
pathSamples = plotSampleSubPath (cl, r, pathId, 70, "path0", [0,0,1,1])
plotCone (q1, cl, r, "cone_first", "friction_cone_SG2"); plotCone (q2, cl, r, "cone_second", "friction_cone_SG2")
plotConeWaypoints (cl, pathId, r, "cone_wp_group", "friction_cone_WP2")
# Plot cones and edges in viewer
plotConesRoadmap (cl, r, 'cone_rm_group', "friction_cone2")
plotEdgesRoadmap (cl, r, 'edgeGroup', 70, [0,1,0.2,1])
gui = r.client.gui
gui.setCaptureTransform ("frames.yaml ", ["skeleton_trunk_flexible"])
q = q11
r (q); cl.rbprmBuilder.setCurrentConfig(q)
gui.refresh (); gui.captureTransform ()
gui.setVisibility('skeleton_trunk_flexible/thorax_rhand_rom',"OFF")
q = q_goal_test [0:7]
q[0] = q[0] + 1; q[2] = q[2] + 1
gui.addLight ("li", r.windowId, 0.0001, [0.9,0.9,0.9,1])
gui.addToGroup ("li", r.sceneName)
gui.applyConfiguration ("li", q)
gui.refresh ()
"""
## Export path to BLENDER ##
"""
import numpy as np
pathId = 0; dt = 0.05; gui.setCaptureTransform ("skeleton_trunk_path.yaml", ["skeleton_trunk_flexible"])
PL = ps.pathLength(pathId)
FrameRange = np.arange(0,PL,dt)
numberFrame = len(FrameRange)
# test frame capture
q = q11; r (q); gui.refresh (); gui.captureTransform ()
q = q22; r (q); gui.refresh (); gui.captureTransform ()
# capture path
for t in FrameRange:
q = ps.configAtParam (pathId, t)#update robot configuration
r (q); gui.refresh (); gui.captureTransform ()
r (q22); gui.refresh (); gui.captureTransform ()
"""
""" # Manually add waypoints to roadmap:
pbCl = rbprmBuilder.client.basic.problem
pbCl.addConfigToRoadmap (waypoints[0])
pbCl.addConfigToRoadmap (waypoints[1])
pbCl.addConfigToRoadmap (waypoints[2])
ps.directPath (q11, waypoints[0]); pathIds0 = ps.numberPaths () - 1
ps.directPath (waypoints[0], waypoints[1]); pathId01 = ps.numberPaths () - 1
ps.directPath (waypoints[1], waypoints[2]); pathId12 = ps.numberPaths () - 1
ps.directPath (waypoints[2], q22); pathId2g = ps.numberPaths () - 1
pbCl.addEdgeToRoadmap (q11, waypoints[0], pathIds0, True)
pbCl.addEdgeToRoadmap (waypoints[0], waypoints[1], pathId01, True)
pbCl.addEdgeToRoadmap (waypoints[1], waypoints[2], pathId12, True)
pbCl.addEdgeToRoadmap (waypoints[2], q22, pathId2g, True)
pbCl.saveRoadmap ('/local/mcampana/devel/hpp/data/skeleton_test_path.rdm')
ps.readRoadmap ('/local/mcampana/devel/hpp/data/skeleton_test_path.rdm')
"""
""" #### display
id = r.client.gui.getWindowID("window_hpp_")
r.client.gui.attachCameraToNode("spiderman_trunk/base_link",id)
ps.clearRoadmap()
gui.removeFromGroup("path_1_root",r.sceneName)
ps.solve()
solutionPathId = ps.numberPaths () - 1
pp.displayPath(solutionPathId, [0.0, 0.0, 0.8, 1.0])
rbprmBuilder.rotateAlongPath (solutionPathId)
orientedpathId = ps.numberPaths () - 1
r(pp.client.problem.configAtParam(orientedpathId,0))
pp(orientedpathId)
q11 = ps.node(0)
q22 = ps.node(1)
plotCone (q11, ps, r, "cone_first", "friction_cone_SG2");
plotCone (q22, ps, r, "cone_second", "friction_cone_SG2")
"""
| lgpl-3.0 | 1,061,759,997,378,215,700 | 30.055195 | 145 | 0.715316 | false |
Weihonghao/ECM | Vpy34/lib/python3.5/site-packages/pandas/compat/numpy/__init__.py | 3 | 2213 | """ support numpy compatiblitiy across versions """
import re
import numpy as np
from distutils.version import LooseVersion
from pandas.compat import string_types, string_and_binary_types
# numpy versioning
_np_version = np.__version__
_nlv = LooseVersion(_np_version)
_np_version_under1p8 = _nlv < '1.8'
_np_version_under1p9 = _nlv < '1.9'
_np_version_under1p10 = _nlv < '1.10'
_np_version_under1p11 = _nlv < '1.11'
_np_version_under1p12 = _nlv < '1.12'
_np_version_under1p13 = _nlv < '1.13'
if _nlv < '1.7.0':
raise ImportError('this version of pandas is incompatible with '
'numpy < 1.7.0\n'
'your numpy version is {0}.\n'
'Please upgrade numpy to >= 1.7.0 to use '
'this pandas version'.format(_np_version))
_tz_regex = re.compile('[+-]0000$')
def tz_replacer(s):
if isinstance(s, string_types):
if s.endswith('Z'):
s = s[:-1]
elif _tz_regex.search(s):
s = s[:-5]
return s
def np_datetime64_compat(s, *args, **kwargs):
"""
provide compat for construction of strings to numpy datetime64's with
tz-changes in 1.11 that make '2015-01-01 09:00:00Z' show a deprecation
warning, when need to pass '2015-01-01 09:00:00'
"""
if not _np_version_under1p11:
s = tz_replacer(s)
return np.datetime64(s, *args, **kwargs)
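# Illustrative behaviour: on numpy >= 1.11 the trailing timezone marker is
# stripped before construction, so both calls below build the same value:
#   np_datetime64_compat('2015-01-01 09:00:00Z')
#   np_datetime64_compat('2015-01-01 09:00:00')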
def np_array_datetime64_compat(arr, *args, **kwargs):
"""
provide compat for construction of an array of strings to a
np.array(..., dtype=np.datetime64(..))
tz-changes in 1.11 that make '2015-01-01 09:00:00Z' show a deprecation
warning, when need to pass '2015-01-01 09:00:00'
"""
if not _np_version_under1p11:
# is_list_like
if hasattr(arr, '__iter__') and not \
isinstance(arr, string_and_binary_types):
arr = [tz_replacer(s) for s in arr]
else:
arr = tz_replacer(arr)
return np.array(arr, *args, **kwargs)
__all__ = ['np',
'_np_version_under1p8',
'_np_version_under1p9',
'_np_version_under1p10',
'_np_version_under1p11',
'_np_version_under1p12',
]
| agpl-3.0 | -806,873,962,559,849,900 | 27.74026 | 74 | 0.584727 | false |
bukzor/sympy | sympy/matrices/expressions/tests/test_trace.py | 83 | 2693 | from sympy.core import Lambda, S, symbols
from sympy.concrete import Sum
from sympy.functions import adjoint, conjugate, transpose
from sympy.matrices import eye, Matrix, ShapeError, ImmutableMatrix
from sympy.matrices.expressions import (
Adjoint, Identity, FunctionMatrix, MatrixExpr, MatrixSymbol, Trace,
ZeroMatrix, trace, MatPow, MatAdd, MatMul
)
from sympy.utilities.pytest import raises, XFAIL
n = symbols('n', integer=True)
A = MatrixSymbol('A', n, n)
B = MatrixSymbol('B', n, n)
C = MatrixSymbol('C', 3, 4)
def test_Trace():
assert isinstance(Trace(A), Trace)
assert not isinstance(Trace(A), MatrixExpr)
raises(ShapeError, lambda: Trace(C))
assert trace(eye(3)) == 3
assert trace(Matrix(3, 3, [1, 2, 3, 4, 5, 6, 7, 8, 9])) == 15
assert adjoint(Trace(A)) == trace(Adjoint(A))
assert conjugate(Trace(A)) == trace(Adjoint(A))
assert transpose(Trace(A)) == Trace(A)
A / Trace(A) # Make sure this is possible
# Some easy simplifications
assert trace(Identity(5)) == 5
assert trace(ZeroMatrix(5, 5)) == 0
assert trace(2*A*B) == 2*Trace(A*B)
assert trace(A.T) == trace(A)
i, j = symbols('i j')
F = FunctionMatrix(3, 3, Lambda((i, j), i + j))
assert trace(F) == (0 + 0) + (1 + 1) + (2 + 2)
raises(TypeError, lambda: Trace(S.One))
assert Trace(A).arg is A
assert str(trace(A)) == str(Trace(A).doit())
def test_Trace_A_plus_B():
assert trace(A + B) == Trace(A) + Trace(B)
assert Trace(A + B).arg == MatAdd(A, B)
assert Trace(A + B).doit() == Trace(A) + Trace(B)
def test_Trace_MatAdd_doit():
# See issue #9028
X = ImmutableMatrix([[1, 2, 3]]*3)
Y = MatrixSymbol('Y', 3, 3)
q = MatAdd(X, 2*X, Y, -3*Y)
assert Trace(q).arg == q
assert Trace(q).doit() == 18 - 2*Trace(Y)
def test_Trace_MatPow_doit():
X = Matrix([[1, 2], [3, 4]])
assert Trace(X).doit() == 5
q = MatPow(X, 2)
assert Trace(q).arg == q
assert Trace(q).doit() == 29
def test_Trace_MutableMatrix_plus():
# See issue #9043
X = Matrix([[1, 2], [3, 4]])
assert Trace(X) + Trace(X) == 2*Trace(X)
def test_Trace_doit_deep_False():
X = Matrix([[1, 2], [3, 4]])
q = MatPow(X, 2)
assert Trace(q).doit(deep=False).arg == q
q = MatAdd(X, 2*X)
assert Trace(q).doit(deep=False).arg == q
q = MatMul(X, 2*X)
assert Trace(q).doit(deep=False).arg == q
def test_trace_constant_factor():
# Issue 9052: gave 2*Trace(MatMul(A)) instead of 2*Trace(A)
assert trace(2*A) == 2*Trace(A)
X = ImmutableMatrix([[1, 2], [3, 4]])
assert trace(MatMul(2, X)) == 10
@XFAIL
def test_rewrite():
assert isinstance(trace(A).rewrite(Sum), Sum)
| bsd-3-clause | -2,727,246,780,708,139,000 | 27.347368 | 71 | 0.606387 | false |
adit-chandra/tensorflow | tensorflow/lite/testing/op_tests/tile.py | 4 | 2453 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for tile."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_tile_tests(options):
"""Make a set of tests to do tile."""
test_parameters = [{
"input_dtype": [tf.float32, tf.int32, tf.bool],
"input_shape": [[3, 2, 1], [2, 2, 2]],
"multiplier_dtype": [tf.int32, tf.int64],
"multiplier_shape": [[3]]
}]
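  # For reference: tf.tile repeats a tensor along each axis, so an input of
  # shape [3, 2, 1] with multiplier [2, 2, 2] yields shape [6, 4, 2].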
def build_graph(parameters):
"""Build the tile op testing graph."""
input_value = tf.compat.v1.placeholder(
dtype=parameters["input_dtype"],
shape=parameters["input_shape"],
name="input")
multiplier_value = tf.compat.v1.placeholder(
dtype=parameters["multiplier_dtype"],
shape=parameters["multiplier_shape"],
name="multiplier")
out = tf.tile(input_value, multiplier_value)
return [input_value, multiplier_value], [out]
def build_inputs(parameters, sess, inputs, outputs):
input_value = create_tensor_data(parameters["input_dtype"],
parameters["input_shape"])
multipliers_value = create_tensor_data(
parameters["multiplier_dtype"],
parameters["multiplier_shape"],
min_value=0)
return [input_value, multipliers_value], sess.run(
outputs,
feed_dict={
inputs[0]: input_value,
inputs[1]: multipliers_value
})
make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
| apache-2.0 | 617,433,764,874,577,800 | 37.936508 | 80 | 0.657155 | false |
dpinney/omf | omf/solvers/VB.py | 1 | 29740 | import pandas as pd
import pulp
import numpy as np
from numpy import *
class VirtualBattery(object):
""" Base class for abstraction. """
def __init__(self, ambient_temp, capacitance, resistance, rated_power, COP, deadband, setpoint, tcl_number):
# C :thermal capacitance
# R : thermal resistance
# P: rated power (kW) of each TCL
# eta: COP
# delta: temperature deadband
# theta_s: temperature setpoint
# N: number of TCL
# ambient: ambient temperature
self.ambient = ambient_temp
self.C = capacitance
self.R = resistance
self.P = rated_power
self.eta = COP
self.delta = deadband
self.theta_s = setpoint
self.N = tcl_number
def generate(self, participation_number, P0_number):
""" Main calculation happens here. """
#heuristic function of participation
atan = np.arctan
participation = participation_number
P0 = P0_number
P0[P0 < 0] = 0.0 # set negative power consumption to 0
p_lower = self.N*participation*P0 # aggregated baseline power consumption considering participation
p_upper = self.N*participation*(self.P - P0)
p_upper[p_upper < 0] = 0.0 # set negative power upper bound to 0
e_ul = self.N*participation*self.C*self.delta/2/self.eta
return p_lower, p_upper, e_ul
class AC(VirtualBattery):
""" Derived Class for specifically AC Virtual Battery. """
def __init__(self, theta_a, capacitance, resistance, rated_power, COP, deadband, setpoint, tcl_number):
super(AC, self).__init__(theta_a, capacitance, resistance, rated_power, COP, deadband, setpoint, tcl_number)
# self.tcl_idx = tcl_idx
self.theta_a = self.ambient # theta_a == ambient temperature
def generate(self):
#heuristic function of participation
atan = np.arctan
# participation for AC
Ta = np.linspace(20, 45, num=51)
participation = (atan(self.theta_a-27) - atan(Ta[0]-27))/((atan(Ta[-1]-27) - atan(Ta[0]-27)))
participation = np.clip(participation, 0, 1)
#P0 for AC
P0 = (self.theta_a - self.theta_s)/self.R/self.eta # average baseline power consumption for the given temperature setpoint
return super(AC, self).generate(participation, P0)
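# A minimal usage sketch (not part of the original module; the parameter
# values below are illustrative assumptions): build an AC virtual battery for
# a fleet of 1000 units and derive its hourly power/energy envelope.
#
#   theta_a = np.full(8760, 30.0)  # assumed hourly ambient temperature, degC
#   ac = AC(theta_a, capacitance=2.0, resistance=2.0, rated_power=5.6,
#           COP=2.5, deadband=1.0, setpoint=22.5, tcl_number=1000)
#   p_lower, p_upper, e_ul = ac.generate()  # kW bounds and kWh energy bound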
class HP(VirtualBattery):
""" Derived Class for specifically HP Virtual Battery. """
def __init__(self, theta_a, capacitance, resistance, rated_power, COP, deadband, setpoint, tcl_number):
super(HP, self).__init__(theta_a, capacitance, resistance, rated_power, COP, deadband, setpoint, tcl_number)
# self.tcl_idx = tcl_idx
self.theta_a = self.ambient # theta_a == ambient temperature
def generate(self):
#heuristic function of participation
atan = np.arctan
# participation for HP
Ta = np.linspace(0, 25, num=51)
participation = 1-(atan(self.theta_a-10) - atan(Ta[0]-10))/((atan(Ta[-1]-10) - atan(Ta[0]-10)))
participation = np.clip(participation, 0, 1)
#P0 for HP
P0 = (self.theta_s - self.theta_a)/self.R/self.eta
return super(HP, self).generate(participation, P0)
class RG(VirtualBattery):
""" Derived Class for specifically RG Virtual Battery. """
def __init__(self, theta_a, capacitance, resistance, rated_power, COP, deadband, setpoint, tcl_number):
super(RG, self).__init__(theta_a, capacitance, resistance, rated_power, COP, deadband, setpoint, tcl_number)
# self.tcl_idx = tcl_idx
self.theta_a = self.ambient # theta_a == ambient temperature
def generate(self):
#heuristic function of participation
atan = np.arctan
# participation for RG
participation = np.ones(self.theta_a.shape)
participation = np.clip(participation, 0, 1)
#P0 for RG
P0 = (self.theta_a - self.theta_s)/self.R/self.eta # average baseline power consumption for the given temperature setpoint
return super(RG, self).generate(participation, P0)
class WH(VirtualBattery):
""" Derived class for specifically Water Heater Virtual Battery. """
N_wh = 50
def __init__(self, theta_a, capacitance, resistance, rated_power, COP, deadband, setpoint, tcl_number,Tout, water):
super(WH, self).__init__(theta_a, capacitance, resistance, rated_power, COP, deadband, setpoint, tcl_number)
self.C_wh = self.C*np.ones((self.N_wh, 1)) # thermal capacitance, set in parent class
self.R_wh = self.R*np.ones((self.N_wh, 1)) # thermal resistance
self.P_wh = self.P*np.ones((self.N_wh, 1)) # rated power (kW) of each TCL
self.delta_wh = self.delta*np.ones((self.N_wh, 1)) # temperature deadband
self.theta_s_wh = self.theta_s*np.ones((self.N_wh, 1)) # temperature setpoint
self.Tout=Tout
self.water = water
# self.N = self.para[6] # number of TCL
def calculate_twat(self,tout_avg,tout_madif):
tout_avg=tout_avg/5*9+32
tout_madif=tout_madif/5*9
ratio = 0.4 + 0.01 * (tout_avg - 44)
lag = 35 - 1.0 * (tout_avg - 44)
twat = 1*np.ones((365*24*60,1))
for i in range(365):
for j in range(60*24):
twat[i*24*60+j]= (tout_avg+6)+ratio*(tout_madif/ 2) * sin((0.986 * (i - 15 - lag) - 90)/180*3.14)
twat=(twat-32.)/9.*5.
return twat
def prepare_pare_for_calculate_twat(self,tou_raw):
tout_avg = sum(tou_raw)/len(tou_raw)
mon=[31,28,31,30,31,30,31,31,30,31,30,31]
mon_ave=1*np.ones((12,1))
mon_ave[1]=sum(tou_raw[0:mon[1]*24])/mon[1]/24
stop=mon[1]*24
for idx in range(1,len(mon)):
mon_ave[idx]=sum(tou_raw[stop:stop+mon[idx]*24])/mon[idx]/24;
tou_madif=max(mon_ave)- min(mon_ave)
return tout_avg, tou_madif
def generate(self):
# theta_a is the ambient temperature
# theta_a = (72-32)*5.0/9*np.ones((365, 24*60)) # This is a hard-coded 72degF, converted to degCel
theta_a = self.ambient#*np.ones((365, 24*60)) # theta_a == ambient temperature
#nRow, nCol = theta_a.shape
nRow, nCol = 365, 24*60
theta_a = np.reshape(theta_a, [nRow*nCol, 1])
        Tout1min = np.zeros(size(theta_a))  # note: allocated but unused below
        for i in range(len(self.Tout)):
            theta_a[i] = (self.Tout[i] + self.ambient[i]) / 2  # CHANGED THIS
# h is the model time discretization step in seconds
h = 60
#T is the number of time step considered, i.e., T = 365*24*60 means a year
# with 1 minute time discretization
T = len(theta_a)
        tou_avg, maxdiff = self.prepare_pare_for_calculate_twat(self.Tout)
        twat = self.calculate_twat(tou_avg, maxdiff)
# print twat
# theta_lower is the temperature lower bound
theta_lower_wh = self.theta_s_wh - self.delta_wh/2.0
# theta_upper is the temperature upper bound
theta_upper_wh = self.theta_s_wh + self.delta_wh/2.0
# m_water is the water draw in unit of gallon per minute
        m_water = self.water  # previously: np.genfromtxt("Flow_raw_1minute_BPA.csv", delimiter=',')[1:, 1:]
where_are_NaNs = isnan(m_water)
m_water[where_are_NaNs] = 0
m_water = m_water *0.00378541178*1000/h
m_water_row, m_water_col = m_water.shape
water_draw = np.zeros((m_water_row, int(self.N_wh)))
for i in range(int(self.N_wh)):
k = np.random.randint(m_water_col)
water_draw[:, i] = np.roll(m_water[:, k], (1, np.random.randint(-14, 1))) + m_water[:, k] * 0.1 * (np.random.random() - 0.5)
# k = m_water_col - 1
# print(k)
# raise(ArgumentError, "Stop here")
# water_draw[:, i] = m_water[:, k]
first = -(
np.matmul(theta_a, np.ones((1, self.N_wh)))
- np.matmul(np.ones((T, 1)), self.theta_s_wh.transpose())
)
# print(np.argwhere(np.isnan(first)))
second = np.matmul(np.ones((T, 1)), self.R_wh.transpose())
# print(np.argwhere(np.isnan(second)))
Po = (
first
/ second
- 4.2
* np.multiply(water_draw, (55-32) * 5/9.0 - np.matmul(np.ones((T, 1)), self.theta_s_wh.transpose()))
)
# print(water_draw.shape)
# print(len(water_draw[:1]))
# Po_total is the analytically predicted aggregate baseline power
Po_total = np.sum(Po, axis=1)
upper_limit = np.sum(self.P_wh, axis=0)
# print(np.argwhere(np.isnan(water_draw)))
Po_total[Po_total > upper_limit[0]] = upper_limit
# theta is the temperature of TCLs
theta = np.zeros((self.N_wh, T))
theta[:, 0] = self.theta_s_wh.reshape(-1)
# m is the indicator of on-off state: 1 is on, 0 is off
m = np.ones((self.N_wh, T))
m[:int(self.N_wh*0.8), 0] = 0
for t in range(T - 1):
theta[:, t+1] = (
(1 - h/(self.C_wh * 3600) / self.R_wh).reshape(-1)
* theta[:, t]
+ (h / (self.C_wh * 3600) / self.R_wh).reshape(-1)
* theta_a[t]
+ ((h/(self.C_wh * 3600))*self.P_wh).reshape(-1)*m[:, t]
)
m[theta[:, t+1] > (theta_upper_wh).reshape(-1), t+1] = 0
m[theta[:, t+1] < (theta_lower_wh).reshape(-1), t+1] = 1
m[(theta[:, t+1] >= (theta_lower_wh).reshape(-1)) & (theta[:, t+1] <= (theta_upper_wh).reshape(-1)), t+1] = m[(theta[:, t+1] >= (theta_lower_wh).reshape(-1)) & (theta[:, t+1] <= (theta_upper_wh).reshape(-1)), t]
theta[:, 0] = theta[:, -1]
m[:, 0] = m[:, -1]
# Po_total_sim is the predicted aggregate baseline power using simulations
Po_total_sim = np.zeros((T, 1))
Po_total_sim[0] = np.sum(m[:, 0]*(self.P_wh.reshape(-1)))
for t in range(T - 1):
# print t
theta[:, t+1] = (1 - h/(self.C_wh * 3600)/self.R_wh).reshape(-1) * theta[:, t] + (h/(self.C_wh * 3600)/self.R_wh).reshape(-1)*theta_a[t] + (h/(self.C_wh*3600)).reshape(-1)*m[:, t]*self.P_wh.reshape(-1) + h*4.2*water_draw[t, :].transpose() * (twat[t] -theta[:, t]) / ((self.C_wh*3600).reshape(-1))
m[theta[:, t+1] > (theta_upper_wh).reshape(-1), t+1] = 0
m[theta[:, t+1] < (theta_lower_wh).reshape(-1), t+1] = 1
m[(theta[:, t+1] >= (theta_lower_wh).reshape(-1)) & (theta[:, t+1] <= (theta_upper_wh).reshape(-1)), t+1] = m[(theta[:, t+1] >= (theta_lower_wh).reshape(-1)) & (theta[:, t+1] <= (theta_upper_wh).reshape(-1)), t]
Po_total_sim[t+1] = np.sum(m[:, t+1] * self.P_wh.reshape(-1))
index_available = np.ones((self.N_wh, T))
for t in range(T - 1):
index_available[(theta[:, t] < (theta_lower_wh-0.5).reshape(-1)) | (theta[:, t] > (theta_upper_wh+0.5).reshape(-1)), t] = 0
# Virtual battery parameters
p_upper_wh1 = np.sum(self.P_wh) - Po_total_sim
p_lower_wh1 = Po_total_sim
e_ul_wh1 = np.sum((np.matmul(self.C_wh, np.ones((1, T))) * np.matmul(self.delta_wh, np.ones((1, T))) / 2 * index_available).transpose(), axis=1)
# calculate hourly average data from minute output for power
p_upper_wh1 = np.reshape(p_upper_wh1, [8760,60])
p_upper_wh = np.mean(p_upper_wh1, axis=1)*float(self.N)/float(self.N_wh)
p_lower_wh1 = np.reshape(p_lower_wh1, [8760,60])
p_lower_wh = np.mean(p_lower_wh1, axis=1)*float(self.N)/float(self.N_wh)
# extract hourly data from minute output for energy
e_ul_wh = e_ul_wh1[59:len(e_ul_wh1):60]*float(self.N)/float(self.N_wh)
return p_lower_wh, p_upper_wh, e_ul_wh
# ------------------------STACKED CODE FROM PNNL----------------------------- #
def run_fhec(ind, gt_demand, Input):
use_hour = int(ind["userHourLimit"]) # number of VB use hours specified by the user
epsilon = 1 #float(ind["energyReserve"]) # energy reserve parameter, range: 0 - 1
fhec_kwh_rate = float(ind["electricityCost"]) # $/kW
fhec_peak_mult = float(ind["peakMultiplier"])
s = sorted(gt_demand)
# peak hours calculation
perc = float(ind["peakPercentile"])
fhec_gt98 = s[int(perc*len(s))]
fhec_peak_hours = []
for idx, val in enumerate(gt_demand):
if val > fhec_gt98:
fhec_peak_hours.extend([idx+1])
    fhec_off_peak_hours = []
    for i in range(len(gt_demand)):
        # hours are 1-indexed, so compare i+1 (not i) against the peak list
        if i + 1 not in fhec_peak_hours:
            fhec_off_peak_hours.extend([i + 1])
# read the input data, including load profile, VB profile, and regulation price
# Input = pd.read_csv(input_csv, index_col=['Hour'])
# VB model parameters
C = float(ind["capacitance"]) # thermal capacitance
R = float(ind["resistance"]) # thermal resistance
deltaT = 1
alpha = math.exp(-deltaT/(C*R)) # hourly self discharge rate
E_0 = 0 # VB initial energy state
arbitrage_option = ind["use_arbitrage"] == "on"
regulation_option = ind["use_regulation"] == "on"
deferral_option = ind["use_deferral"] == "on"
# calculate the predicted profits for all 8760 hours
use_prft = []
for hour in Input.index:
temp = 0
if arbitrage_option or deferral_option:
if hour in fhec_peak_hours:
temp += fhec_peak_mult*fhec_kwh_rate*(Input.loc[hour, "VB Energy upper (kWh)"]-Input.loc[hour, "VB Energy lower (kWh)"])
if hour in fhec_off_peak_hours:
temp += fhec_kwh_rate*(Input.loc[hour, "VB Energy upper (kWh)"]-Input.loc[hour, "VB Energy lower (kWh)"])
if regulation_option:
temp += (Input.loc[hour, "Reg-up Price ($/MW)"]+Input.loc[hour, "Reg-dn Price ($/MW)"])/1000*(Input.loc[hour, "VB Energy upper (kWh)"]-Input.loc[hour, "VB Energy lower (kWh)"])
use_prft.append({'Hour': hour, 'Profit': temp})
# sort the predicted profits from the highest to the lowest
use_prft = sorted(use_prft, reverse = True, key = lambda i : i['Profit'])
# get the indices of the first use_hour hours, and the optimization will be scheduled only for those hours
use_list = []
for index in range(use_hour):
use_list.append(use_prft[index]['Hour'])
###############################################################################
# start demand charge reduction LP problem
model = pulp.LpProblem("Demand charge minimization problem FHEC-Knievel", pulp.LpMinimize)
# decision variable of VB charging power; dim: 8760 by 1
VBpower = pulp.LpVariable.dicts("ChargingPower", ((hour) for hour in Input.index))
# set bound
for hour in Input.index:
if hour in use_list:
VBpower[hour].lowBound = Input.loc[hour, "VB Power lower (kW)"]
VBpower[hour].upBound = Input.loc[hour, "VB Power upper (kW)"]
if hour not in use_list:
VBpower[hour].lowBound = 0
VBpower[hour].upBound = 0
# decision variable of VB energy state; dim: 8760 by 1
VBenergy = pulp.LpVariable.dicts("EnergyState", ((hour) for hour in Input.index))
# set bound
for hour in Input.index:
VBenergy[hour].lowBound = Input.loc[hour, "VB Energy lower (kWh)"]
VBenergy[hour].upBound = Input.loc[hour, "VB Energy upper (kWh)"]
# decision variable of annual peak demand
PeakDemand = pulp.LpVariable("annual peak demand", lowBound=0)
# decision variable: hourly regulation up capacity; dim: 8760 by 1
reg_up = pulp.LpVariable.dicts("hour reg up", ((hour) for hour in Input.index), lowBound=0)
# decision variable: hourly regulation dn capacity; dim: 8760 by 1
reg_dn = pulp.LpVariable.dicts("hour reg dn", ((hour) for hour in Input.index), lowBound=0)
for hour in Input.index:
if hour not in use_list:
reg_up[hour].upBound = 0
reg_dn[hour].upBound = 0
# objective functions
if (arbitrage_option == False and regulation_option == False and deferral_option == False):
model += 0, "an arbitrary objective function"
if (arbitrage_option == True and regulation_option == False and deferral_option == False):
model += pulp.lpSum([fhec_peak_mult*fhec_kwh_rate*(Input.loc[hour, "Load (kW)"]+VBpower[hour]) for hour in fhec_peak_hours]
+ [fhec_kwh_rate*(Input.loc[hour, "Load (kW)"]+VBpower[hour]) for hour in fhec_off_peak_hours])
if (arbitrage_option == False and regulation_option == True and deferral_option == False):
model += pulp.lpSum([-Input.loc[hour, "Reg-up Price ($/MW)"]/1000*reg_up[hour] for hour in Input.index]
+ [-Input.loc[hour, "Reg-dn Price ($/MW)"]/1000*reg_dn[hour] for hour in Input.index])
if (arbitrage_option == False and regulation_option == False and deferral_option == True):
model += pulp.lpSum(1E03*PeakDemand)
if (arbitrage_option == True and regulation_option == True and deferral_option == False):
model += pulp.lpSum([fhec_peak_mult*fhec_kwh_rate*(Input.loc[hour, "Load (kW)"]+VBpower[hour]) for hour in fhec_peak_hours]
+ [fhec_kwh_rate*(Input.loc[hour, "Load (kW)"]+VBpower[hour]) for hour in fhec_off_peak_hours]
+ [-Input.loc[hour, "Reg-up Price ($/MW)"]/1000*reg_up[hour] for hour in Input.index]
+ [-Input.loc[hour, "Reg-dn Price ($/MW)"]/1000*reg_dn[hour] for hour in Input.index])
if (arbitrage_option == True and regulation_option == False and deferral_option == True):
model += pulp.lpSum([fhec_peak_mult*fhec_kwh_rate*(Input.loc[hour, "Load (kW)"]+VBpower[hour]) for hour in fhec_peak_hours]
+ [fhec_kwh_rate*(Input.loc[hour, "Load (kW)"]+VBpower[hour]) for hour in fhec_off_peak_hours]
+ 1E03*PeakDemand)
if (arbitrage_option == False and regulation_option == True and deferral_option == True):
model += pulp.lpSum([-Input.loc[hour, "Reg-up Price ($/MW)"]/1000*reg_up[hour] for hour in Input.index]
+ [-Input.loc[hour, "Reg-dn Price ($/MW)"]/1000*reg_dn[hour] for hour in Input.index]
+ 1E03*PeakDemand)
if (arbitrage_option == True and regulation_option == True and deferral_option == True):
model += pulp.lpSum([fhec_peak_mult*fhec_kwh_rate*(Input.loc[hour, "Load (kW)"]+VBpower[hour]) for hour in fhec_peak_hours]
+ [fhec_kwh_rate*(Input.loc[hour, "Load (kW)"]+VBpower[hour]) for hour in fhec_off_peak_hours]
+ [-Input.loc[hour, "Reg-up Price ($/MW)"]/1000*reg_up[hour] for hour in Input.index]
+ [-Input.loc[hour, "Reg-dn Price ($/MW)"]/1000*reg_dn[hour] for hour in Input.index]
+ 1E03*PeakDemand)
# VB energy state as a function of VB power
for hour in Input.index:
if hour==1:
model += VBenergy[hour] == alpha*E_0 + VBpower[hour]*deltaT
else:
model += VBenergy[hour] == alpha*VBenergy[hour-1] + VBpower[hour]*deltaT
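    # The constraint above encodes the first-order battery dynamics
    # E[t] = alpha * E[t-1] + P[t] * deltaT, where alpha = exp(-deltaT/(R*C))
    # models the hourly self-discharge of the aggregated thermal storage.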
# hourly regulation constraints
for hour in Input.index:
if regulation_option:
model += reg_up[hour] == reg_dn[hour] # regulation balance
model += VBenergy[hour] - epsilon*reg_up[hour]*deltaT >= VBenergy[hour].lowBound
model += VBenergy[hour] + epsilon*reg_dn[hour]*deltaT <= VBenergy[hour].upBound
else:
model += reg_up[hour] == 0
model += reg_dn[hour] == 0
# extra constraints
for hour in Input.index:
model += PeakDemand >= Input.loc[hour, "Load (kW)"] + VBpower[hour]
model.solve()
###############################################################################
use_hour_indicator = []
for hour in Input.index:
if VBpower[hour].varValue != 0 or reg_up[hour].varValue != 0:
use_hour_indicator.append({'Hour': hour, 'Use': 1})
else:
use_hour_indicator.append({'Hour': hour, 'Use': 0})
output = []
for hour in Input.index:
var_output = {
'Hour': hour,
'VB energy (kWh)': int(100*VBenergy[hour].varValue)/100,
'VB power (kW)': int(100*VBpower[hour].varValue)/100,
'Load (kW)': int(100*Input.loc[hour, "Load (kW)"])/100,
'Net load (kW)': int(100*(VBpower[hour].varValue+Input.loc[hour, "Load (kW)"]))/100,
'Hour used': use_hour_indicator[hour-1]['Use']
}
if regulation_option:
var_regulation = {'Regulation (kW)': int(100*reg_up[hour].varValue)/100}
var_output.update(var_regulation)
output.append(var_output)
output_df = pd.DataFrame.from_records(output)
# output_df.to_csv('fhec_output.csv', index=False)
return output_df
def run_okec(ind, Input):
# Input.to_csv('okec_input.csv', index=False)
use_hour = int(ind["userHourLimit"]) # number of VB use hours specified by the user
epsilon = 1 #float(ind["energyReserve"]) # energy reserve parameter, range: 0 - 1
okec_peak_charge = float(ind["annual_peak_charge"]) # annual peak demand charge $100/kW
okec_avg_demand_charge = float(ind["avg_demand_charge"]) # $120/kW
okec_fuel_charge = float(ind["fuel_charge"]) # total energy $/kWh
# VB model parameters
C = float(ind["capacitance"]) # thermal capacitance
R = float(ind["resistance"]) # thermal resistance
deltaT = 1
alpha = math.exp(-deltaT/(C*R)) # hourly self discharge rate
E_0 = 0 # VB initial energy state
arbitrage_option = ind["use_arbitrage"] == "on"
regulation_option = ind["use_regulation"] == "on"
deferral_option = ind["use_deferral"] == "on"
# calculate the predicted profits for all 8760 hours
use_prft = []
for hour in Input.index:
temp = 0
if arbitrage_option or deferral_option:
temp += okec_avg_demand_charge/len(Input.index)*(Input.loc[hour, "VB Energy upper (kWh)"]-Input.loc[hour, "VB Energy lower (kWh)"])
if regulation_option:
temp += (Input.loc[hour, "Reg-up Price ($/MW)"]+Input.loc[hour, "Reg-dn Price ($/MW)"])/1000*(Input.loc[hour, "VB Energy upper (kWh)"]-Input.loc[hour, "VB Energy lower (kWh)"])
use_prft.append({'Hour': hour, 'Profit': temp})
# sort the predicted profits from the highest to the lowest
use_prft = sorted(use_prft, reverse = True, key = lambda i : i['Profit'])
# get the indices of the first use_hour hours, and the optimization will be scheduled only for those hours
use_list = []
for index in range(use_hour):
use_list.append(use_prft[index]['Hour'])
# start demand charge reduction LP problem
model = pulp.LpProblem("Demand charge minimization problem OKEC-Buffett", pulp.LpMinimize)
# decision variable of VB charging power; dim: 8760 by 1
VBpower = pulp.LpVariable.dicts("ChargingPower", ((hour) for hour in Input.index))
# set bound
for hour in Input.index:
if hour in use_list:
VBpower[hour].lowBound = Input.loc[hour, "VB Power lower (kW)"]
VBpower[hour].upBound = Input.loc[hour, "VB Power upper (kW)"]
if hour not in use_list:
VBpower[hour].lowBound = 0
VBpower[hour].upBound = 0
# decision variable of VB energy state; dim: 8760 by 1
VBenergy = pulp.LpVariable.dicts("EnergyState", ((hour) for hour in Input.index))
# set bound
for hour in Input.index:
VBenergy[hour].lowBound = Input.loc[hour, "VB Energy lower (kWh)"]
VBenergy[hour].upBound = Input.loc[hour, "VB Energy upper (kWh)"]
# decision variable of annual peak demand
PeakDemand = pulp.LpVariable("annual peak demand", lowBound=0)
# decision variable: hourly regulation up capacity; dim: 8760 by 1
reg_up = pulp.LpVariable.dicts("hour reg up", ((hour) for hour in Input.index), lowBound=0)
# decision variable: hourly regulation dn capacity; dim: 8760 by 1
reg_dn = pulp.LpVariable.dicts("hour reg dn", ((hour) for hour in Input.index), lowBound=0)
for hour in Input.index:
if hour not in use_list:
reg_up[hour].upBound = 0
reg_dn[hour].upBound = 0
# objective function: sum of monthly demand charge
if (arbitrage_option == False and regulation_option == False and deferral_option == False):
model += 0, "an arbitrary objective function"
if (arbitrage_option == True and regulation_option == False and deferral_option == False):
model += pulp.lpSum(okec_peak_charge*PeakDemand
+ [okec_avg_demand_charge*(Input.loc[hour, "Load (kW)"]+VBpower[hour])/len(Input.index) for hour in Input.index]
+ [okec_fuel_charge*(Input.loc[hour, "Load (kW)"]+VBpower[hour])*deltaT for hour in Input.index])
if (arbitrage_option == False and regulation_option == True and deferral_option == False):
model += pulp.lpSum([-Input.loc[hour, "Reg-up Price ($/MW)"]/1000*reg_up[hour] for hour in Input.index]
+ [-Input.loc[hour, "Reg-dn Price ($/MW)"]/1000*reg_dn[hour] for hour in Input.index])
if (arbitrage_option == False and regulation_option == False and deferral_option == True):
model += pulp.lpSum(1E03*PeakDemand)
if (arbitrage_option == True and regulation_option == True and deferral_option == False):
model += pulp.lpSum(okec_peak_charge*PeakDemand
+ [okec_avg_demand_charge*(Input.loc[hour, "Load (kW)"]+VBpower[hour])/len(Input.index) for hour in Input.index]
+ [okec_fuel_charge*(Input.loc[hour, "Load (kW)"]+VBpower[hour])*deltaT for hour in Input.index]
+ [-Input.loc[hour, "Reg-up Price ($/MW)"]/1000*reg_up[hour] for hour in Input.index]
+ [-Input.loc[hour, "Reg-dn Price ($/MW)"]/1000*reg_dn[hour] for hour in Input.index])
if (arbitrage_option == True and regulation_option == False and deferral_option == True):
model += pulp.lpSum(okec_peak_charge*PeakDemand
+ [okec_avg_demand_charge*(Input.loc[hour, "Load (kW)"]+VBpower[hour])/len(Input.index) for hour in Input.index]
+ [okec_fuel_charge*(Input.loc[hour, "Load (kW)"]+VBpower[hour])*deltaT for hour in Input.index]
+ 1E03*PeakDemand)
if (arbitrage_option == False and regulation_option == True and deferral_option == True):
model += pulp.lpSum([-Input.loc[hour, "Reg-up Price ($/MW)"]/1000*reg_up[hour] for hour in Input.index]
+ [-Input.loc[hour, "Reg-dn Price ($/MW)"]/1000*reg_dn[hour] for hour in Input.index]
+ 1E03*PeakDemand)
if (arbitrage_option == True and regulation_option == True and deferral_option == True):
model += pulp.lpSum(okec_peak_charge*PeakDemand
+ [okec_avg_demand_charge*(Input.loc[hour, "Load (kW)"]+VBpower[hour])/len(Input.index) for hour in Input.index]
+ [okec_fuel_charge*(Input.loc[hour, "Load (kW)"]+VBpower[hour])*deltaT for hour in Input.index]
+ [-Input.loc[hour, "Reg-up Price ($/MW)"]/1000*reg_up[hour] for hour in Input.index]
+ [-Input.loc[hour, "Reg-dn Price ($/MW)"]/1000*reg_dn[hour] for hour in Input.index]
+ 1E03*PeakDemand)
# VB energy state as a function of VB power
for hour in Input.index:
if hour==1:
model += VBenergy[hour] == alpha*E_0 + VBpower[hour]*deltaT
else:
model += VBenergy[hour] == alpha*VBenergy[hour-1] + VBpower[hour]*deltaT
# hourly regulation constraints
for hour in Input.index:
if regulation_option:
model += reg_up[hour] == reg_dn[hour] # regulation balance
model += VBenergy[hour] - epsilon*reg_up[hour]*deltaT >= VBenergy[hour].lowBound
model += VBenergy[hour] + epsilon*reg_dn[hour]*deltaT <= VBenergy[hour].upBound
else:
model += reg_up[hour] == 0
model += reg_dn[hour] == 0
# extra constraints
for hour in Input.index:
model += PeakDemand >= Input.loc[hour, "Load (kW)"] + VBpower[hour]
model.solve()
###############################################################################
use_hour_indicator = []
for hour in Input.index:
if VBpower[hour].varValue != 0 or reg_up[hour].varValue != 0:
use_hour_indicator.append({'Hour': hour, 'Use': 1})
else:
use_hour_indicator.append({'Hour': hour, 'Use': 0})
output = []
for hour in Input.index:
var_output = {
'Hour': hour,
'VB energy (kWh)': int(100*VBenergy[hour].varValue)/100,
'VB power (kW)': int(100*VBpower[hour].varValue)/100,
'Load (kW)': int(100*Input.loc[hour, "Load (kW)"])/100,
'Net load (kW)': int(100*(VBpower[hour].varValue+Input.loc[hour, "Load (kW)"]))/100,
'Hour used': use_hour_indicator[hour-1]['Use']
}
if regulation_option:
var_regulation = {'Regulation (kW)': int(100*reg_up[hour].varValue)/100}
var_output.update(var_regulation)
output.append(var_output)
output_df = pd.DataFrame.from_records(output)
return output_df
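# A hedged usage sketch (not in the original module): `ind` maps the string
# settings referenced above and `Input` is a DataFrame indexed by 'Hour' with
# the "Load (kW)", "VB Power lower/upper (kW)", "VB Energy lower/upper (kWh)"
# and regulation price columns.
#
#   result = run_okec(ind, Input)
#   result.to_csv('okec_output.csv', index=False)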
| gpl-2.0 | -7,106,010,621,946,928,000 | 45.834646 | 308 | 0.581607 | false |
ablavatski/draw | tests/test_attention.py | 7 | 4359 |
import unittest
import theano
import numpy as np
from theano import tensor as T
from draw.attention import *
floatX = theano.config.floatX
def test_batched_dot():
a = T.ftensor3('a')
b = T.ftensor3('b')
c = my_batched_dot(a, b)
# Test in with values
dim1, dim2, dim3, dim4 = 10, 12, 15, 20
A_shape = (dim1, dim2, dim3)
B_shape = (dim1, dim3, dim4)
C_shape = (dim1, dim2, dim4)
A = np.arange(np.prod(A_shape)).reshape(A_shape).astype(floatX)
B = np.arange(np.prod(B_shape)).reshape(B_shape).astype(floatX)
C = c.eval({a: A, b: B})
# check shape
assert C.shape == C_shape
# check content
C_ = np.zeros((dim1, dim2, dim4))
for i in range(dim1):
C_[i] = np.dot(A[i], B[i])
assert np.allclose(C, C_)
class TestZoomableAttentionWindow:
def setUp(self):
# Device under test
self.channels = 1
self.height = 50
self.width = 120
self.N = 100
self.zaw = ZoomableAttentionWindow(self.channels, self.height, self.width, self.N)
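        # ZoomableAttentionWindow implements DRAW-style attention: a grid of
        # N x N Gaussian filters. filterbank_matrices() yields FY with shape
        # (batch, N, height) and FX with shape (batch, N, width); read()
        # applies FY . image . FX^T to extract an N x N glimpse per image.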
def test_filterbank_matrices(self):
batch_size = 100
height, width = self.height, self.width
N = self.N
zaw = self.zaw
# Create theano function
center_y, center_x = T.fvectors('center_x', 'center_y')
delta, sigma = T.fvectors('delta', 'sigma')
FY, FX = zaw.filterbank_matrices(center_y, center_x, delta, sigma)
do_filterbank = theano.function(
[center_y, center_x, delta, sigma],
[FY, FX],
name="do_filterbank_matrices",
allow_input_downcast=True)
# test theano function
center_y = np.linspace(-height, 2*height, batch_size)
center_x = np.linspace(-width, 2*width, batch_size)
delta = np.linspace(0.1, height, batch_size)
sigma = np.linspace(0.1, height, batch_size)
FY, FX = do_filterbank(center_y, center_x, delta, sigma)
assert FY.shape == (batch_size, N, height)
assert FX.shape == (batch_size, N, width)
assert np.isfinite(FY).all()
assert np.isfinite(FX).all()
def test_read(self):
batch_size = 100
height, width = self.height, self.width
N = self.N
zaw = self.zaw
# Create theano function
images = T.ftensor3('images')
center_y, center_x = T.fvectors('center_x', 'center_y')
delta, sigma = T.fvectors('delta', 'sigma')
readout = zaw.read(images, center_y, center_x, delta, sigma)
do_read = theano.function(
[images, center_y, center_x, delta, sigma],
readout,
name="do_read",
allow_input_downcast=True)
# Test theano function
images = np.random.uniform(size=(batch_size, height, width))
center_y = np.linspace(-height, 2*height, batch_size)
center_x = np.linspace(-width, 2*width, batch_size)
delta = np.linspace(0.1, height, batch_size)
sigma = np.linspace(0.1, height, batch_size)
readout = do_read(images, center_y, center_x, delta, sigma)
assert readout.shape == (batch_size, N**2)
assert np.isfinite(readout).all()
assert (readout >= 0.).all()
assert (readout <= 1.).all()
def test_write(self):
batch_size = 100
height, width = self.height, self.width
N = self.N
zaw = self.zaw
# Create theano function
content = T.fmatrix('content')
center_y, center_x = T.fvectors('center_x', 'center_y')
delta, sigma = T.fvectors('delta', 'sigma')
images = zaw.write(content, center_y, center_x, delta, sigma)
do_write = theano.function(
[content, center_y, center_x, delta, sigma],
images,
name="do_write",
allow_input_downcast=True)
# Test theano function
content = np.random.uniform(size=(batch_size, N**2))
center_y = np.linspace(-height, 2*height, batch_size)
center_x = np.linspace(-width, 2*width, batch_size)
delta = np.linspace(0.1, height, batch_size)
sigma = np.linspace(0.1, height, batch_size)
images = do_write(content, center_y, center_x, delta, sigma)
assert images.shape == (batch_size, height*width)
assert np.isfinite(images).all()
| mit | -8,603,040,143,491,723,000 | 28.856164 | 90 | 0.578802 | false |
raccoongang/socraticqs2 | mysite/lti/tests.py | 1 | 21256 | # coding=utf-8
import json
import oauth2
from datetime import date, timedelta
import unittest
from mock import patch, Mock
from ddt import ddt, data, unpack
from django.utils import timezone
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from accounts.models import Profile, Instructor
from psa.models import UserSocialAuth
from ct.models import Course, Role, Unit, CourseUnit, UnitLesson, Lesson
from lti.models import LTIUser, CourseRef, LtiConsumer
from lti.views import create_courseref
class LTITestCase(TestCase):
def setUp(self):
"""
Preconditions.
"""
from chat.fsm_plugin.chat import get_specs
from chat.fsm_plugin.additional import get_specs as get_specs_additional
self.client = Client()
self.user = User.objects.create_user('test', 'test@test.com', 'test')
get_specs()[0].save_graph(self.user.username)
get_specs_additional()[0].save_graph(self.user.username)
mocked_nonce = u'135685044251684026041377608307'
mocked_timestamp = u'1234567890'
mocked_decoded_signature = u'my_signature='
self.headers = {
u'user_id': 1,
u'lis_person_name_full': u'Test Username',
u'lis_person_name_given': u'First',
u'lis_person_name_family': u'Second',
u'lis_person_contact_email_primary': u'test@test.com',
u'lis_person_sourcedid': u'Test_Username',
u'oauth_callback': u'about:blank',
u'launch_presentation_return_url': '',
u'lti_message_type': u'basic-lti-launch-request',
u'lti_version': 'LTI-1p0',
u'roles': u'Student',
u'context_id': 1,
u'tool_consumer_info_product_family_code': u'moodle',
u'context_title': u'Test title',
u'tool_consumer_instance_guid': u'test.dot.com',
u'resource_link_id': 'dfgsfhrybvrth',
u'lis_result_sourcedid': 'wesgaegagrreg',
u'oauth_nonce': mocked_nonce,
u'oauth_timestamp': mocked_timestamp,
u'oauth_consumer_key': u'consumer_key',
u'oauth_signature_method': u'HMAC-SHA1',
u'oauth_version': u'1.0',
u'oauth_signature': mocked_decoded_signature
}
self.unit = Unit(title='Test title', addedBy=self.user)
self.unit.save()
self.course = Course(title='Test title',
description='test description',
access='Public',
enrollCode='111',
lockout='222',
addedBy=self.user)
self.course.save()
self.course_ref = CourseRef(
course=self.course, context_id=self.headers.get('context_id'),
tc_guid=self.headers.get('tool_consumer_instance_guid')
)
self.course_ref.save()
self.course_ref.instructors.add(self.user)
self.role1 = Role(
role=Role.ENROLLED,
user=self.user,
course=self.course,
)
self.role1.save()
self.courseunit = CourseUnit(
unit=self.unit, course=self.course,
order=0, addedBy=self.user, releaseTime=timezone.now()
)
self.courseunit.save()
lesson = Lesson(title='title', text='text', addedBy=self.user)
lesson.save()
unitlesson = UnitLesson(
unit=self.unit, order=0, lesson=lesson, addedBy=self.user, treeID=lesson.id
)
unitlesson.save()
self.lti_consumer = LtiConsumer(
consumer_name='test',
consumer_key='consumer_key',
consumer_secret='test_key'
)
self.lti_consumer.save()
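        # The oauth_signature above is a mocked placeholder; a real LTI launch
        # would sign the POST parameters with the consumer secret, roughly
        # (illustrative, not exercised by these tests):
        #   consumer = oauth2.Consumer(u'consumer_key', 'test_key')
        #   req = oauth2.Request(method='POST', url=launch_url,
        #                        parameters=self.headers)
        #   req.sign_request(oauth2.SignatureMethod_HMAC_SHA1(), consumer, None)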
@patch('lti.views.DjangoToolProvider')
class MethodsTest(LTITestCase):
"""
Test for correct request method passed in view.
"""
@patch('lti.views.waffle.switch_is_active', return_value=False)
def test_post(self, switch, mocked):
mocked.return_value.is_valid_request.return_value = True
response = self.client.post('/lti/', data=self.headers, follow=True)
self.assertTemplateUsed(response, template_name='ct/course.html')
switch.return_value = True
response = self.client.post(
'/lti/',
data=self.headers,
follow=True
)
self.assertTemplateUsed(response, template_name='lms/course_page.html')
def test_failure_post(self, mocked):
mocked.return_value.is_valid_request.return_value = False
response = self.client.post('/lti/', data=self.headers, follow=True)
self.assertTemplateUsed(response, template_name='lti/error.html')
def test_get(self, mocked):
mocked.return_value.is_valid_request.return_value = True
response = self.client.get('/lti/', follow=True)
self.assertTemplateUsed(response, template_name='lti/error.html')
@ddt
@patch('lti.views.DjangoToolProvider')
class ParamsTest(LTITestCase):
"""
Test different params handling.
"""
@unpack
@data((Role.INSTRUCTOR, {u'roles': u'Instructor'}),
(Role.ENROLLED, {u'roles': u'Learner'}))
def test_roles(self, role, header, mocked):
self.headers.update(header)
mocked.return_value.is_valid_request.return_value = True
self.client.post('/lti/',
data=self.headers,
follow=True)
self.assertTrue(Role.objects.filter(role=role).exists())
def test_user_id(self, mocked):
del self.headers[u'user_id']
mocked.return_value.is_valid_request.return_value = True
response = self.client.post('/lti/',
data=self.headers,
follow=True)
self.assertTemplateUsed(response, template_name='lti/error.html')
def test_roles_none(self, mocked):
del self.headers[u'roles']
mocked.return_value.is_valid_request.return_value = True
self.client.post('/lti/', data=self.headers, follow=True)
self.assertTrue(Role.objects.filter(role=Role.ENROLLED).exists())
def test_lti_user(self, mocked):
"""
Default LTI user creation process.
"""
mocked.return_value.is_valid_request.return_value = True
self.client.post('/lti/', data=self.headers, follow=True)
self.assertTrue(LTIUser.objects.filter(lti_consumer=self.lti_consumer).exists())
self.assertTrue(Role.objects.filter(role=Role.ENROLLED).exists())
self.assertEqual(LTIUser.objects.get(lti_consumer=self.lti_consumer).django_user,
self.user)
def test_lti_users_same_full_name(self, mocked):
"""
Test user creation w/ the same `lis_person_name_full`.
"""
mocked.return_value.is_valid_request.return_value = True
# Link LtiUser to Django user by email
self.client.post('/lti/', data=self.headers, follow=True)
self.assertEqual(
self.user, LTIUser.objects.get(lti_consumer=self.lti_consumer).django_user
)
# Create new Django user
self.headers[u'user_id'] = 2
self.headers[u'lis_person_contact_email_primary'] = 'new_email@mail.com'
self.client.post('/lti/', data=self.headers, follow=True)
self.assertNotEqual(self.user, UserSocialAuth.objects.get(
uid=self.headers[u'lis_person_contact_email_primary']
).user)
first_user = UserSocialAuth.objects.get(
uid=self.headers[u'lis_person_contact_email_primary']
).user
# Create another Django user
self.headers[u'user_id'] = 3
self.headers[u'lis_person_contact_email_primary'] = 'new_email_2@mail.com'
self.client.post('/lti/', data=self.headers, follow=True)
self.assertNotEqual(first_user, UserSocialAuth.objects.get(
uid=self.headers[u'lis_person_contact_email_primary']
).user)
@patch('lti.models.hash_lti_user_data')
def test_lti_user_unicode_username(self, hash_lti_user_data, mocked):
"""
Test unicode full name from LTI.
"""
mocked.return_value.is_valid_request.return_value = True
hashvalue = 'somehashvalue'
hash_lti_user_data.return_value = hashvalue
self.headers[u'user_id'] = 2
self.headers[u'lis_person_contact_email_primary'] = 'new_email@mail.com'
self.headers[u'lis_person_name_full'] = u'きつね'
self.client.post('/lti/', data=self.headers, follow=True)
new_user = UserSocialAuth.objects.get(
uid=self.headers[u'lis_person_contact_email_primary']
).user
self.assertNotEqual(self.user, new_user)
self.assertEqual(new_user.username, hashvalue)
def test_lti_user_no_email(self, mocked):
del self.headers[u'lis_person_contact_email_primary']
mocked.return_value.is_valid_request.return_value = True
self.client.post('/lti/',
data=self.headers,
follow=True)
self.assertTrue(LTIUser.objects.filter(lti_consumer=self.lti_consumer).exists())
self.assertTrue(Role.objects.filter(role=Role.ENROLLED).exists())
self.assertNotEqual(LTIUser.objects.get(lti_consumer=self.lti_consumer).django_user,
User.objects.get(id=self.user.id))
@patch('lti.models.hash_lti_user_data')
def test_lti_user_no_username_no_email(self, hash_lti_user_data, mocked):
"""Test for non-existent username field
If there is no username in POST
we create user with random username.
"""
test_random_username = 'c'*32
del self.headers[u'lis_person_name_full']
del self.headers[u'lis_person_contact_email_primary']
mocked.return_value.is_valid_request.return_value = True
hash_lti_user_data.return_value = test_random_username[:30]
self.client.post('/lti/', data=self.headers, follow=True)
self.assertTrue(LTIUser.objects.filter(lti_consumer=self.lti_consumer).exists())
self.assertTrue(Role.objects.filter(role=Role.ENROLLED).exists())
self.assertNotEqual(LTIUser.objects.get(lti_consumer=self.lti_consumer).django_user,
User.objects.get(id=self.user.id))
self.assertEqual(
LTIUser.objects.get(lti_consumer=self.lti_consumer).django_user.username,
test_random_username[:30]
)
self.assertEqual(
len(LTIUser.objects.get(lti_consumer=self.lti_consumer).django_user.username),
30
)
def test_lti_user_link_social(self, mocked):
"""
Default LTI user creation process.
"""
social = UserSocialAuth(
user=self.user,
uid=self.headers[u'lis_person_contact_email_primary'],
provider='email'
)
social.save()
mocked.return_value.is_valid_request.return_value = True
self.client.post('/lti/',
data=self.headers,
follow=True)
self.assertTrue(LTIUser.objects.filter(lti_consumer=self.lti_consumer).exists())
self.assertTrue(Role.objects.filter(role=Role.ENROLLED).exists())
self.assertEqual(LTIUser.objects.get(lti_consumer=self.lti_consumer).django_user,
social.user)
def test_lti_user_timezone_positive(self, mocked):
self.user.profile.delete()
mocked.return_value.is_valid_request.return_value = True
self.client.post('/lti/', data=self.headers, follow=True, REMOTE_ADDR='194.242.96.17')
# update user, get it again with newly created profile
user = User.objects.get(id=self.user.id)
# profile should be created
self.assertIsNotNone(user.profile)
profile = Profile.objects.get(user=user)
self.assertEqual(profile.id, user.profile.id)
self.assertIsNotNone(profile.timezone)
# IP 194.242.96.17 should be associated with Europe/Zaporozhye TZ
self.assertEqual(profile.timezone, 'Europe/Zaporozhye')
def test_lti_user_timezone_default_value(self, mocked):
self.user.profile.delete()
mocked.return_value.is_valid_request.return_value = True
self.client.post('/lti/', data=self.headers, follow=True, REMOTE_ADDR='172.16.0.1')
# update user, get it again with newly created profile
user = User.objects.get(id=self.user.id)
# profile should be created
self.assertIsNotNone(user.profile)
profile = Profile.objects.get(user=user)
self.assertEqual(profile.id, user.profile.id)
self.assertIsNotNone(profile.timezone)
        # IP 172.16.0.1 is private, so the profile falls back to the default settings.TIME_ZONE
from django.conf import settings
self.assertEqual(profile.timezone, settings.TIME_ZONE)
@ddt
@patch('lti.views.DjangoToolProvider')
class ExceptionTest(LTITestCase):
"""
Test raising exception.
"""
@data(oauth2.MissingSignature, oauth2.Error, KeyError, AttributeError)
def test_exceptions(self, exception, mocked):
mocked.return_value.is_valid_request.side_effect = exception()
response = self.client.get('/lti/', follow=True)
self.assertTemplateUsed(response, template_name='lti/error.html')
class ModelTest(LTITestCase):
"""
Test model LTIUser.
"""
def test_lti_user_not_enrolled(self):
"""Test that user not enrolled yet"""
lti_user = LTIUser(user_id=self.user.id,
lti_consumer=self.lti_consumer,
extra_data=json.dumps(self.headers),
django_user=self.user)
lti_user.save()
self.role1.delete()
self.assertFalse(lti_user.is_enrolled('student', self.course.id))
def test_lti_user(self):
"""Test enrollment process"""
lti_user = LTIUser(user_id=self.user.id,
lti_consumer=self.lti_consumer,
extra_data=json.dumps(self.headers),
django_user=self.user)
lti_user.save()
lti_user.enroll('student', self.course.id)
self.assertTrue(lti_user.is_enrolled('student', self.course.id))
def test_lti_user_create_links(self):
"""Creating LTIUser without Django user
Testing Django user creation process.
"""
lti_user = LTIUser(user_id=self.user.id,
lti_consumer=self.lti_consumer,
extra_data=json.dumps(self.headers))
lti_user.save()
self.assertFalse(lti_user.is_linked)
lti_user.create_links()
self.assertTrue(lti_user.is_linked)
@ddt
@patch('lti.views.DjangoToolProvider')
class TestCourseRef(LTITestCase):
"""
Testing CourseRef object.
"""
def test_course_ref_roles(self, mocked):
"""Test different action for different roles"""
mocked.return_value.is_valid_request.return_value = True
self.course_ref.delete()
response = self.client.post('/lti/', data=self.headers, follow=True)
self.assertFalse(CourseRef.objects.filter(course=self.course).exists())
self.assertTemplateUsed(response, 'lti/error.html')
def test_create_courseref_only_lti(self, mocked):
"""
Test that only LTI is allowed.
"""
request = Mock()
request.session = {}
res = create_courseref(request)
self.assertEqual(res.content, 'Only LTI allowed')
@unpack
@data(('1', 'ct:course'), ('1111', 'ct:edit_course'))
    def test_create_courseref_existence(self, context_id, landing_page, mocked):
"""
Test for existence/non-existence of CourseRef.
"""
_id = self.course.id if context_id == '1' else self.course.id + 1
lti_post = {'context_id': context_id,
'context_title': 'test title',
'tool_consumer_instance_guid': 'test.dot.com',
'roles': 'Instructor'}
request = Mock()
request.user = self.user
request.session = {'LTI_POST': lti_post,
'is_valid': True}
res = create_courseref(request)
        self.assertEqual(res.url, reverse(landing_page, args=(_id,)))
@patch('lti.views.waffle.switch_is_active', return_value=False)
@patch('lti.views.DjangoToolProvider')
class TestUnit(LTITestCase):
"""
Testing Unit template rendering.
"""
def test_unit_render(self, mocked, switch):
mocked.return_value.is_valid_request.return_value = True
response = self.client.post(
'/lti/unit/{}/'.format(self.unit.id), data=self.headers, follow=True
)
self.assertTemplateUsed(response, 'ct/study_unit.html')
switch.return_value = True
response = self.client.post(
'/lti/unit/{}/'.format(self.unit.id), data=self.headers, follow=True
)
self.assertTemplateUsed(response, 'chat/main_view.html')
def test_instructor_enabled_404_wo_instructor_profile(self, mocked, switch):
"""
        Checks the redirect to the new Instructor UI, expecting failure because the user has no instructor profile.
"""
mocked.return_value.is_valid_request.return_value = True
switch.return_value = True
headers = self.headers.copy()
headers['roles'] = 'Instructor'
response = self.client.post(
'/lti/unit/{}/'.format(self.unit.id), data=headers, follow=True
)
_next = reverse('ctms:courslet_view', args=(self.course.id, self.unit.id))
self.assertRedirects(response, '{}?next={}'.format(reverse('accounts:profile_update'), _next))
def test_instructor_enabled_w_instructor_profile_unit_view(self, mocked, switch):
"""
Checks redirect to the new Instructor UI on courselet detail page.
"""
mocked.return_value.is_valid_request.return_value = True
switch.return_value = True
headers = self.headers.copy()
headers['roles'] = 'Instructor'
Instructor.objects.create(user=self.user, institution='institute',
what_do_you_teach='something')
response = self.client.post(
'/lti/unit/{}/'.format(self.unit.id), data=headers, follow=True
)
assert response.status_code == 200
self.assertTemplateUsed(response, 'ctms/courselet_detail.html')
def test_instructor_enabled_w_instructor_profile_course_view(self, mocked, switch):
"""
Checks redirect to the new Instructor UI on course detail page.
"""
mocked.return_value.is_valid_request.return_value = True
switch.return_value = True
headers = self.headers.copy()
headers['roles'] = 'Instructor'
Instructor.objects.create(user=self.user, institution='institute',
what_do_you_teach='something')
response = self.client.post('/lti/', data=headers, follow=True)
assert response.status_code == 200
self.assertTemplateUsed(response, 'ctms/course_detail.html')
class AcceptanceTests(LTITestCase):
"""
Acceptance test to check different flows of handling LTI requests.
"""
def test_expired_consumer(self):
"""
Checking that expired consumer will not be used.
"""
self.lti_consumer.expiration_date = date.today() - timedelta(days=1)
response = self.client.post('/lti/', data=self.headers, follow=True)
self.assertTemplateUsed(response, 'lti/error.html')
@patch('lti.views.LtiConsumer.objects.filter')
def test_short_term_consumer(self, mocked_consumer):
"""
Test that user w/ short_term flag will be treated correctly.
"""
self.lti_consumer.expiration_date = date.today() + timedelta(days=1)
self.headers['custom_short_term'] = 'true'
response = self.client.post('/lti/', data=self.headers, follow=True)
mocked_consumer.assert_called_once_with(
consumer_key=self.headers['oauth_consumer_key']
)
@patch('lti.views.LtiConsumer.get_or_combine')
def test_typical_consumer(self, mocked_consumer):
"""
        Typical LTI request (w/o short_term flag) will be treated w/ get_or_combine.
"""
self.lti_consumer.expiration_date = date.today() + timedelta(days=1)
response = self.client.post('/lti/', data=self.headers, follow=True)
mocked_consumer.assert_called_once_with(
self.headers['tool_consumer_instance_guid'],
self.headers['oauth_consumer_key'],
)
def test_no_consumer_found(self):
"""
        If no LtiConsumer is found, the error template is rendered.
"""
self.lti_consumer.delete()
response = self.client.post('/lti/', data=self.headers, follow=True)
self.assertTemplateUsed(response, 'lti/error.html')
| apache-2.0 | 8,832,074,296,113,942,000 | 39.170132 | 102 | 0.615718 | false |
lkhomenk/integration_tests | cfme/automate/dialogs/service_dialogs.py | 6 | 4707 | import attr
from cached_property import cached_property
from navmazing import NavigateToAttribute, NavigateToSibling
from widgetastic.utils import Fillable
from widgetastic.widget import Text
from widgetastic_patternfly import CandidateNotFound
from cfme.exceptions import ItemNotFound
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to
from widgetastic_manageiq import PaginationPane, Table
from . import AutomateCustomizationView, AddDialogView, EditDialogView
from .dialog_tab import TabCollection
class DialogsView(AutomateCustomizationView):
title = Text("#explorer_title_text")
paginator = PaginationPane()
table = Table(".//div[@id='list_grid' or @class='miq-data-table']/table")
@property
def is_displayed(self):
return (
self.in_customization and
self.title.text == 'All Dialogs' and
self.service_dialogs.is_opened and
self.service_dialogs.tree.currently_selected == ["All Dialogs"])
class DetailsDialogView(AutomateCustomizationView):
title = Text("#explorer_title_text")
@property
def is_displayed(self):
return (
self.in_customization and self.service_dialogs.is_opened and
self.title.text == 'Dialog "{}"'.format(self.context['object'].label)
)
@attr.s
class Dialog(BaseEntity, Fillable):
"""A class representing one Dialog in the UI."""
label = attr.ib()
description = attr.ib(default=None)
_collections = {'tabs': TabCollection}
def as_fill_value(self):
return self.label
@property
def dialog(self):
return self
@cached_property
def tabs(self):
return self.collections.tabs
@property
def tree_path(self):
return self.parent.tree_path + [self.label]
def update(self, updates):
""" Update dialog method"""
view = navigate_to(self, 'Edit')
changed = view.fill(updates)
if changed:
view.save_button.click()
else:
view.cancel_button.click()
view = self.create_view(DetailsDialogView, override=updates)
assert view.is_displayed
view.flash.assert_no_error()
if changed:
view.flash.assert_message(
'{} was saved'.format(updates.get('name', self.label)))
else:
view.flash.assert_message(
'Dialog editing was canceled by the user.')
def delete(self):
""" Delete dialog method"""
view = navigate_to(self, "Details")
view.configuration.item_select('Remove Dialog', handle_alert=True)
view = self.create_view(DialogsView)
assert view.is_displayed
view.flash.assert_no_error()
view.flash.assert_success_message(
'Dialog "{}": Delete successful'.format(self.label))
@property
def exists(self):
""" Returns True if dialog exists"""
try:
navigate_to(self, 'Details')
return True
except (CandidateNotFound, ItemNotFound):
return False
def delete_if_exists(self):
if self.exists:
self.delete()
@attr.s
class DialogCollection(BaseCollection):
"""Collection object for the :py:class:`Dialog`."""
tree_path = ['All Dialogs']
ENTITY = Dialog
def create(self, label=None, description=None):
""" Create dialog label method """
view = navigate_to(self, 'Add')
view.fill({'label': label, 'description': description})
return self.instantiate(
label=label, description=description)
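# Illustrative collection usage (the `appliance.collections.service_dialogs`
# accessor name is assumed here, following the usual cfme pattern):
#   dialog = appliance.collections.service_dialogs.create(
#       label='my-dialog', description='an example dialog')
#   dialog.update({'description': 'changed'})
#   dialog.delete_if_exists()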
@navigator.register(DialogCollection)
class All(CFMENavigateStep):
VIEW = DialogsView
prerequisite = NavigateToAttribute('appliance.server', 'AutomateCustomization')
def step(self):
self.view.service_dialogs.tree.click_path(*self.obj.tree_path)
@navigator.register(DialogCollection)
class Add(CFMENavigateStep):
VIEW = AddDialogView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.configuration.item_select('Add a new Dialog')
@navigator.register(Dialog)
class Details(CFMENavigateStep):
VIEW = DetailsDialogView
prerequisite = NavigateToAttribute('appliance.server', 'AutomateCustomization')
def step(self):
self.prerequisite_view.service_dialogs.tree.click_path(*self.obj.tree_path)
@navigator.register(Dialog)
class Edit(CFMENavigateStep):
VIEW = EditDialogView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.configuration.item_select("Edit this Dialog")
| gpl-2.0 | 7,005,621,286,888,689,000 | 28.791139 | 92 | 0.665604 | false |
samhoo/askbot-realworld | askbot/migrations/0044_migrate_has_custom_avatar_field.py | 20 | 27460 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from askbot.utils.console import print_action
from unidecode import unidecode
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
print 'Migrating users to new avatar field'
for user in orm['auth.user'].objects.all():
print_action('migrating user: %s' % unidecode(user.username))
            if user.has_custom_avatar:
                user.avatar_type = 'a'
            else:
                user.avatar_type = 'n'
user.save()
print_action(
'user %s migrated avatar_type: %s' % \
(unidecode(user.username), user.avatar_type)
)
def backwards(self, orm):
"Write your backwards methods here."
pass
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.answerrevision': {
'Meta': {'ordering': "('-revision',)", 'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'askbot.comment': {
'Meta': {'ordering': "('-added_at',)", 'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'offensive_flag_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'askbot.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteQuestion']", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.questionrevision': {
'Meta': {'ordering': "('-revision',)", 'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'has_custom_avatar': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot']
| gpl-3.0 | -5,655,925,277,057,785,000 | 84.01548 | 221 | 0.556883 | false |
gentledevil/ansible | lib/ansible/plugins/action/include_vars.py | 82 | 1853 | # (c) 2013-2014, Benno Joy <benno@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.errors import AnsibleError
from ansible.parsing import DataLoader
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = False
def run(self, tmp=None, task_vars=dict()):
source = self._task.args.get('_raw_params')
if self._task._role:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'vars', source)
else:
source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'vars', source)
if os.path.exists(source):
(data, show_content) = self._loader._get_file_contents(source)
data = self._loader.load(data, show_content)
if data is None:
data = {}
if not isinstance(data, dict):
raise AnsibleError("%s must be stored as a dictionary/hash" % source)
return dict(ansible_facts=data, _ansible_no_log=not show_content)
else:
return dict(failed=True, msg="Source file not found.", file=source)
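# Playbook-side view of what this action implements (illustrative):
#
#   - include_vars: stuff.yml
#
# loads vars/stuff.yml relative to the role (if any) or the playbook basedir
# and publishes the loaded mapping as facts.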
| gpl-3.0 | -5,914,072,489,739,361,000 | 36.816327 | 97 | 0.67728 | false |
dga4654dan/UTM-Demo | V_1_0_2_1/UtmDemo_Sfs_2.9.0/UtmDemo_Sfs_2.9.0_Server/lib/Lib/test/test_bisect.py | 15 | 5533 | from test_support import TestFailed
import bisect
import sys
nerrors = 0
def check_bisect(func, seq, elt, expected):
    "Verify that func(seq, elt) returns the expected insertion point"
    global nerrors
    got = func(seq, elt)
    if got != expected:
        print >> sys.stderr, \
              "expected %s(%s, %s) -> %s, but got %s" % (func.__name__,
                                                         seq,
                                                         elt,
                                                         expected,
                                                         got)
        nerrors += 1
# XXX optional slice arguments need tests.
check_bisect(bisect.bisect_right, [], 1, 0)
check_bisect(bisect.bisect_right, [1], 0, 0)
check_bisect(bisect.bisect_right, [1], 1, 1)
check_bisect(bisect.bisect_right, [1], 2, 1)
check_bisect(bisect.bisect_right, [1, 1], 0, 0)
check_bisect(bisect.bisect_right, [1, 1], 1, 2)
check_bisect(bisect.bisect_right, [1, 1], 2, 2)
check_bisect(bisect.bisect_right, [1, 1, 1], 0, 0)
check_bisect(bisect.bisect_right, [1, 1, 1], 1, 3)
check_bisect(bisect.bisect_right, [1, 1, 1], 2, 3)
check_bisect(bisect.bisect_right, [1, 1, 1, 1], 0, 0)
check_bisect(bisect.bisect_right, [1, 1, 1, 1], 1, 4)
check_bisect(bisect.bisect_right, [1, 1, 1, 1], 2, 4)
check_bisect(bisect.bisect_right, [1, 2], 0, 0)
check_bisect(bisect.bisect_right, [1, 2], 1, 1)
check_bisect(bisect.bisect_right, [1, 2], 1.5, 1)
check_bisect(bisect.bisect_right, [1, 2], 2, 2)
check_bisect(bisect.bisect_right, [1, 2], 3, 2)
check_bisect(bisect.bisect_right, [1, 1, 2, 2], 0, 0)
check_bisect(bisect.bisect_right, [1, 1, 2, 2], 1, 2)
check_bisect(bisect.bisect_right, [1, 1, 2, 2], 1.5, 2)
check_bisect(bisect.bisect_right, [1, 1, 2, 2], 2, 4)
check_bisect(bisect.bisect_right, [1, 1, 2, 2], 3, 4)
check_bisect(bisect.bisect_right, [1, 2, 3], 0, 0)
check_bisect(bisect.bisect_right, [1, 2, 3], 1, 1)
check_bisect(bisect.bisect_right, [1, 2, 3], 1.5, 1)
check_bisect(bisect.bisect_right, [1, 2, 3], 2, 2)
check_bisect(bisect.bisect_right, [1, 2, 3], 2.5, 2)
check_bisect(bisect.bisect_right, [1, 2, 3], 3, 3)
check_bisect(bisect.bisect_right, [1, 2, 3], 4, 3)
check_bisect(bisect.bisect_right, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 0, 0)
check_bisect(bisect.bisect_right, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 1, 1)
check_bisect(bisect.bisect_right, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 1.5, 1)
check_bisect(bisect.bisect_right, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 2, 3)
check_bisect(bisect.bisect_right, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 2.5, 3)
check_bisect(bisect.bisect_right, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 3, 6)
check_bisect(bisect.bisect_right, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 3.5, 6)
check_bisect(bisect.bisect_right, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 4, 10)
check_bisect(bisect.bisect_right, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 5, 10)
check_bisect(bisect.bisect_left, [], 1, 0)
check_bisect(bisect.bisect_left, [1], 0, 0)
check_bisect(bisect.bisect_left, [1], 1, 0)
check_bisect(bisect.bisect_left, [1], 2, 1)
check_bisect(bisect.bisect_left, [1, 1], 0, 0)
check_bisect(bisect.bisect_left, [1, 1], 1, 0)
check_bisect(bisect.bisect_left, [1, 1], 2, 2)
check_bisect(bisect.bisect_left, [1, 1, 1], 0, 0)
check_bisect(bisect.bisect_left, [1, 1, 1], 1, 0)
check_bisect(bisect.bisect_left, [1, 1, 1], 2, 3)
check_bisect(bisect.bisect_left, [1, 1, 1, 1], 0, 0)
check_bisect(bisect.bisect_left, [1, 1, 1, 1], 1, 0)
check_bisect(bisect.bisect_left, [1, 1, 1, 1], 2, 4)
check_bisect(bisect.bisect_left, [1, 2], 0, 0)
check_bisect(bisect.bisect_left, [1, 2], 1, 0)
check_bisect(bisect.bisect_left, [1, 2], 1.5, 1)
check_bisect(bisect.bisect_left, [1, 2], 2, 1)
check_bisect(bisect.bisect_left, [1, 2], 3, 2)
check_bisect(bisect.bisect_left, [1, 1, 2, 2], 0, 0)
check_bisect(bisect.bisect_left, [1, 1, 2, 2], 1, 0)
check_bisect(bisect.bisect_left, [1, 1, 2, 2], 1.5, 2)
check_bisect(bisect.bisect_left, [1, 1, 2, 2], 2, 2)
check_bisect(bisect.bisect_left, [1, 1, 2, 2], 3, 4)
check_bisect(bisect.bisect_left, [1, 2, 3], 0, 0)
check_bisect(bisect.bisect_left, [1, 2, 3], 1, 0)
check_bisect(bisect.bisect_left, [1, 2, 3], 1.5, 1)
check_bisect(bisect.bisect_left, [1, 2, 3], 2, 1)
check_bisect(bisect.bisect_left, [1, 2, 3], 2.5, 2)
check_bisect(bisect.bisect_left, [1, 2, 3], 3, 2)
check_bisect(bisect.bisect_left, [1, 2, 3], 4, 3)
check_bisect(bisect.bisect_left, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 0, 0)
check_bisect(bisect.bisect_left, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 1, 0)
check_bisect(bisect.bisect_left, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 1.5, 1)
check_bisect(bisect.bisect_left, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 2, 1)
check_bisect(bisect.bisect_left, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 2.5, 3)
check_bisect(bisect.bisect_left, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 3, 3)
check_bisect(bisect.bisect_left, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 3.5, 6)
check_bisect(bisect.bisect_left, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 4, 6)
check_bisect(bisect.bisect_left, [1, 2, 2, 3, 3, 3, 4, 4, 4, 4], 5, 10)
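# The left/right distinction in one worked example:
# bisect.bisect_left([1, 1, 2], 1) == 0 (insertion point before the equal run)
# while bisect.bisect_right([1, 1, 2], 1) == 2 (after it); insort_left and
# insort_right place new elements at those respective positions.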
def check_insort(n):
global nerrors
from random import choice
import sys
digits = "0123456789"
raw = []
insorted = []
for i in range(n):
digit = choice(digits)
raw.append(digit)
if digit in "02468":
f = bisect.insort_left
else:
f = bisect.insort_right
f(insorted, digit)
sorted = raw[:]
sorted.sort()
if sorted == insorted:
return
print >> sys.stderr, "insort test failed: raw %s got %s" % (raw, insorted)
nerrors += 1
check_insort(500)
if nerrors:
raise TestFailed("%d errors in test_bisect" % nerrors)
| gpl-2.0 | 6,152,643,465,653,147,000 | 42.566929 | 78 | 0.587746 | false |
hogarthj/ansible | test/units/parsing/utils/test_yaml.py | 159 | 1176 | # -*- coding: utf-8 -*-
# (c) 2017, Ansible Project
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from ansible.errors import AnsibleParserError
from ansible.parsing.utils.yaml import from_yaml
def test_from_yaml_simple():
assert from_yaml(u'---\n- test: 1\n test2: "2"\n- caf\xe9: "caf\xe9"') == [{u'test': 1, u'test2': u"2"}, {u"caf\xe9": u"caf\xe9"}]
def test_bad_yaml():
with pytest.raises(AnsibleParserError):
from_yaml(u'foo: bar: baz')
| gpl-3.0 | 7,001,523,356,449,801,000 | 33.588235 | 135 | 0.714286 | false |
bala4901/odoo | addons/website/models/test_models.py | 56 | 1285 | # -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class test_converter(orm.Model):
_name = 'website.converter.test'
_columns = {
'char': fields.char(),
'integer': fields.integer(),
'float': fields.float(),
'numeric': fields.float(digits=(16, 2)),
'many2one': fields.many2one('website.converter.test.sub'),
'binary': fields.binary(),
'date': fields.date(),
'datetime': fields.datetime(),
'selection': fields.selection([
(1, "réponse A"),
(2, "réponse B"),
(3, "réponse C"),
(4, "réponse D"),
]),
'selection_str': fields.selection([
('A', "Qu'il n'est pas arrivé à Toronto"),
('B', "Qu'il était supposé arriver à Toronto"),
('C', "Qu'est-ce qu'il fout ce maudit pancake, tabernacle ?"),
('D', "La réponse D"),
], string=u"Lorsqu'un pancake prend l'avion à destination de Toronto et "
u"qu'il fait une escale technique à St Claude, on dit:"),
'html': fields.html(),
'text': fields.text(),
}
class test_converter_sub(orm.Model):
_name = 'website.converter.test.sub'
_columns = {
'name': fields.char(),
}
| agpl-3.0 | 6,485,907,611,561,294,000 | 31.641026 | 81 | 0.527101 | false |
2uller/LotF | App/Tools/Scripts/texcheck.py | 12 | 9489 | """ TeXcheck.py -- rough syntax checking on Python style LaTeX documents.
Written by Raymond D. Hettinger <python at rcn.com>
Copyright (c) 2003 Python Software Foundation. All rights reserved.
Designed to catch common markup errors including:
* Unbalanced or mismatched parentheses, brackets, and braces.
* Unbalanced or mismatched \\begin and \\end blocks.
* Misspelled or invalid LaTeX commands.
* Use of forward slashes instead of backslashes for commands.
* Table line size mismatches.
Sample command line usage:
python texcheck.py -k chapterheading -m lib/librandomtex *.tex
Options:
-m           Munge parentheses and brackets. [0,n) would normally mismatch.
-k keyword: Keyword is a valid LaTeX command. Do not include the backslash.
-d: Delimiter check only (useful for non-LaTeX files).
-h: Help
-s lineno: Start at lineno (useful for skipping complex sections).
-v: Verbose. Trace the matching of //begin and //end blocks.
"""
import re
import sys
import getopt
from itertools import izip, count, islice
import glob
cmdstr = r"""
\section \module \declaremodule \modulesynopsis \moduleauthor
\sectionauthor \versionadded \code \class \method \begin
\optional \var \ref \end \subsection \lineiii \hline \label
\indexii \textrm \ldots \keyword \stindex \index \item \note
\withsubitem \ttindex \footnote \citetitle \samp \opindex
\noindent \exception \strong \dfn \ctype \obindex \character
\indexiii \function \bifuncindex \refmodule \refbimodindex
\subsubsection \nodename \member \chapter \emph \ASCII \UNIX
\regexp \program \production \token \productioncont \term
\grammartoken \lineii \seemodule \file \EOF \documentclass
\usepackage \title \input \maketitle \ifhtml \fi \url \Cpp
\tableofcontents \kbd \programopt \envvar \refstmodindex
\cfunction \constant \NULL \moreargs \cfuncline \cdata
\textasciicircum \n \ABC \setindexsubitem \versionchanged
\deprecated \seetext \newcommand \POSIX \pep \warning \rfc
\verbatiminput \methodline \textgreater \seetitle \lineiv
\funclineni \ulink \manpage \funcline \dataline \unspecified
\textbackslash \mimetype \mailheader \seepep \textunderscore
\longprogramopt \infinity \plusminus \shortversion \version
\refmodindex \seerfc \makeindex \makemodindex \renewcommand
\indexname \appendix \protect \indexiv \mbox \textasciitilde
\platform \seeurl \leftmargin \labelwidth \localmoduletable
\LaTeX \copyright \memberline \backslash \pi \centerline
\caption \vspace \textwidth \menuselection \textless
\makevar \csimplemacro \menuselection \bfcode \sub \release
\email \kwindex \refexmodindex \filenq \e \menuselection
\exindex \linev \newsgroup \verbatim \setshortversion
\author \authoraddress \paragraph \subparagraph \cmemberline
\textbar \C \seelink
"""
def matchclose(c_lineno, c_symbol, openers, pairmap):
"Verify that closing delimiter matches most recent opening delimiter"
try:
o_lineno, o_symbol = openers.pop()
except IndexError:
print "\nDelimiter mismatch. On line %d, encountered closing '%s' without corresponding open" % (c_lineno, c_symbol)
return
if o_symbol in pairmap.get(c_symbol, [c_symbol]): return
print "\nOpener '%s' on line %d was not closed before encountering '%s' on line %d" % (o_symbol, o_lineno, c_symbol, c_lineno)
return
def checkit(source, opts, morecmds=[]):
"""Check the LaTeX formatting in a sequence of lines.
Opts is a mapping of options to option values if any:
-m munge parenthesis and brackets
-d delimiters only checking
-v verbose trace of delimiter matching
-s lineno: linenumber to start scan (default is 1).
Morecmds is a sequence of LaTeX commands (without backslashes) that
are to be considered valid in the scan.
"""
texcmd = re.compile(r'\\[A-Za-z]+')
falsetexcmd = re.compile(r'\/([A-Za-z]+)') # Mismarked with forward slash
validcmds = set(cmdstr.split())
for cmd in morecmds:
validcmds.add('\\' + cmd)
if '-m' in opts:
pairmap = {']':'[(', ')':'(['} # Munged openers
else:
pairmap = {']':'[', ')':'('} # Normal opener for a given closer
openpunct = set('([') # Set of valid openers
delimiters = re.compile(r'\\(begin|end){([_a-zA-Z]+)}|([()\[\]])')
braces = re.compile(r'({)|(})')
    doubledwords = re.compile(r'(\b[A-Za-z]+\b) \b\1\b')
spacingmarkup = re.compile(r'\\(ABC|ASCII|C|Cpp|EOF|infinity|NULL|plusminus|POSIX|UNIX)\s')
openers = [] # Stack of pending open delimiters
bracestack = [] # Stack of pending open braces
tablestart = re.compile(r'\\begin{(?:long)?table([iv]+)}')
tableline = re.compile(r'\\line([iv]+){')
tableend = re.compile(r'\\end{(?:long)?table([iv]+)}')
tablelevel = ''
tablestartline = 0
startline = int(opts.get('-s', '1'))
lineno = 0
for lineno, line in izip(count(startline), islice(source, startline-1, None)):
line = line.rstrip()
# Check balancing of open/close parenthesis, brackets, and begin/end blocks
for begend, name, punct in delimiters.findall(line):
if '-v' in opts:
print lineno, '|', begend, name, punct,
if begend == 'begin' and '-d' not in opts:
openers.append((lineno, name))
elif punct in openpunct:
openers.append((lineno, punct))
elif begend == 'end' and '-d' not in opts:
matchclose(lineno, name, openers, pairmap)
elif punct in pairmap:
matchclose(lineno, punct, openers, pairmap)
if '-v' in opts:
print ' --> ', openers
# Balance opening and closing braces
for open, close in braces.findall(line):
if open == '{':
bracestack.append(lineno)
if close == '}':
try:
bracestack.pop()
except IndexError:
print r'Warning, unmatched } on line %s.' % (lineno,)
# Optionally, skip LaTeX specific checks
if '-d' in opts:
continue
# Warn whenever forward slashes encountered with a LaTeX command
for cmd in falsetexcmd.findall(line):
if '822' in line or '.html' in line:
continue # Ignore false positives for urls and for /rfc822
if '\\' + cmd in validcmds:
print 'Warning, forward slash used on line %d with cmd: /%s' % (lineno, cmd)
# Check for markup requiring {} for correct spacing
for cmd in spacingmarkup.findall(line):
print r'Warning, \%s should be written as \%s{} on line %d' % (cmd, cmd, lineno)
# Validate commands
nc = line.find(r'\newcommand')
if nc != -1:
start = line.find('{', nc)
end = line.find('}', start)
validcmds.add(line[start+1:end])
for cmd in texcmd.findall(line):
if cmd not in validcmds:
print r'Warning, unknown tex cmd on line %d: \%s' % (lineno, cmd)
# Check table levels (make sure lineii only inside tableii)
m = tablestart.search(line)
if m:
tablelevel = m.group(1)
tablestartline = lineno
m = tableline.search(line)
if m and m.group(1) != tablelevel:
print r'Warning, \line%s on line %d does not match \table%s on line %d' % (m.group(1), lineno, tablelevel, tablestartline)
if tableend.search(line):
tablelevel = ''
# Style guide warnings
if 'e.g.' in line or 'i.e.' in line:
            print r'Style warning, avoid use of i.e. or e.g. on line %d' % (lineno,)
for dw in doubledwords.findall(line):
print r'Doubled word warning. "%s" on line %d' % (dw, lineno)
lastline = lineno
for lineno, symbol in openers:
print "Unmatched open delimiter '%s' on line %d" % (symbol, lineno)
for lineno in bracestack:
print "Unmatched { on line %d" % (lineno,)
print 'Done checking %d lines.' % (lastline,)
return 0
def main(args=None):
if args is None:
args = sys.argv[1:]
optitems, arglist = getopt.getopt(args, "k:mdhs:v")
opts = dict(optitems)
if '-h' in opts or args==[]:
print __doc__
return 0
if len(arglist) < 1:
print 'Please specify a file to be checked'
return 1
for i, filespec in enumerate(arglist):
if '*' in filespec or '?' in filespec:
arglist[i:i+1] = glob.glob(filespec)
morecmds = [v for k,v in optitems if k=='-k']
err = []
for filename in arglist:
print '=' * 30
print "Checking", filename
try:
f = open(filename)
except IOError:
print 'Cannot open file %s.' % arglist[0]
return 2
try:
err.append(checkit(f, opts, morecmds))
finally:
f.close()
return max(err)
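# --- Hedged usage sketch (added example; the LaTeX fragment is invented) ---
# checkit() accepts any iterable of lines, so the checker can be driven
# in-memory as well as from a file:
def _texcheck_demo():
    sample = ['\\begin{itemize}\n',
              'An (unbalanced line\n',
              '\\end{enumerate}\n']
    return checkit(sample, {}, morecmds=[])  # prints mismatch diagnostics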
if __name__ == '__main__':
sys.exit(main())
| gpl-2.0 | 5,462,484,294,102,322,000 | 38.725322 | 134 | 0.595532 | false |
xor10/adsbypasser | deploy/mirrors/summary.py | 5 | 4293 | import re
import sys
from markdown.blockprocessors import BlockProcessor
from markdown.preprocessors import Preprocessor
from markdown.preprocessors import ReferencePreprocessor
from markdown.extensions import Extension
from markdown import markdown
from util import to_abs_path
_CHANGELOG_PATH = to_abs_path('../../CHANGELOG.md')
_SITES_PATH = to_abs_path('../../SITES.md')
_TEMPLATE_PATH = to_abs_path('./summary.template.md')
_MESSAGE = {
'both': '''**Lite edition** removes image-hosting site support from **Full edition**.
If you prefer to use other userscripts to deal with image-hosting sites, you can use the Lite edition.
''',
'full': 'If you do not need image-hosting site support, please see [Lite edition]({url}).',
'lite': 'Lite edition does not support image-hosting sites. If you want full-featured edition, please see [here]({url}).',
}
class _ChangeLogProcessor(BlockProcessor):
def __init__(self, parser, pack):
BlockProcessor.__init__(self, parser)
self._pack = pack
self._first = True
def test(self, parent, block):
return self._first
def run(self, parent, blocks):
h = blocks[0]
b = blocks[1]
self._pack.setChangeLog(h, b)
self._first = False
class _ChangeLogExtension(Extension):
def __init__(self, pack):
super(_ChangeLogExtension, self).__init__()
self._pack = pack
def extendMarkdown(self, md, md_globals):
clp = _ChangeLogProcessor(md.parser, self._pack)
md.parser.blockprocessors.add('changelog', clp, '>empty')
class _SitesProcessor(BlockProcessor):
def __init__(self, parser, pack):
BlockProcessor.__init__(self, parser)
self._pack = pack
self._first = True
def test(self, parent, block):
return self._first
def run(self, parent, blocks):
a = blocks[0]
a = a.splitlines()
c = []
d = 0
for b in a:
if b == '* else':
pass
elif b[0] == '*':
c.append(b)
else:
d = d + 1
c = '\n'.join(c)
self._pack.setSites(c, d)
self._first = False
class _SitesExtension(Extension):
def __init__(self, pack):
super(_SitesExtension, self).__init__()
self._pack = pack
def extendMarkdown(self, md, md_globals):
ssp = _SitesProcessor(md.parser, self._pack)
md.parser.blockprocessors.add('sites', ssp, '>empty')
class _Pack(object):
def __init__(self, cl, ss, tpl):
self._cl_head = None
self._cl_body = None
self._ss_group = None
self._ss_count = None
self._tpl = tpl
cle = _ChangeLogExtension(self)
unused = markdown(cl, [cle])
sse = _SitesExtension(self)
unused = markdown(ss, [sse])
self._cl = '{0}\n\n{1}'.format(self._cl_head, self._cl_body)
def setChangeLog(self, head, body):
self._cl_head = head
self._cl_body = body
def setSites(self, group, count):
self._ss_group = group
self._ss_count = count
def getResult(self, edition, url):
args = {
'changelog': self._cl,
'sites': self._ss_group,
'count': self._ss_count,
'edition': _MESSAGE[edition].format(url=url),
}
summary = self._tpl.format(**args)
return summary
def make_summary():
fin = open(_CHANGELOG_PATH, 'r')
cl = fin.read()
fin.close()
fin = open(_SITES_PATH, 'r')
ss = fin.read()
fin.close()
fin = open(_TEMPLATE_PATH, 'r')
tpl = fin.read()
tpl = tpl.decode('utf-8')
fin.close()
pack = _Pack(cl, ss, tpl)
return pack
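# Hedged usage sketch (the URL is illustrative): build the pack once, then
# render the summary text for a given edition.
def _summary_demo():
    pack = make_summary()
    return pack.getResult('lite', 'https://example.org/full-edition')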
def main(args=None):
if args is None:
args = sys.argv
summary = make_summary()
result = summary.getResult('both', '')
summary_path = to_abs_path('../../dest/summary.md')
with open(summary_path, 'w') as fout:
fout.write(result.encode('utf-8'))
return 0
if __name__ == '__main__':
exit_code = main(sys.argv)
sys.exit(exit_code)
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
| bsd-2-clause | -6,001,352,532,895,937,000 | 24.252941 | 126 | 0.583042 | false |
wbrefvem/openshift-ansible | playbooks/common/openshift-cluster/upgrades/library/openshift_upgrade_config.py | 35 | 5312 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Ansible module for modifying OpenShift configs during an upgrade"""
import os
import yaml
DOCUMENTATION = '''
---
module: openshift_upgrade_config
short_description: OpenShift Upgrade Config
author: Jason DeTiberus
requirements: [ ]
'''
EXAMPLES = '''
'''
def modify_api_levels(level_list, remove, ensure, msg_prepend='',
msg_append=''):
""" modify_api_levels """
changed = False
changes = []
if not isinstance(remove, list):
remove = []
if not isinstance(ensure, list):
ensure = []
if not isinstance(level_list, list):
new_list = []
changed = True
changes.append("%s created missing %s" % (msg_prepend, msg_append))
else:
new_list = level_list
for level in remove:
if level in new_list:
new_list.remove(level)
changed = True
changes.append("%s removed %s %s" % (msg_prepend, level, msg_append))
for level in ensure:
if level not in new_list:
new_list.append(level)
changed = True
changes.append("%s added %s %s" % (msg_prepend, level, msg_append))
return {'new_list': new_list, 'changed': changed, 'changes': changes}
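# Hedged illustration of modify_api_levels() (values invented): strip the
# deprecated beta levels and make sure 'v1' survives.
def _modify_api_levels_demo():
    result = modify_api_levels(['v1beta3', 'v1'],
                               remove=['v1beta1', 'v1beta2', 'v1beta3'],
                               ensure=['v1'],
                               msg_prepend='demo:')
    return result['new_list']  # ['v1']; result['changes'] records each edit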
def upgrade_master_3_0_to_3_1(ansible_module, config_base, backup):
"""Main upgrade method for 3.0 to 3.1."""
changes = []
# Facts do not get transferred to the hosts where custom modules run,
# need to make some assumptions here.
master_config = os.path.join(config_base, 'master/master-config.yaml')
master_cfg_file = open(master_config, 'r')
config = yaml.safe_load(master_cfg_file.read())
master_cfg_file.close()
# Remove unsupported api versions and ensure supported api versions from
# master config
unsupported_levels = ['v1beta1', 'v1beta2', 'v1beta3']
supported_levels = ['v1']
result = modify_api_levels(config.get('apiLevels'), unsupported_levels,
supported_levels, 'master-config.yaml:', 'from apiLevels')
if result['changed']:
config['apiLevels'] = result['new_list']
changes.append(result['changes'])
if 'kubernetesMasterConfig' in config and 'apiLevels' in config['kubernetesMasterConfig']:
config['kubernetesMasterConfig'].pop('apiLevels')
changes.append('master-config.yaml: removed kubernetesMasterConfig.apiLevels')
# Add masterCA to serviceAccountConfig
if 'serviceAccountConfig' in config and 'masterCA' not in config['serviceAccountConfig']:
config['serviceAccountConfig']['masterCA'] = config['oauthConfig'].get('masterCA', 'ca.crt')
# Add proxyClientInfo to master-config
if 'proxyClientInfo' not in config['kubernetesMasterConfig']:
config['kubernetesMasterConfig']['proxyClientInfo'] = {
'certFile': 'master.proxy-client.crt',
'keyFile': 'master.proxy-client.key'
}
changes.append("master-config.yaml: added proxyClientInfo")
if len(changes) > 0:
if backup:
# TODO: Check success:
ansible_module.backup_local(master_config)
# Write the modified config:
out_file = open(master_config, 'w')
out_file.write(yaml.safe_dump(config, default_flow_style=False))
out_file.close()
return changes
def upgrade_master(ansible_module, config_base, from_version, to_version, backup):
"""Upgrade entry point."""
if from_version == '3.0':
if to_version == '3.1':
return upgrade_master_3_0_to_3_1(ansible_module, config_base, backup)
def main():
""" main """
# disabling pylint errors for global-variable-undefined and invalid-name
# for 'global module' usage, since it is required to use ansible_facts
# pylint: disable=global-variable-undefined, invalid-name,
# redefined-outer-name
global module
module = AnsibleModule( # noqa: F405
argument_spec=dict(
config_base=dict(required=True),
from_version=dict(required=True, choices=['3.0']),
to_version=dict(required=True, choices=['3.1']),
role=dict(required=True, choices=['master']),
backup=dict(required=False, default=True, type='bool')
),
supports_check_mode=True,
)
from_version = module.params['from_version']
to_version = module.params['to_version']
role = module.params['role']
backup = module.params['backup']
config_base = module.params['config_base']
try:
changes = []
if role == 'master':
changes = upgrade_master(module, config_base, from_version,
to_version, backup)
changed = len(changes) > 0
return module.exit_json(changed=changed, changes=changes)
# ignore broad-except error to avoid stack trace to ansible user
# pylint: disable=broad-except
except Exception as e:
return module.fail_json(msg=str(e))
# ignore pylint errors related to the module_utils import
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, wrong-import-position
# import module snippets
from ansible.module_utils.basic import * # noqa: E402,F403
if __name__ == '__main__':
main()
| apache-2.0 | 3,197,807,509,417,617,000 | 32.834395 | 100 | 0.630271 | false |
etkirsch/scikit-learn | sklearn/datasets/species_distributions.py | 198 | 7923 | """
=============================
Species distribution dataset
=============================
This dataset represents the geographic distribution of species.
The dataset is provided by Phillips et. al. (2006).
The two species are:
- `"Bradypus variegatus"
<http://www.iucnredlist.org/apps/redlist/details/3038/0>`_ ,
the Brown-throated Sloth.
- `"Microryzomys minutus"
<http://www.iucnredlist.org/apps/redlist/details/13408/0>`_ ,
also known as the Forest Small Rice Rat, a rodent that lives in Peru,
   Colombia, Ecuador, and Venezuela.
References:
* `"Maximum entropy modeling of species geographic distributions"
<http://www.cs.princeton.edu/~schapire/papers/ecolmod.pdf>`_
S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,
190:231-259, 2006.
Notes:
* See examples/applications/plot_species_distribution_modeling.py
for an example of using this dataset
"""
# Authors: Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Jake Vanderplas <vanderplas@astro.washington.edu>
#
# License: BSD 3 clause
from io import BytesIO
from os import makedirs
from os.path import join
from os.path import exists
try:
# Python 2
from urllib2 import urlopen
PY2 = True
except ImportError:
# Python 3
from urllib.request import urlopen
PY2 = False
import numpy as np
from sklearn.datasets.base import get_data_home, Bunch
from sklearn.externals import joblib
DIRECTORY_URL = "http://www.cs.princeton.edu/~schapire/maxent/datasets/"
SAMPLES_URL = join(DIRECTORY_URL, "samples.zip")
COVERAGES_URL = join(DIRECTORY_URL, "coverages.zip")
DATA_ARCHIVE_NAME = "species_coverage.pkz"
def _load_coverage(F, header_length=6, dtype=np.int16):
"""Load a coverage file from an open file object.
This will return a numpy array of the given dtype
"""
header = [F.readline() for i in range(header_length)]
make_tuple = lambda t: (t.split()[0], float(t.split()[1]))
header = dict([make_tuple(line) for line in header])
M = np.loadtxt(F, dtype=dtype)
nodata = header[b'NODATA_value']
    if nodata != -9999:
        # Map any non-standard missing-data sentinel onto the -9999 convention.
        M[M == nodata] = -9999
return M
def _load_csv(F):
"""Load csv file.
Parameters
----------
F : file object
CSV file open in byte mode.
Returns
-------
rec : np.ndarray
record array representing the data
"""
if PY2:
# Numpy recarray wants Python 2 str but not unicode
names = F.readline().strip().split(',')
else:
# Numpy recarray wants Python 3 str but not bytes...
names = F.readline().decode('ascii').strip().split(',')
rec = np.loadtxt(F, skiprows=0, delimiter=',', dtype='a22,f4,f4')
rec.dtype.names = names
return rec
def construct_grids(batch):
"""Construct the map grid from the batch object
Parameters
----------
batch : Batch object
The object returned by :func:`fetch_species_distributions`
Returns
-------
(xgrid, ygrid) : 1-D arrays
The grid corresponding to the values in batch.coverages
"""
# x,y coordinates for corner cells
xmin = batch.x_left_lower_corner + batch.grid_size
xmax = xmin + (batch.Nx * batch.grid_size)
ymin = batch.y_left_lower_corner + batch.grid_size
ymax = ymin + (batch.Ny * batch.grid_size)
# x coordinates of the grid cells
xgrid = np.arange(xmin, xmax, batch.grid_size)
# y coordinates of the grid cells
ygrid = np.arange(ymin, ymax, batch.grid_size)
return (xgrid, ygrid)
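# Hedged usage sketch (downloads the dataset on first call): fetch the batch
# and derive the plotting grids from its metadata.
def _species_demo():
    batch = fetch_species_distributions()
    xgrid, ygrid = construct_grids(batch)
    return len(xgrid), len(ygrid)  # expected 1212 and 1592 for these parameters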
def fetch_species_distributions(data_home=None,
download_if_missing=True):
"""Loader for species distribution dataset from Phillips et. al. (2006)
Read more in the :ref:`User Guide <datasets>`.
Parameters
----------
data_home : optional, default: None
Specify another download and cache folder for the datasets. By default
        all scikit-learn data is stored in '~/scikit_learn_data' subfolders.
download_if_missing: optional, True by default
If False, raise a IOError if the data is not locally available
instead of trying to download the data from the source site.
Returns
--------
The data is returned as a Bunch object with the following attributes:
coverages : array, shape = [14, 1592, 1212]
These represent the 14 features measured at each point of the map grid.
The latitude/longitude values for the grid are discussed below.
Missing data is represented by the value -9999.
train : record array, shape = (1623,)
The training points for the data. Each point has three fields:
- train['species'] is the species name
- train['dd long'] is the longitude, in degrees
- train['dd lat'] is the latitude, in degrees
test : record array, shape = (619,)
The test points for the data. Same format as the training data.
Nx, Ny : integers
The number of longitudes (x) and latitudes (y) in the grid
x_left_lower_corner, y_left_lower_corner : floats
The (x,y) position of the lower-left corner, in degrees
grid_size : float
The spacing between points of the grid, in degrees
Notes
------
This dataset represents the geographic distribution of species.
The dataset is provided by Phillips et. al. (2006).
The two species are:
- `"Bradypus variegatus"
<http://www.iucnredlist.org/apps/redlist/details/3038/0>`_ ,
the Brown-throated Sloth.
- `"Microryzomys minutus"
<http://www.iucnredlist.org/apps/redlist/details/13408/0>`_ ,
also known as the Forest Small Rice Rat, a rodent that lives in Peru,
   Colombia, Ecuador, and Venezuela.
References
----------
* `"Maximum entropy modeling of species geographic distributions"
<http://www.cs.princeton.edu/~schapire/papers/ecolmod.pdf>`_
S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,
190:231-259, 2006.
Notes
-----
* See examples/applications/plot_species_distribution_modeling.py
for an example of using this dataset with scikit-learn
"""
data_home = get_data_home(data_home)
if not exists(data_home):
makedirs(data_home)
# Define parameters for the data files. These should not be changed
# unless the data model changes. They will be saved in the npz file
# with the downloaded data.
extra_params = dict(x_left_lower_corner=-94.8,
Nx=1212,
y_left_lower_corner=-56.05,
Ny=1592,
grid_size=0.05)
dtype = np.int16
if not exists(join(data_home, DATA_ARCHIVE_NAME)):
print('Downloading species data from %s to %s' % (SAMPLES_URL,
data_home))
X = np.load(BytesIO(urlopen(SAMPLES_URL).read()))
for f in X.files:
fhandle = BytesIO(X[f])
if 'train' in f:
train = _load_csv(fhandle)
if 'test' in f:
test = _load_csv(fhandle)
print('Downloading coverage data from %s to %s' % (COVERAGES_URL,
data_home))
X = np.load(BytesIO(urlopen(COVERAGES_URL).read()))
coverages = []
for f in X.files:
fhandle = BytesIO(X[f])
print(' - converting', f)
coverages.append(_load_coverage(fhandle))
coverages = np.asarray(coverages, dtype=dtype)
bunch = Bunch(coverages=coverages,
test=test,
train=train,
**extra_params)
joblib.dump(bunch, join(data_home, DATA_ARCHIVE_NAME), compress=9)
else:
bunch = joblib.load(join(data_home, DATA_ARCHIVE_NAME))
return bunch
| bsd-3-clause | -2,717,994,734,983,694,000 | 30.070588 | 79 | 0.621608 | false |
freddierice/volatility | volatility/plugins/linux/vma_cache.py | 58 | 2724 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import volatility.obj as obj
import volatility.plugins.linux.common as linux_common
from volatility.plugins.linux.slab_info import linux_slabinfo
class linux_vma_cache(linux_common.AbstractLinuxCommand):
"""Gather VMAs from the vm_area_struct cache"""
def __init__(self, config, *args, **kwargs):
linux_common.AbstractLinuxCommand.__init__(self, config, *args, **kwargs)
self._config.add_option('UNALLOCATED', short_option = 'u',
default = False,
help = 'Show unallocated',
action = 'store_true')
def calculate(self):
linux_common.set_plugin_members(self)
has_owner = self.profile.obj_has_member("mm_struct", "owner")
cache = linux_slabinfo(self._config).get_kmem_cache("vm_area_struct", self._config.UNALLOCATED)
for vm in cache:
start = vm.vm_start
end = vm.vm_end
if has_owner and vm.vm_mm and vm.vm_mm.is_valid():
task = vm.vm_mm.owner
(task_name, pid) = (task.comm, task.pid)
else:
(task_name, pid) = ("", "")
if vm.vm_file and vm.vm_file.is_valid():
path = vm.vm_file.dentry.get_partial_path()
else:
path = ""
yield task_name, pid, start, end, path
def render_text(self, outfd, data):
self.table_header(outfd, [("Process", "16"),
("PID", "6"),
("Start", "[addrpad]"),
("End", "[addrpad]"),
("Path", "")])
for task_name, pid, start, end, path in data:
self.table_row(outfd, task_name, pid, start, end, path)
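# Hedged usage note (image name and profile are illustrative):
#   python vol.py --profile=LinuxDebian7x64 -f memory.lime linux_vma_cache -u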
| gpl-2.0 | -4,814,854,956,437,420,000 | 28.934066 | 103 | 0.57232 | false |
rrampage/rethinkdb | external/v8_3.30.33.16/buildtools/checkdeps/rules.py | 65 | 6692 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Base classes to represent dependency rules, used by checkdeps.py"""
import os
import re
class Rule(object):
"""Specifies a single rule for an include, which can be one of
ALLOW, DISALLOW and TEMP_ALLOW.
"""
# These are the prefixes used to indicate each type of rule. These
# are also used as values for self.allow to indicate which type of
# rule this is.
ALLOW = '+'
DISALLOW = '-'
TEMP_ALLOW = '!'
def __init__(self, allow, directory, dependent_directory, source):
self.allow = allow
self._dir = directory
self._dependent_dir = dependent_directory
self._source = source
def __str__(self):
return '"%s%s" from %s.' % (self.allow, self._dir, self._source)
def AsDependencyTuple(self):
"""Returns a tuple (allow, dependent dir, dependee dir) for this rule,
which is fully self-sufficient to answer the question whether the dependent
is allowed to depend on the dependee, without knowing the external
context."""
return self.allow, self._dependent_dir or '.', self._dir or '.'
def ParentOrMatch(self, other):
"""Returns true if the input string is an exact match or is a parent
of the current rule. For example, the input "foo" would match "foo/bar"."""
return self._dir == other or self._dir.startswith(other + '/')
def ChildOrMatch(self, other):
"""Returns true if the input string would be covered by this rule. For
example, the input "foo/bar" would match the rule "foo"."""
return self._dir == other or other.startswith(self._dir + '/')
class MessageRule(Rule):
"""A rule that has a simple message as the reason for failing,
unrelated to directory or source.
"""
def __init__(self, reason):
super(MessageRule, self).__init__(Rule.DISALLOW, '', '', '')
self._reason = reason
def __str__(self):
return self._reason
def ParseRuleString(rule_string, source):
"""Returns a tuple of a character indicating what type of rule this
is, and a string holding the path the rule applies to.
"""
if not rule_string:
raise Exception('The rule string "%s" is empty\nin %s' %
(rule_string, source))
if not rule_string[0] in [Rule.ALLOW, Rule.DISALLOW, Rule.TEMP_ALLOW]:
raise Exception(
'The rule string "%s" does not begin with a "+", "-" or "!".' %
rule_string)
return rule_string[0], rule_string[1:]
class Rules(object):
"""Sets of rules for files in a directory.
By default, rules are added to the set of rules applicable to all
dependee files in the directory. Rules may also be added that apply
only to dependee files whose filename (last component of their path)
matches a given regular expression; hence there is one additional
set of rules per unique regular expression.
"""
def __init__(self):
"""Initializes the current rules with an empty rule list for all
files.
"""
# We keep the general rules out of the specific rules dictionary,
# as we need to always process them last.
self._general_rules = []
# Keys are regular expression strings, values are arrays of rules
# that apply to dependee files whose basename matches the regular
# expression. These are applied before the general rules, but
# their internal order is arbitrary.
self._specific_rules = {}
def __str__(self):
result = ['Rules = {\n (apply to all files): [\n%s\n ],' % '\n'.join(
' %s' % x for x in self._general_rules)]
for regexp, rules in self._specific_rules.iteritems():
result.append(' (limited to files matching %s): [\n%s\n ]' % (
regexp, '\n'.join(' %s' % x for x in rules)))
result.append(' }')
return '\n'.join(result)
def AsDependencyTuples(self, include_general_rules, include_specific_rules):
"""Returns a list of tuples (allow, dependent dir, dependee dir) for the
specified rules (general/specific). Currently only general rules are
supported."""
def AddDependencyTuplesImpl(deps, rules, extra_dependent_suffix=""):
for rule in rules:
(allow, dependent, dependee) = rule.AsDependencyTuple()
tup = (allow, dependent + extra_dependent_suffix, dependee)
deps.add(tup)
deps = set()
if include_general_rules:
AddDependencyTuplesImpl(deps, self._general_rules)
if include_specific_rules:
for regexp, rules in self._specific_rules.iteritems():
AddDependencyTuplesImpl(deps, rules, "/" + regexp)
return deps
def AddRule(self, rule_string, dependent_dir, source, dependee_regexp=None):
"""Adds a rule for the given rule string.
Args:
rule_string: The include_rule string read from the DEPS file to apply.
source: A string representing the location of that string (filename, etc.)
so that we can give meaningful errors.
dependent_dir: The directory to which this rule applies.
dependee_regexp: The rule will only be applied to dependee files
whose filename (last component of their path)
matches the expression. None to match all
dependee files.
"""
rule_type, rule_dir = ParseRuleString(rule_string, source)
if not dependee_regexp:
rules_to_update = self._general_rules
else:
if dependee_regexp in self._specific_rules:
rules_to_update = self._specific_rules[dependee_regexp]
else:
rules_to_update = []
# Remove any existing rules or sub-rules that apply. For example, if we're
# passed "foo", we should remove "foo", "foo/bar", but not "foobar".
rules_to_update = [x for x in rules_to_update
if not x.ParentOrMatch(rule_dir)]
rules_to_update.insert(0, Rule(rule_type, rule_dir, dependent_dir, source))
if not dependee_regexp:
self._general_rules = rules_to_update
else:
self._specific_rules[dependee_regexp] = rules_to_update
def RuleApplyingTo(self, include_path, dependee_path):
"""Returns the rule that applies to |include_path| for a dependee
file located at |dependee_path|.
"""
dependee_filename = os.path.basename(dependee_path)
for regexp, specific_rules in self._specific_rules.iteritems():
if re.match(regexp, dependee_filename):
for rule in specific_rules:
if rule.ChildOrMatch(include_path):
return rule
for rule in self._general_rules:
if rule.ChildOrMatch(include_path):
return rule
return MessageRule('no rule applying.')
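# Hedged usage sketch (paths invented): build a small rule set and ask which
# rule governs a given include.
def _rules_demo():
    rules = Rules()
    rules.AddRule('+base', 'chrome/browser', 'demo DEPS')
    rules.AddRule('-net/internal', 'chrome/browser', 'demo DEPS')
    allowed = rules.RuleApplyingTo('base/logging.h', 'chrome/browser/foo.cc')
    denied = rules.RuleApplyingTo('net/internal/sock.h', 'chrome/browser/foo.cc')
    return allowed.allow, denied.allow  # (Rule.ALLOW, Rule.DISALLOW)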
| agpl-3.0 | 5,304,136,728,670,381,000 | 36.595506 | 80 | 0.662283 | false |
manthansharma/kivy | kivy/core/camera/__init__.py | 15 | 4285 | '''
Camera
======
Core class for acquiring the camera and converting its input into a
:class:`~kivy.graphics.texture.Texture`.
.. versionchanged:: 1.10.0
The pygst and videocapture providers have been removed.
.. versionchanged:: 1.8.0
There is now 2 distinct Gstreamer implementation: one using Gi/Gst
working for both Python 2+3 with Gstreamer 1.0, and one using PyGST
working only for Python 2 + Gstreamer 0.10.
'''
__all__ = ('CameraBase', 'Camera')
from kivy.utils import platform
from kivy.event import EventDispatcher
from kivy.logger import Logger
from kivy.core import core_select_lib
class CameraBase(EventDispatcher):
'''Abstract Camera Widget class.
Concrete camera classes must implement initialization and
frame capturing to a buffer that can be uploaded to the gpu.
:Parameters:
`index`: int
Source index of the camera.
`size`: tuple (int, int)
Size at which the image is drawn. If no size is specified,
it defaults to the resolution of the camera image.
`resolution`: tuple (int, int)
Resolution to try to request from the camera.
Used in the gstreamer pipeline by forcing the appsink caps
to this resolution. If the camera doesnt support the resolution,
a negotiation error might be thrown.
:Events:
`on_load`
Fired when the camera is loaded and the texture has become
available.
`on_texture`
Fired each time the camera texture is updated.
'''
__events__ = ('on_load', 'on_texture')
def __init__(self, **kwargs):
kwargs.setdefault('stopped', False)
kwargs.setdefault('resolution', (640, 480))
kwargs.setdefault('index', 0)
self.stopped = kwargs.get('stopped')
self._resolution = kwargs.get('resolution')
self._index = kwargs.get('index')
self._buffer = None
self._format = 'rgb'
self._texture = None
self.capture_device = None
kwargs.setdefault('size', self._resolution)
super(CameraBase, self).__init__()
self.init_camera()
if not self.stopped:
self.start()
def _set_resolution(self, res):
self._resolution = res
self.init_camera()
def _get_resolution(self):
return self._resolution
resolution = property(lambda self: self._get_resolution(),
lambda self, x: self._set_resolution(x),
doc='Resolution of camera capture (width, height)')
def _set_index(self, x):
if x == self._index:
return
self._index = x
self.init_camera()
def _get_index(self):
        return self._index
index = property(lambda self: self._get_index(),
lambda self, x: self._set_index(x),
doc='Source index of the camera')
def _get_texture(self):
return self._texture
texture = property(lambda self: self._get_texture(),
doc='Return the camera texture with the latest capture')
def init_camera(self):
'''Initialise the camera (internal)'''
pass
def start(self):
'''Start the camera acquire'''
self.stopped = False
def stop(self):
'''Release the camera'''
self.stopped = True
def _update(self, dt):
'''Update the camera (internal)'''
pass
def _copy_to_gpu(self):
'''Copy the the buffer into the texture'''
if self._texture is None:
Logger.debug('Camera: copy_to_gpu() failed, _texture is None !')
return
self._texture.blit_buffer(self._buffer, colorfmt=self._format)
self._buffer = None
self.dispatch('on_texture')
def on_texture(self):
pass
def on_load(self):
pass
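# Hedged sketch of a minimal provider (not a real backend): a subclass only has
# to produce a texture in init_camera() and fill self._buffer on each update;
# the solid gray frame below stands in for real camera data.
class _DummyCamera(CameraBase):
    def init_camera(self):
        from kivy.graphics.texture import Texture
        w, h = self._resolution
        self._texture = Texture.create(size=(w, h), colorfmt='rgb')
        self.dispatch('on_load')
    def _update(self, dt):
        w, h = self._resolution
        self._buffer = b'\x80' * (w * h * 3)  # fake RGB frame
        self._copy_to_gpu()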
# Load the appropriate providers
providers = ()
if platform == 'macosx':
providers += (('avfoundation', 'camera_avfoundation',
'CameraAVFoundation'), )
elif platform == 'android':
providers += (('android', 'camera_android', 'CameraAndroid'), )
providers += (('opencv', 'camera_opencv', 'CameraOpenCV'), )
Camera = core_select_lib('camera', (providers))
| mit | 8,243,352,725,636,989,000 | 27.952703 | 79 | 0.599767 | false |
J-Rios/TelegramBots | MolaBot/TSjson.py | 1 | 10795 | # -*- coding: utf-8 -*-
'''
Script: MolaBot.py
Description:
    Telegram Bot that manages a complete reputation system for the users of
    a group. It lets a user give "Likes" to other users' messages, and the
    global number of "Likes" (the sum of all likes across every message of a
    given user) determines that user's reputation points.
Author: Jose Rios Rubio
Date: 26/07/2017
Version: 1.7
'''
import os
import json
from threading import Lock
from collections import OrderedDict
class TSjson:
'''
    Thread-safe access class for reading and writing generic JSON files
    safely from any thread of execution.
'''
    # Class constructor
    def __init__(self, file_name):
        self.lock = Lock()  # Initialize the lock
        self.file_name = file_name  # Name of the file to guard
    # Read a JSON file
    def read(self):
        try:  # Try to open the file
            self.lock.acquire()  # Acquire the mutex
            if not os.path.exists(self.file_name):  # If the file does not exist
                read = {}  # Return an empty dictionary
            else:  # The file exists
                if not os.stat(self.file_name).st_size:  # If the file is empty
                    read = {}  # Return an empty dictionary
                else:  # The file exists and has content
                    with open(self.file_name, "r") as f:  # Open the file for reading
                        read = json.load(f, object_pairs_hook=OrderedDict)  # Parse the whole file into an ordered dictionary
        except:  # Error while opening the file
            print("    Error opening file {} for reading".format(self.file_name))  # Log the error to the console
            read = None  # Return None
        finally:  # Always, whether there was an exception or not
            self.lock.release()  # Release the mutex
        return read  # Return the result of the read
    # Write data to a JSON file
    def write(self, data):
        # Create the directory that holds the data files if it does not exist yet
        directory = os.path.dirname(self.file_name)  # Directory that contains the file
        if not os.path.exists(directory):  # If the directory (path) does not exist
            os.makedirs(directory)  # Create the directory
        try:  # Try to open the file
            self.lock.acquire()  # Acquire the mutex
            with open(self.file_name, "w") as f:  # Open the file for writing (overwrites)
                #if CONST['PYTHON'] == 2:  # Python 2 compatibility
                #    f.write("\n{}\n".format(json.dumps(data, ensure_ascii=False, indent=4)))  # Write the JSON data with 4-space indentation
                #else:
                f.write("\n{}\n".format(json.dumps(data, indent=4)))  # Write the JSON data with 4-space indentation
        except:  # Error while opening the file
            print("    Error opening file {} for writing".format(self.file_name))  # Log the error to the console
        finally:  # Always, whether there was an exception or not
            self.lock.release()  # Release the mutex
    # Read the content of a JSON file (the JSON data)
    def read_content(self):
        read = self.read()  # Read the whole JSON file
        if read != {}:  # If the read is not empty
            return read['Content']  # Return the content of the read (JSON data)
        else:  # Empty read
            return read  # Return the empty read
    # Append new JSON data to the content of a JSON file
    def write_content(self, data):
        # Create the directory that holds the data files if it does not exist yet
        directory = os.path.dirname(self.file_name)  # Directory that contains the file
        if not os.path.exists(directory):  # If the directory (path) does not exist
            os.makedirs(directory)  # Create the directory
        try:  # Try to open the file
            self.lock.acquire()  # Acquire the mutex
            if os.path.exists(self.file_name) and os.stat(self.file_name).st_size:  # If the file exists and is not empty
                with open(self.file_name, "r") as f:  # Open the file for reading
                    content = json.load(f, object_pairs_hook=OrderedDict)  # Parse the whole file into an ordered dictionary
                content['Content'].append(data)  # Append the new data to the JSON content
                with open(self.file_name, "w") as f:  # Open the file for writing (overwrites)
                    f.write("\n{}\n".format(json.dumps(content, indent=4)))  # Write the JSON data with 4-space indentation
            else:  # The file does not exist or is empty
                with open(self.file_name, "w") as f:  # Open the file for writing (overwrites)
                    f.write('\n{\n    "Content": []\n}\n')  # Write the basic content structure
                with open(self.file_name, "r") as f:  # Open the file for reading
                    content = json.load(f)  # Parse the whole file
                content['Content'].append(data)  # Append the data to the JSON content
                with open(self.file_name, "w") as f:  # Open the file for writing (overwrites)
                    f.write("\n{}\n".format(json.dumps(content, indent=4)))  # Write the JSON data with 4-space indentation
        except IOError as e:
            print("    I/O error({0}): {1}".format(e.errno, e.strerror))
        except ValueError:
            print("    Data conversion error")
        except:  # Error while opening the file
            print("    Error opening file {} for writing".format(self.file_name))  # Log the error to the console
        finally:  # Always, whether there was an exception or not
            self.lock.release()  # Release the mutex
    # Update data of a JSON file
    # [Note: each JSON record needs at least 1 unique identifier element (uide); if not, the update happens on the first element found]
    def update(self, data, uide):
        file_data = self.read()  # Read the whole JSON file
        # Look for the position of the data inside the JSON content
        found = 0  # Position-found flag set to 0
        i = 0  # Initial data position set to 0
        for msg in file_data['Content']:  # For each message in the JSON file
            if data[uide] == msg[uide]:  # If the message has the searched UIDE
                found = 1  # Mark that the position was found
                break  # Break out of the loop
            i = i + 1  # Increment the data position
        if found:  # If data with the searched UIDE was found in the JSON file
            file_data['Content'][i] = data  # Update the JSON data that holds that UIDE
            self.write(file_data)  # Write the updated data to the JSON file
        else:  # No JSON data with that UIDE was found
            print("    Error: UIDE not found in the file, or the file does not exist")  # Log the error to the console
    # Update nested data of the records of a JSON file
    # [Note: each JSON record needs at least 1 unique identifier element (uide); if not, the update happens on the first element found]
    def update_twice(self, data, uide1, uide2):
        file_data = self.read()  # Read the whole JSON file
        # Look for the position of the data inside the JSON content
        found = 0  # Position-found flag set to 0
        i = 0  # Initial data position set to 0
        for msg in file_data['Content']:  # For each message in the JSON file
            if (data[uide1] == msg[uide1]) and (data[uide2] == msg[uide2]):  # If the message has the searched UIDEs
                found = 1  # Mark that the position was found
                break  # Break out of the loop
            i = i + 1  # Increment the data position
        if found:  # If data with the searched UIDEs was found in the JSON file
            file_data['Content'][i] = data  # Update the JSON data that holds those UIDEs
            self.write(file_data)  # Write the updated data to the JSON file
        else:  # No JSON data with those UIDEs was found
            print("    Error: UIDE not found in the file, or the file does not exist")  # Log the error to the console
    # Clear all the data of a JSON file (currently unused)
    def clear_content(self):
        try:  # Try to open the file
            self.lock.acquire()  # Acquire the mutex
            if os.path.exists(self.file_name) and os.stat(self.file_name).st_size:  # If the file exists and is not empty
                with open(self.file_name, "w") as f:  # Open the file for writing (overwrites)
                    f.write('\n{\n    "Content": [\n    ]\n}\n')  # Write the basic content structure
        except:  # Error while opening the file
            print("    Error opening file {} for writing".format(self.file_name))  # Log the error to the console
        finally:  # Always, whether there was an exception or not
            self.lock.release()  # Release the mutex
    # Delete a JSON file (currently unused)
    def delete(self):
        self.lock.acquire()  # Acquire the mutex
        if os.path.exists(self.file_name):  # If the file exists
            os.remove(self.file_name)  # Delete the file
        self.lock.release()  # Release the mutex
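# Hedged usage sketch (the path and record fields are illustrative): append
# one record and read the whole content back.
def _tsjson_demo():
    db = TSjson('data/demo.json')
    db.write_content({'User_id': 1234, 'Points': 1})
    return db.read_content()  # -> [OrderedDict([('User_id', 1234), ('Points', 1)])]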
| gpl-3.0 | -775,332,901,642,964,100 | 54.208333 | 227 | 0.624629 | false |
powlo/script.module.pydevd | lib/test_pydevd_reload/test_pydevd_reload.py | 53 | 11450 | import os # @NoMove
import sys # @NoMove
sys.path.insert(0, os.path.realpath(os.path.abspath('..')))
import pydevd_reload
import tempfile
import unittest
SAMPLE_CODE = """
class C:
def foo(self):
return 0
@classmethod
def bar(cls):
return (0, 0)
@staticmethod
def stomp():
return (0, 0, 0)
def unchanged(self):
return 'unchanged'
"""
class Test(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.tempdir = None
self.save_path = None
self.tempdir = tempfile.mkdtemp()
self.save_path = list(sys.path)
sys.path.append(self.tempdir)
try:
del sys.modules['x']
except:
pass
def tearDown(self):
unittest.TestCase.tearDown(self)
sys.path = self.save_path
try:
del sys.modules['x']
except:
pass
def make_mod(self, name="x", repl=None, subst=None, sample=SAMPLE_CODE):
fn = os.path.join(self.tempdir, name + ".py")
f = open(fn, "w")
if repl is not None and subst is not None:
sample = sample.replace(repl, subst)
try:
f.write(sample)
finally:
f.close()
def test_pydevd_reload(self):
self.make_mod()
import x
C = x.C
COut = C
Cfoo = C.foo
Cbar = C.bar
Cstomp = C.stomp
def check2(expected):
C = x.C
Cfoo = C.foo
Cbar = C.bar
Cstomp = C.stomp
b = C()
bfoo = b.foo
self.assertEqual(expected, b.foo())
self.assertEqual(expected, bfoo())
self.assertEqual(expected, Cfoo(b))
def check(expected):
b = COut()
bfoo = b.foo
self.assertEqual(expected, b.foo())
self.assertEqual(expected, bfoo())
self.assertEqual(expected, Cfoo(b))
self.assertEqual((expected, expected), Cbar())
self.assertEqual((expected, expected, expected), Cstomp())
check2(expected)
check(0)
# modify mod and reload
count = 0
while count < 1:
count += 1
self.make_mod(repl="0", subst=str(count))
pydevd_reload.xreload(x)
check(count)
def test_pydevd_reload2(self):
self.make_mod()
import x
c = x.C()
cfoo = c.foo
self.assertEqual(0, c.foo())
self.assertEqual(0, cfoo())
self.make_mod(repl="0", subst='1')
pydevd_reload.xreload(x)
self.assertEqual(1, c.foo())
self.assertEqual(1, cfoo())
def test_pydevd_reload3(self):
class F:
def m1(self):
return 1
class G:
def m1(self):
return 2
self.assertEqual(F().m1(), 1)
pydevd_reload.Reload(None)._update(None, None, F, G)
self.assertEqual(F().m1(), 2)
def test_pydevd_reload4(self):
class F:
pass
F.m1 = lambda a:None
class G:
pass
G.m1 = lambda a:10
self.assertEqual(F().m1(), None)
pydevd_reload.Reload(None)._update(None, None, F, G)
self.assertEqual(F().m1(), 10)
def test_if_code_obj_equals(self):
class F:
def m1(self):
return 1
class G:
def m1(self):
return 1
class H:
def m1(self):
return 2
if hasattr(F.m1, 'func_code'):
self.assertTrue(pydevd_reload.code_objects_equal(F.m1.func_code, G.m1.func_code))
self.assertFalse(pydevd_reload.code_objects_equal(F.m1.func_code, H.m1.func_code))
else:
self.assertTrue(pydevd_reload.code_objects_equal(F.m1.__code__, G.m1.__code__))
self.assertFalse(pydevd_reload.code_objects_equal(F.m1.__code__, H.m1.__code__))
def test_metaclass(self):
class Meta(type):
def __init__(cls, name, bases, attrs):
super(Meta, cls).__init__(name, bases, attrs)
class F:
__metaclass__ = Meta
def m1(self):
return 1
class G:
__metaclass__ = Meta
def m1(self):
return 2
self.assertEqual(F().m1(), 1)
pydevd_reload.Reload(None)._update(None, None, F, G)
self.assertEqual(F().m1(), 2)
def test_change_hierarchy(self):
class F(object):
def m1(self):
return 1
class B(object):
def super_call(self):
return 2
class G(B):
def m1(self):
return self.super_call()
self.assertEqual(F().m1(), 1)
old = pydevd_reload.notify_error
self._called = False
def on_error(*args):
self._called = True
try:
pydevd_reload.notify_error = on_error
pydevd_reload.Reload(None)._update(None, None, F, G)
self.assertTrue(self._called)
finally:
pydevd_reload.notify_error = old
def test_change_hierarchy_old_style(self):
class F:
def m1(self):
return 1
class B:
def super_call(self):
return 2
class G(B):
def m1(self):
return self.super_call()
self.assertEqual(F().m1(), 1)
old = pydevd_reload.notify_error
self._called = False
def on_error(*args):
self._called = True
try:
pydevd_reload.notify_error = on_error
pydevd_reload.Reload(None)._update(None, None, F, G)
self.assertTrue(self._called)
finally:
pydevd_reload.notify_error = old
def test_create_class(self):
SAMPLE_CODE1 = """
class C:
def foo(self):
return 0
"""
# Creating a new class and using it from old class
SAMPLE_CODE2 = """
class B:
pass
class C:
def foo(self):
return B
"""
self.make_mod(sample=SAMPLE_CODE1)
import x
foo = x.C().foo
self.assertEqual(foo(), 0)
self.make_mod(sample=SAMPLE_CODE2)
pydevd_reload.xreload(x)
self.assertEqual(foo().__name__, 'B')
def test_create_class2(self):
SAMPLE_CODE1 = """
class C(object):
def foo(self):
return 0
"""
# Creating a new class and using it from old class
SAMPLE_CODE2 = """
class B(object):
pass
class C(object):
def foo(self):
return B
"""
self.make_mod(sample=SAMPLE_CODE1)
import x
foo = x.C().foo
self.assertEqual(foo(), 0)
self.make_mod(sample=SAMPLE_CODE2)
pydevd_reload.xreload(x)
self.assertEqual(foo().__name__, 'B')
def test_parent_function(self):
SAMPLE_CODE1 = """
class B(object):
def foo(self):
return 0
class C(B):
def call(self):
return self.foo()
"""
# Creating a new class and using it from old class
SAMPLE_CODE2 = """
class B(object):
def foo(self):
return 0
def bar(self):
return 'bar'
class C(B):
def call(self):
return self.bar()
"""
self.make_mod(sample=SAMPLE_CODE1)
import x
call = x.C().call
self.assertEqual(call(), 0)
self.make_mod(sample=SAMPLE_CODE2)
pydevd_reload.xreload(x)
self.assertEqual(call(), 'bar')
def test_update_constant(self):
SAMPLE_CODE1 = """
CONSTANT = 1
class B(object):
def foo(self):
return CONSTANT
"""
SAMPLE_CODE2 = """
CONSTANT = 2
class B(object):
def foo(self):
return CONSTANT
"""
self.make_mod(sample=SAMPLE_CODE1)
import x
foo = x.B().foo
self.assertEqual(foo(), 1)
self.make_mod(sample=SAMPLE_CODE2)
pydevd_reload.xreload(x)
self.assertEqual(foo(), 1) #Just making it explicit we don't reload constants.
def test_update_constant_with_custom_code(self):
SAMPLE_CODE1 = """
CONSTANT = 1
class B(object):
def foo(self):
return CONSTANT
"""
SAMPLE_CODE2 = """
CONSTANT = 2
def __xreload_old_new__(namespace, name, old, new):
if name == 'CONSTANT':
namespace[name] = new
class B(object):
def foo(self):
return CONSTANT
"""
self.make_mod(sample=SAMPLE_CODE1)
import x
foo = x.B().foo
self.assertEqual(foo(), 1)
self.make_mod(sample=SAMPLE_CODE2)
pydevd_reload.xreload(x)
self.assertEqual(foo(), 2) #Actually updated it now!
def test_reload_custom_code_after_changes(self):
SAMPLE_CODE1 = """
CONSTANT = 1
class B(object):
def foo(self):
return CONSTANT
"""
SAMPLE_CODE2 = """
CONSTANT = 1
def __xreload_after_reload_update__(namespace):
namespace['CONSTANT'] = 2
class B(object):
def foo(self):
return CONSTANT
"""
self.make_mod(sample=SAMPLE_CODE1)
import x
foo = x.B().foo
self.assertEqual(foo(), 1)
self.make_mod(sample=SAMPLE_CODE2)
pydevd_reload.xreload(x)
self.assertEqual(foo(), 2) #Actually updated it now!
def test_reload_custom_code_after_changes_in_class(self):
SAMPLE_CODE1 = """
class B(object):
CONSTANT = 1
def foo(self):
return self.CONSTANT
"""
SAMPLE_CODE2 = """
class B(object):
CONSTANT = 1
@classmethod
def __xreload_after_reload_update__(cls):
cls.CONSTANT = 2
def foo(self):
return self.CONSTANT
"""
self.make_mod(sample=SAMPLE_CODE1)
import x
foo = x.B().foo
self.assertEqual(foo(), 1)
self.make_mod(sample=SAMPLE_CODE2)
pydevd_reload.xreload(x)
self.assertEqual(foo(), 2) #Actually updated it now!
    def test_update_class_constant_with_custom_code(self):  # renamed: the original name duplicated the test above and shadowed it
SAMPLE_CODE1 = """
class B(object):
CONSTANT = 1
def foo(self):
return self.CONSTANT
"""
SAMPLE_CODE2 = """
class B(object):
CONSTANT = 2
def __xreload_old_new__(cls, name, old, new):
if name == 'CONSTANT':
cls.CONSTANT = new
__xreload_old_new__ = classmethod(__xreload_old_new__)
def foo(self):
return self.CONSTANT
"""
self.make_mod(sample=SAMPLE_CODE1)
import x
foo = x.B().foo
self.assertEqual(foo(), 1)
self.make_mod(sample=SAMPLE_CODE2)
pydevd_reload.xreload(x)
self.assertEqual(foo(), 2) #Actually updated it now!
def test_update_with_slots(self):
SAMPLE_CODE1 = """
class B(object):
__slots__ = ['bar']
"""
SAMPLE_CODE2 = """
class B(object):
__slots__ = ['bar', 'foo']
def m1(self):
self.bar = 10
return 1
"""
self.make_mod(sample=SAMPLE_CODE1)
import x
B = x.B
self.make_mod(sample=SAMPLE_CODE2)
pydevd_reload.xreload(x)
b = B()
self.assertEqual(1, b.m1())
self.assertEqual(10, b.bar)
self.assertRaises(Exception, setattr, b, 'foo', 20) #__slots__ can't be updated
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.test_reload_custom_code_after_changes_in_class']
unittest.main()
| epl-1.0 | 6,842,175,070,290,498,000 | 21.189922 | 94 | 0.529258 | false |
smiller171/ansible-modules-core | system/service.py | 8 | 57233 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: service
author:
- "Ansible Core Team"
- "Michael DeHaan"
version_added: "0.1"
short_description: Manage services.
description:
- Controls services on remote hosts. Supported init systems include BSD init,
OpenRC, SysV, Solaris SMF, systemd, upstart.
options:
name:
required: true
description:
- Name of the service.
state:
required: false
choices: [ started, stopped, restarted, reloaded ]
description:
- C(started)/C(stopped) are idempotent actions that will not run
commands unless necessary. C(restarted) will always bounce the
service. C(reloaded) will always reload. B(At least one of state
and enabled are required.)
sleep:
required: false
version_added: "1.3"
description:
- If the service is being C(restarted) then sleep this many seconds
between the stop and start command. This helps to workaround badly
behaving init scripts that exit immediately after signaling a process
to stop.
pattern:
required: false
version_added: "0.7"
description:
- If the service does not respond to the status command, name a
substring to look for as would be found in the output of the I(ps)
command as a stand-in for a status result. If the string is found,
the service will be assumed to be running.
enabled:
required: false
choices: [ "yes", "no" ]
description:
- Whether the service should start on boot. B(At least one of state and
enabled are required.)
runlevel:
required: false
default: 'default'
description:
- "For OpenRC init scripts (ex: Gentoo) only. The runlevel that this service belongs to."
arguments:
description:
- Additional arguments provided on the command line
aliases: [ 'args' ]
must_exist:
required: false
default: true
version_added: "2.0"
description:
- Avoid a module failure if the named service does not exist. Useful
for opportunistically starting/stopping/restarting a list of
potential services.
'''
EXAMPLES = '''
# Example action to start service httpd, if not running
- service: name=httpd state=started
# Example action to stop service httpd, if running
- service: name=httpd state=stopped
# Example action to restart service httpd, in all cases
- service: name=httpd state=restarted
# Example action to reload service httpd, in all cases
- service: name=httpd state=reloaded
# Example action to enable service httpd, and not touch the running state
- service: name=httpd enabled=yes
# Example action to start service foo, based on running process /usr/bin/foo
- service: name=foo pattern=/usr/bin/foo state=started
# Example action to restart network service for interface eth0
- service: name=network state=restarted args=eth0
# Example action to restart nova-compute if it exists
- service: name=nova-compute state=restarted must_exist=no
'''
import platform
import os
import re
import tempfile
import shlex
import select
import time
import string
import glob
# The distutils module is not shipped with SUNWPython on Solaris.
# It's in the SUNWPython-devel package which also contains development files
# that don't belong on production boxes. Since our Solaris code doesn't
# depend on LooseVersion, do not import it on Solaris.
if platform.system() != 'SunOS':
from distutils.version import LooseVersion
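# Hedged sketch of the double-fork daemonization used by
# Service.execute_command() below: fork, detach with setsid(), then fork again
# so the worker can never reacquire a controlling terminal (stdio redirection
# and the result pipe are omitted here).
def _daemonize_sketch():
    if os.fork() > 0:
        os._exit(0)   # first parent exits
    os.setsid()       # become session leader, drop the controlling tty
    os.chdir("/")
    if os.fork() > 0:
        os._exit(0)   # intermediate parent exits; the grandchild carries on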
class Service(object):
"""
This is the generic Service manipulation class that is subclassed
based on platform.
A subclass should override the following action methods:-
- get_service_tools
- service_enable
- get_service_status
- service_control
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
def __new__(cls, *args, **kwargs):
return load_platform_subclass(Service, args, kwargs)
def __init__(self, module):
self.module = module
self.name = module.params['name']
self.state = module.params['state']
self.sleep = module.params['sleep']
self.pattern = module.params['pattern']
self.enable = module.params['enabled']
self.runlevel = module.params['runlevel']
self.changed = False
self.running = None
self.crashed = None
self.action = None
self.svc_cmd = None
self.svc_initscript = None
self.svc_initctl = None
self.enable_cmd = None
self.arguments = module.params.get('arguments', '')
self.rcconf_file = None
self.rcconf_key = None
self.rcconf_value = None
self.svc_change = False
# select whether we dump additional debug info through syslog
self.syslogging = False
# ===========================================
# Platform specific methods (must be replaced by subclass).
def get_service_tools(self):
self.module.fail_json(msg="get_service_tools not implemented on target platform")
def service_enable(self):
self.module.fail_json(msg="service_enable not implemented on target platform")
def get_service_status(self):
self.module.fail_json(msg="get_service_status not implemented on target platform")
def service_control(self):
self.module.fail_json(msg="service_control not implemented on target platform")
# ===========================================
# Generic methods that should be used on all platforms.
def execute_command(self, cmd, daemonize=False):
if self.syslogging:
syslog.openlog('ansible-%s' % os.path.basename(__file__))
syslog.syslog(syslog.LOG_NOTICE, 'Command %s, daemonize %r' % (cmd, daemonize))
# Most things don't need to be daemonized
if not daemonize:
return self.module.run_command(cmd)
# This is complex because daemonization is hard for people.
# What we do is daemonize a part of this module, the daemon runs the
# command, picks up the return code and output, and returns it to the
# main process.
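        # The classic double-fork daemonization pattern follows: fork, setsid()
        # to drop the controlling terminal, then fork again so the grandchild
        # can never reacquire one; the pipe is the only channel back to us.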
pipe = os.pipe()
pid = os.fork()
if pid == 0:
os.close(pipe[0])
# Set stdin/stdout/stderr to /dev/null
fd = os.open(os.devnull, os.O_RDWR)
if fd != 0:
os.dup2(fd, 0)
if fd != 1:
os.dup2(fd, 1)
if fd != 2:
os.dup2(fd, 2)
if fd not in (0, 1, 2):
os.close(fd)
# Make us a daemon. Yes, that's all it takes.
pid = os.fork()
if pid > 0:
os._exit(0)
os.setsid()
os.chdir("/")
pid = os.fork()
if pid > 0:
os._exit(0)
# Start the command
if isinstance(cmd, basestring):
cmd = shlex.split(cmd)
p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1]))
stdout = ""
stderr = ""
fds = [p.stdout, p.stderr]
# Wait for all output, or until the main process is dead and its output is done.
while fds:
rfd, wfd, efd = select.select(fds, [], fds, 1)
if not (rfd + wfd + efd) and p.poll() is not None:
break
if p.stdout in rfd:
dat = os.read(p.stdout.fileno(), 4096)
if not dat:
fds.remove(p.stdout)
stdout += dat
if p.stderr in rfd:
dat = os.read(p.stderr.fileno(), 4096)
if not dat:
fds.remove(p.stderr)
stderr += dat
p.wait()
# Return a JSON blob to parent
os.write(pipe[1], json.dumps([p.returncode, stdout, stderr]))
os.close(pipe[1])
os._exit(0)
elif pid == -1:
self.module.fail_json(msg="unable to fork")
else:
os.close(pipe[1])
os.waitpid(pid, 0)
# Wait for data from daemon process and process it.
data = ""
while True:
rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
if pipe[0] in rfd:
dat = os.read(pipe[0], 4096)
if not dat:
break
data += dat
return json.loads(data)
def check_ps(self):
# Set ps flags
if platform.system() == 'SunOS':
psflags = '-ef'
else:
psflags = 'auxww'
# Find ps binary
psbin = self.module.get_bin_path('ps', True)
(rc, psout, pserr) = self.execute_command('%s %s' % (psbin, psflags))
# If rc is 0, set running as appropriate
if rc == 0:
self.running = False
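            # Illustrative match: with pattern='/usr/bin/foo', a ps line such
            # as 'root 1234 ... /usr/bin/foo --daemon' marks the service as
            # running; the 'pattern=' test filters out our own invocation.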
lines = psout.split("\n")
for line in lines:
                if self.pattern in line and "pattern=" not in line:
# so as to not confuse ./hacking/test-module
self.running = True
break
def check_service_changed(self):
if self.state and self.running is None:
self.module.fail_json(msg="failed determining service state, possible typo of service name?")
# Find out if state has changed
if not self.running and self.state in ["started", "running", "reloaded"]:
self.svc_change = True
elif self.running and self.state in ["stopped","reloaded"]:
self.svc_change = True
elif self.state == "restarted":
self.svc_change = True
if self.module.check_mode and self.svc_change:
self.module.exit_json(changed=True, msg='service state changed')
def modify_service_state(self):
# Only do something if state will change
if self.svc_change:
# Control service
if self.state in ['started', 'running']:
self.action = "start"
elif not self.running and self.state == 'reloaded':
self.action = "start"
elif self.state == 'stopped':
self.action = "stop"
elif self.state == 'reloaded':
self.action = "reload"
elif self.state == 'restarted':
self.action = "restart"
if self.module.check_mode:
self.module.exit_json(changed=True, msg='changing service state')
return self.service_control()
else:
# If nothing needs to change just say all is well
rc = 0
err = ''
out = ''
return rc, out, err
def service_enable_rcconf(self):
if self.rcconf_file is None or self.rcconf_key is None or self.rcconf_value is None:
self.module.fail_json(msg="service_enable_rcconf() requires rcconf_file, rcconf_key and rcconf_value")
self.changed = None
entry = '%s="%s"\n' % (self.rcconf_key, self.rcconf_value)
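        # e.g. with rcconf_key 'sshd_enable' and rcconf_value 'YES' this
        # builds the rc.conf line: sshd_enable="YES" (illustrative names)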
RCFILE = open(self.rcconf_file, "r")
new_rc_conf = []
# Build a list containing the possibly modified file.
for rcline in RCFILE:
# Parse line removing whitespaces, quotes, etc.
rcarray = shlex.split(rcline, comments=True)
if len(rcarray) >= 1 and '=' in rcarray[0]:
(key, value) = rcarray[0].split("=", 1)
if key == self.rcconf_key:
if value.upper() == self.rcconf_value:
# Since the proper entry already exists we can stop iterating.
self.changed = False
break
else:
# We found the key but the value is wrong, replace with new entry.
rcline = entry
self.changed = True
# Add line to the list.
new_rc_conf.append(rcline)
# We are done with reading the current rc.conf, close it.
RCFILE.close()
# If we did not see any trace of our entry we need to add it.
if self.changed is None:
new_rc_conf.append(entry)
self.changed = True
if self.changed is True:
if self.module.check_mode:
self.module.exit_json(changed=True, msg="changing service enablement")
# Create a temporary file next to the current rc.conf (so we stay on the same filesystem).
# This way the replacement operation is atomic.
rcconf_dir = os.path.dirname(self.rcconf_file)
rcconf_base = os.path.basename(self.rcconf_file)
(TMP_RCCONF, tmp_rcconf_file) = tempfile.mkstemp(dir=rcconf_dir, prefix="%s-" % rcconf_base)
# Write out the contents of the list into our temporary file.
for rcline in new_rc_conf:
os.write(TMP_RCCONF, rcline)
# Close temporary file.
os.close(TMP_RCCONF)
# Replace previous rc.conf.
self.module.atomic_move(tmp_rcconf_file, self.rcconf_file)
# ===========================================
# Subclass: Linux
class LinuxService(Service):
"""
This is the Linux Service manipulation class - it is currently supporting
a mixture of binaries and init scripts for controlling services started at
boot, as well as for controlling the current state.
"""
platform = 'Linux'
distribution = None
def get_service_tools(self):
paths = [ '/sbin', '/usr/sbin', '/bin', '/usr/bin' ]
binaries = [ 'service', 'chkconfig', 'update-rc.d', 'rc-service', 'rc-update', 'initctl', 'systemctl', 'start', 'stop', 'restart', 'insserv' ]
initpaths = [ '/etc/init.d' ]
location = dict()
for binary in binaries:
location[binary] = self.module.get_bin_path(binary)
for initdir in initpaths:
initscript = "%s/%s" % (initdir,self.name)
if os.path.isfile(initscript):
self.svc_initscript = initscript
def check_systemd():
# verify systemd is installed (by finding systemctl)
if not location.get('systemctl', False):
return False
# Check if init is the systemd command, using comm as cmdline could be symlink
try:
f = open('/proc/1/comm', 'r')
            except IOError:
# If comm doesn't exist, old kernel, no systemd
return False
for line in f:
if 'systemd' in line:
return True
return False
# Locate a tool to enable/disable a service
if location.get('systemctl',False) and check_systemd():
# service is managed by systemd
self.__systemd_unit = self.name
self.svc_cmd = location['systemctl']
self.enable_cmd = location['systemctl']
elif location.get('initctl', False) and os.path.exists("/etc/init/%s.conf" % self.name):
# service is managed by upstart
self.enable_cmd = location['initctl']
# set the upstart version based on the output of 'initctl version'
self.upstart_version = LooseVersion('0.0.0')
try:
version_re = re.compile(r'\(upstart (.*)\)')
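                # 'initctl version' typically prints something like
                # 'init (upstart 1.12.1)'; we extract '1.12.1' (illustrative)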
rc,stdout,stderr = self.module.run_command('initctl version')
if rc == 0:
res = version_re.search(stdout)
if res:
self.upstart_version = LooseVersion(res.groups()[0])
except:
pass # we'll use the default of 0.0.0
if location.get('start', False):
# upstart -- rather than being managed by one command, start/stop/restart are actual commands
self.svc_cmd = ''
elif location.get('rc-service', False):
# service is managed by OpenRC
self.svc_cmd = location['rc-service']
self.enable_cmd = location['rc-update']
return # already have service start/stop tool too!
elif self.svc_initscript:
# service is managed by with SysV init scripts
if location.get('update-rc.d', False):
# and uses update-rc.d
self.enable_cmd = location['update-rc.d']
elif location.get('insserv', None):
# and uses insserv
self.enable_cmd = location['insserv']
elif location.get('chkconfig', False):
# and uses chkconfig
self.enable_cmd = location['chkconfig']
if self.enable_cmd is None:
if self.module.params['must_exist']:
self.module.fail_json(msg="no service or tool found for: %s" % self.name)
else:
# exiting without change on non-existent service
self.module.exit_json(changed=False, exists=False)
# If no service control tool selected yet, try to see if 'service' is available
if self.svc_cmd is None and location.get('service', False):
self.svc_cmd = location['service']
# couldn't find anything yet
if self.svc_cmd is None and not self.svc_initscript:
if self.module.params['must_exist']:
                self.module.fail_json(msg="cannot find 'service' binary or init script for service; possible typo in service name? Aborting.")
else:
# exiting without change on non-existent service
self.module.exit_json(changed=False, exists=False)
if location.get('initctl', False):
self.svc_initctl = location['initctl']
def get_systemd_service_enabled(self):
(rc, out, err) = self.execute_command("%s is-enabled %s" % (self.enable_cmd, self.__systemd_unit,))
if rc == 0:
return True
return False
def get_systemd_status_dict(self):
(rc, out, err) = self.execute_command("%s show %s" % (self.enable_cmd, self.__systemd_unit,))
if rc != 0:
self.module.fail_json(msg='failure %d running systemctl show for %r: %s' % (rc, self.__systemd_unit, err))
key = None
value_buffer = []
status_dict = {}
for line in out.splitlines():
if not key:
key, value = line.split('=', 1)
# systemd fields that are shell commands can be multi-line
# We take a value that begins with a "{" as the start of
# a shell command and a line that ends with "}" as the end of
# the command
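                # Illustrative 'systemctl show' lines:
                #   ActiveState=active
                #   ExecStart={ path=/usr/sbin/sshd ; argv[]=/usr/sbin/sshd -D ; ... }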
if value.lstrip().startswith('{'):
if value.rstrip().endswith('}'):
status_dict[key] = value
key = None
else:
value_buffer.append(value)
else:
status_dict[key] = value
key = None
else:
if line.rstrip().endswith('}'):
status_dict[key] = '\n'.join(value_buffer)
key = None
else:
                    value_buffer.append(line)
return status_dict
def get_systemd_service_status(self):
d = self.get_systemd_status_dict()
if d.get('ActiveState') == 'active':
# run-once services (for which a single successful exit indicates
# that they are running as designed) should not be restarted here.
# Thus, we are not checking d['SubState'].
self.running = True
self.crashed = False
elif d.get('ActiveState') == 'failed':
self.running = False
self.crashed = True
elif d.get('ActiveState') is None:
self.module.fail_json(msg='No ActiveState value in systemctl show output for %r' % (self.__systemd_unit,))
else:
self.running = False
self.crashed = False
return self.running
def get_service_status(self):
if self.svc_cmd and self.svc_cmd.endswith('systemctl'):
return self.get_systemd_service_status()
self.action = "status"
rc, status_stdout, status_stderr = self.service_control()
# if we have decided the service is managed by upstart, we check for some additional output...
if self.svc_initctl and self.running is None:
# check the job status by upstart response
initctl_rc, initctl_status_stdout, initctl_status_stderr = self.execute_command("%s status %s" % (self.svc_initctl, self.name))
if "stop/waiting" in initctl_status_stdout:
self.running = False
elif "start/running" in initctl_status_stdout:
self.running = True
if self.svc_cmd and self.svc_cmd.endswith("rc-service") and self.running is None:
openrc_rc, openrc_status_stdout, openrc_status_stderr = self.execute_command("%s %s status" % (self.svc_cmd, self.name))
self.running = "started" in openrc_status_stdout
self.crashed = "crashed" in openrc_status_stderr
# if the job status is still not known check it by status output keywords
# Only check keywords if there's only one line of output (some init
# scripts will output verbosely in case of error and those can emit
# keywords that are picked up as false positives
if self.running is None and status_stdout.count('\n') <= 1:
# first transform the status output that could irritate keyword matching
cleanout = status_stdout.lower().replace(self.name.lower(), '')
if "stop" in cleanout:
self.running = False
elif "run" in cleanout and "not" in cleanout:
self.running = False
elif "run" in cleanout and "not" not in cleanout:
self.running = True
elif "start" in cleanout and "not" not in cleanout:
self.running = True
elif 'could not access pid file' in cleanout:
self.running = False
elif 'is dead and pid file exists' in cleanout:
self.running = False
elif 'dead but subsys locked' in cleanout:
self.running = False
elif 'dead but pid file exists' in cleanout:
self.running = False
# if the job status is still not known check it by response code
# For reference, see:
# http://refspecs.linuxbase.org/LSB_4.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html
if self.running is None:
if rc in [1, 2, 3, 4, 69]:
self.running = False
elif rc == 0:
self.running = True
# if the job status is still not known check it by special conditions
if self.running is None:
if self.name == 'iptables' and "ACCEPT" in status_stdout:
# iptables status command output is lame
# TODO: lookup if we can use a return code for this instead?
self.running = True
return self.running
def service_enable(self):
if self.enable_cmd is None:
self.module.fail_json(msg='cannot detect command to enable service %s, typo or init system potentially unknown' % self.name)
self.changed = True
action = None
#
# Upstart's initctl
#
if self.enable_cmd.endswith("initctl"):
def write_to_override_file(file_name, file_contents, ):
override_file = open(file_name, 'w')
override_file.write(file_contents)
override_file.close()
initpath = '/etc/init'
if self.upstart_version >= LooseVersion('0.6.7'):
                manreg = re.compile(r'^manual\s*$', re.M | re.I)
config_line = 'manual\n'
else:
                manreg = re.compile(r'^start on manual\s*$', re.M | re.I)
config_line = 'start on manual\n'
conf_file_name = "%s/%s.conf" % (initpath, self.name)
override_file_name = "%s/%s.override" % (initpath, self.name)
# Check to see if files contain the manual line in .conf and fail if True
if manreg.search(open(conf_file_name).read()):
self.module.fail_json(msg="manual stanza not supported in a .conf file")
self.changed = False
if os.path.exists(override_file_name):
override_file_contents = open(override_file_name).read()
# Remove manual stanza if present and service enabled
if self.enable and manreg.search(override_file_contents):
self.changed = True
override_state = manreg.sub('', override_file_contents)
# Add manual stanza if not present and service disabled
elif not (self.enable) and not (manreg.search(override_file_contents)):
self.changed = True
override_state = '\n'.join((override_file_contents, config_line))
# service already in desired state
else:
pass
# Add file with manual stanza if service disabled
elif not (self.enable):
self.changed = True
override_state = config_line
else:
# service already in desired state
pass
if self.module.check_mode:
self.module.exit_json(changed=self.changed)
# The initctl method of enabling and disabling services is much
# different than for the other service methods. So actually
# committing the change is done in this conditional and then we
# skip the boilerplate at the bottom of the method
if self.changed:
try:
write_to_override_file(override_file_name, override_state)
except:
self.module.fail_json(msg='Could not modify override file')
return
#
# SysV's chkconfig
#
if self.enable_cmd.endswith("chkconfig"):
if self.enable:
action = 'on'
else:
action = 'off'
(rc, out, err) = self.execute_command("%s --list %s" % (self.enable_cmd, self.name))
if 'chkconfig --add %s' % self.name in err:
self.execute_command("%s --add %s" % (self.enable_cmd, self.name))
(rc, out, err) = self.execute_command("%s --list %s" % (self.enable_cmd, self.name))
            if self.name not in out:
self.module.fail_json(msg="service %s does not support chkconfig" % self.name)
state = out.split()[-1]
# Check if we're already in the correct state
if "3:%s" % action in out and "5:%s" % action in out:
self.changed = False
return
#
# Systemd's systemctl
#
if self.enable_cmd.endswith("systemctl"):
if self.enable:
action = 'enable'
else:
action = 'disable'
# Check if we're already in the correct state
service_enabled = self.get_systemd_service_enabled()
# self.changed should already be true
if self.enable == service_enabled:
self.changed = False
return
#
# OpenRC's rc-update
#
if self.enable_cmd.endswith("rc-update"):
if self.enable:
action = 'add'
else:
action = 'delete'
(rc, out, err) = self.execute_command("%s show" % self.enable_cmd)
for line in out.splitlines():
service_name, runlevels = line.split('|')
service_name = service_name.strip()
if service_name != self.name:
continue
runlevels = re.split(r'\s+', runlevels)
# service already enabled for the runlevel
if self.enable and self.runlevel in runlevels:
self.changed = False
# service already disabled for the runlevel
elif not self.enable and self.runlevel not in runlevels:
self.changed = False
break
else:
# service already disabled altogether
if not self.enable:
self.changed = False
if not self.changed:
return
#
# update-rc.d style
#
if self.enable_cmd.endswith("update-rc.d"):
enabled = False
slinks = glob.glob('/etc/rc?.d/S??' + self.name)
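            # An S?? symlink such as /etc/rc2.d/S20<name> (illustrative path)
            # means the service is enabled at boot.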
if slinks:
enabled = True
if self.enable != enabled:
self.changed = True
if self.enable:
action = 'enable'
klinks = glob.glob('/etc/rc?.d/K??' + self.name)
if not klinks:
(rc, out, err) = self.execute_command("%s %s defaults" % (self.enable_cmd, self.name))
if rc != 0:
if err:
self.module.fail_json(msg=err)
else:
                            self.module.fail_json(msg=out)
else:
action = 'disable'
if self.module.check_mode:
rc = 0
return
(rc, out, err) = self.execute_command("%s %s %s" % (self.enable_cmd, self.name, action))
if rc != 0:
if err:
self.module.fail_json(msg=err)
else:
                    self.module.fail_json(msg=out)
else:
self.changed = False
return
#
# insserv (Debian 7)
#
if self.enable_cmd.endswith("insserv"):
if self.enable:
(rc, out, err) = self.execute_command("%s -n %s" % (self.enable_cmd, self.name))
else:
(rc, out, err) = self.execute_command("%s -nr %s" % (self.enable_cmd, self.name))
self.changed = False
for line in err.splitlines():
if self.enable and line.find('enable service') != -1:
self.changed = True
break
if not self.enable and line.find('remove service') != -1:
self.changed = True
break
if self.module.check_mode:
self.module.exit_json(changed=self.changed)
if not self.changed:
return
if self.enable:
(rc, out, err) = self.execute_command("%s %s" % (self.enable_cmd, self.name))
if (rc != 0) or (err != ''):
self.module.fail_json(msg=("Failed to install service. rc: %s, out: %s, err: %s" % (rc, out, err)))
return (rc, out, err)
else:
(rc, out, err) = self.execute_command("%s -r %s" % (self.enable_cmd, self.name))
if (rc != 0) or (err != ''):
self.module.fail_json(msg=("Failed to remove service. rc: %s, out: %s, err: %s" % (rc, out, err)))
return (rc, out, err)
#
# If we've gotten to the end, the service needs to be updated
#
self.changed = True
# we change argument order depending on real binary used:
# rc-update and systemctl need the argument order reversed
if self.enable_cmd.endswith("rc-update"):
args = (self.enable_cmd, action, self.name + " " + self.runlevel)
elif self.enable_cmd.endswith("systemctl"):
args = (self.enable_cmd, action, self.__systemd_unit)
else:
args = (self.enable_cmd, self.name, action)
if self.module.check_mode:
self.module.exit_json(changed=self.changed)
(rc, out, err) = self.execute_command("%s %s %s" % args)
if rc != 0:
if err:
self.module.fail_json(msg="Error when trying to %s %s: rc=%s %s" % (action, self.name, rc, err))
else:
self.module.fail_json(msg="Failure for %s %s: rc=%s %s" % (action, self.name, rc, out))
return (rc, out, err)
def service_control(self):
# Decide what command to run
svc_cmd = ''
arguments = self.arguments
if self.svc_cmd:
if not self.svc_cmd.endswith("systemctl"):
# SysV and OpenRC take the form <cmd> <name> <action>
svc_cmd = "%s %s" % (self.svc_cmd, self.name)
else:
# systemd commands take the form <cmd> <action> <name>
svc_cmd = self.svc_cmd
arguments = "%s %s" % (self.__systemd_unit, arguments)
elif self.svc_cmd is None and self.svc_initscript:
# upstart
svc_cmd = "%s" % self.svc_initscript
# In OpenRC, if a service crashed, we need to reset its status to
# stopped with the zap command, before we can start it back.
if self.svc_cmd and self.svc_cmd.endswith('rc-service') and self.action == 'start' and self.crashed:
self.execute_command("%s zap" % svc_cmd, daemonize=True)
        if self.action != "restart":
if svc_cmd != '':
# upstart or systemd or OpenRC
rc_state, stdout, stderr = self.execute_command("%s %s %s" % (svc_cmd, self.action, arguments), daemonize=True)
else:
# SysV
rc_state, stdout, stderr = self.execute_command("%s %s %s" % (self.action, self.name, arguments), daemonize=True)
elif self.svc_cmd and self.svc_cmd.endswith('rc-service'):
# All services in OpenRC support restart.
rc_state, stdout, stderr = self.execute_command("%s %s %s" % (svc_cmd, self.action, arguments), daemonize=True)
else:
# In other systems, not all services support restart. Do it the hard way.
if svc_cmd != '':
# upstart or systemd
rc1, stdout1, stderr1 = self.execute_command("%s %s %s" % (svc_cmd, 'stop', arguments), daemonize=True)
else:
# SysV
rc1, stdout1, stderr1 = self.execute_command("%s %s %s" % ('stop', self.name, arguments), daemonize=True)
if self.sleep:
time.sleep(self.sleep)
if svc_cmd != '':
# upstart or systemd
rc2, stdout2, stderr2 = self.execute_command("%s %s %s" % (svc_cmd, 'start', arguments), daemonize=True)
else:
# SysV
rc2, stdout2, stderr2 = self.execute_command("%s %s %s" % ('start', self.name, arguments), daemonize=True)
# merge return information
if rc1 != 0 and rc2 == 0:
rc_state = rc2
stdout = stdout2
stderr = stderr2
else:
rc_state = rc1 + rc2
stdout = stdout1 + stdout2
stderr = stderr1 + stderr2
return(rc_state, stdout, stderr)
# ===========================================
# Subclass: FreeBSD
class FreeBsdService(Service):
"""
This is the FreeBSD Service manipulation class - it uses the /etc/rc.conf
file for controlling services started at boot and the 'service' binary to
check status and perform direct service manipulation.
"""
platform = 'FreeBSD'
distribution = None
def get_service_tools(self):
self.svc_cmd = self.module.get_bin_path('service', True)
if not self.svc_cmd:
self.module.fail_json(msg='unable to find service binary')
def get_service_status(self):
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.svc_cmd, self.name, 'onestatus', self.arguments))
if self.name == "pf":
self.running = "Enabled" in stdout
else:
if rc == 1:
self.running = False
elif rc == 0:
self.running = True
def service_enable(self):
if self.enable:
self.rcconf_value = "YES"
else:
self.rcconf_value = "NO"
rcfiles = [ '/etc/rc.conf','/etc/rc.conf.local', '/usr/local/etc/rc.conf' ]
for rcfile in rcfiles:
if os.path.isfile(rcfile):
self.rcconf_file = rcfile
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.svc_cmd, self.name, 'rcvar', self.arguments))
cmd = "%s %s %s %s" % (self.svc_cmd, self.name, 'rcvar', self.arguments)
rcvars = shlex.split(stdout, comments=True)
if not rcvars:
self.module.fail_json(msg="unable to determine rcvar", stdout=stdout, stderr=stderr)
# In rare cases, i.e. sendmail, rcvar can return several key=value pairs
# Usually there is just one, however. In other rare cases, i.e. uwsgi,
# rcvar can return extra uncommented data that is not at all related to
# the rcvar. We will just take the first key=value pair we come across
# and hope for the best.
for rcvar in rcvars:
if '=' in rcvar:
self.rcconf_key = rcvar.split('=')[0]
break
if self.rcconf_key is None:
self.module.fail_json(msg="unable to determine rcvar", stdout=stdout, stderr=stderr)
try:
return self.service_enable_rcconf()
except:
self.module.fail_json(msg='unable to set rcvar')
def service_control(self):
if self.action is "start":
self.action = "onestart"
if self.action is "stop":
self.action = "onestop"
if self.action is "reload":
self.action = "onereload"
return self.execute_command("%s %s %s %s" % (self.svc_cmd, self.name, self.action, self.arguments))
# ===========================================
# Subclass: OpenBSD
class OpenBsdService(Service):
"""
This is the OpenBSD Service manipulation class - it uses rcctl(8) or
/etc/rc.d scripts for service control. Enabling a service is
only supported if rcctl is present.
"""
platform = 'OpenBSD'
distribution = None
def get_service_tools(self):
self.enable_cmd = self.module.get_bin_path('rcctl')
if self.enable_cmd:
self.svc_cmd = self.enable_cmd
else:
rcdir = '/etc/rc.d'
rc_script = "%s/%s" % (rcdir, self.name)
if os.path.isfile(rc_script):
self.svc_cmd = rc_script
if not self.svc_cmd:
self.module.fail_json(msg='unable to find svc_cmd')
def get_service_status(self):
if self.enable_cmd:
rc, stdout, stderr = self.execute_command("%s %s %s" % (self.svc_cmd, 'check', self.name))
else:
rc, stdout, stderr = self.execute_command("%s %s" % (self.svc_cmd, 'check'))
if stderr:
self.module.fail_json(msg=stderr)
if rc == 1:
self.running = False
elif rc == 0:
self.running = True
def service_control(self):
if self.enable_cmd:
return self.execute_command("%s -f %s %s" % (self.svc_cmd, self.action, self.name))
else:
return self.execute_command("%s -f %s" % (self.svc_cmd, self.action))
def service_enable(self):
if not self.enable_cmd:
return super(OpenBsdService, self).service_enable()
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.enable_cmd, 'getdef', self.name, 'flags'))
if stderr:
self.module.fail_json(msg=stderr)
getdef_string = stdout.rstrip()
# Depending on the service the string returned from 'getdef' may be
# either a set of flags or the boolean YES/NO
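        # e.g. 'rcctl getdef ntpd flags' might print '-s' or just 'NO'
        # (illustrative service and values)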
if getdef_string == "YES" or getdef_string == "NO":
default_flags = ''
else:
default_flags = getdef_string
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.enable_cmd, 'get', self.name, 'flags'))
if stderr:
self.module.fail_json(msg=stderr)
get_string = stdout.rstrip()
# Depending on the service the string returned from 'get' may be
# either a set of flags or the boolean YES/NO
if get_string == "YES" or get_string == "NO":
current_flags = ''
else:
current_flags = get_string
# If there are arguments from the user we use these as flags unless
# they are already set.
if self.arguments and self.arguments != current_flags:
changed_flags = self.arguments
# If the user has not supplied any arguments and the current flags
# differ from the default we reset them.
elif not self.arguments and current_flags != default_flags:
changed_flags = ' '
# Otherwise there is no need to modify flags.
else:
changed_flags = ''
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.enable_cmd, 'get', self.name, 'status'))
if self.enable:
if rc == 0 and not changed_flags:
return
if rc != 0:
status_action = "set %s status on" % (self.name)
else:
status_action = ''
if changed_flags:
flags_action = "set %s flags %s" % (self.name, changed_flags)
else:
flags_action = ''
else:
if rc == 1:
return
status_action = "set %s status off" % self.name
flags_action = ''
# Verify state assumption
if not status_action and not flags_action:
self.module.fail_json(msg="neither status_action or status_flags is set, this should never happen")
if self.module.check_mode:
self.module.exit_json(changed=True, msg="changing service enablement")
status_modified = 0
if status_action:
rc, stdout, stderr = self.execute_command("%s %s" % (self.enable_cmd, status_action))
if rc != 0:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg="rcctl failed to modify service status")
status_modified = 1
if flags_action:
rc, stdout, stderr = self.execute_command("%s %s" % (self.enable_cmd, flags_action))
if rc != 0:
if stderr:
if status_modified:
error_message = "rcctl modified service status but failed to set flags: " + stderr
else:
error_message = stderr
else:
if status_modified:
error_message = "rcctl modified service status but failed to set flags"
else:
error_message = "rcctl failed to modify service flags"
self.module.fail_json(msg=error_message)
self.changed = True
# ===========================================
# Subclass: NetBSD
class NetBsdService(Service):
"""
This is the NetBSD Service manipulation class - it uses the /etc/rc.conf
file for controlling services started at boot, check status and perform
direct service manipulation. Init scripts in /etc/rcd are used for
controlling services (start/stop) as well as for controlling the current
state.
"""
platform = 'NetBSD'
distribution = None
def get_service_tools(self):
initpaths = [ '/etc/rc.d' ] # better: $rc_directories - how to get in here? Run: sh -c '. /etc/rc.conf ; echo $rc_directories'
for initdir in initpaths:
initscript = "%s/%s" % (initdir,self.name)
if os.path.isfile(initscript):
self.svc_initscript = initscript
if not self.svc_initscript:
self.module.fail_json(msg='unable to find rc.d script')
def service_enable(self):
if self.enable:
self.rcconf_value = "YES"
else:
self.rcconf_value = "NO"
rcfiles = [ '/etc/rc.conf' ] # Overkill?
for rcfile in rcfiles:
if os.path.isfile(rcfile):
self.rcconf_file = rcfile
        self.rcconf_key = self.name.replace("-", "_")
return self.service_enable_rcconf()
def get_service_status(self):
self.svc_cmd = "%s" % self.svc_initscript
rc, stdout, stderr = self.execute_command("%s %s" % (self.svc_cmd, 'onestatus'))
if rc == 1:
self.running = False
elif rc == 0:
self.running = True
def service_control(self):
if self.action is "start":
self.action = "onestart"
if self.action is "stop":
self.action = "onestop"
self.svc_cmd = "%s" % self.svc_initscript
return self.execute_command("%s %s" % (self.svc_cmd, self.action), daemonize=True)
# ===========================================
# Subclass: SunOS
class SunOSService(Service):
"""
This is the SunOS Service manipulation class - it uses the svcadm
command for controlling services, and svcs command for checking status.
It also tries to be smart about taking the service out of maintenance
state if necessary.
"""
platform = 'SunOS'
distribution = None
def get_service_tools(self):
self.svcs_cmd = self.module.get_bin_path('svcs', True)
if not self.svcs_cmd:
self.module.fail_json(msg='unable to find svcs binary')
self.svcadm_cmd = self.module.get_bin_path('svcadm', True)
if not self.svcadm_cmd:
self.module.fail_json(msg='unable to find svcadm binary')
def get_service_status(self):
status = self.get_sunos_svcs_status()
# Only 'online' is considered properly running. Everything else is off
# or has some sort of problem.
if status == 'online':
self.running = True
else:
self.running = False
def get_sunos_svcs_status(self):
rc, stdout, stderr = self.execute_command("%s %s" % (self.svcs_cmd, self.name))
if rc == 1:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg=stdout)
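        # 'svcs <name>' output ends with a line like (illustrative):
        #   online         17:03:35 svc:/network/ssh:default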
lines = stdout.rstrip("\n").split("\n")
status = lines[-1].split(" ")[0]
# status is one of: online, offline, degraded, disabled, maintenance, uninitialized
# see man svcs(1)
return status
def service_enable(self):
# Get current service enablement status
rc, stdout, stderr = self.execute_command("%s -l %s" % (self.svcs_cmd, self.name))
if rc != 0:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg=stdout)
enabled = False
temporary = False
# look for enabled line, which could be one of:
# enabled true (temporary)
# enabled false (temporary)
# enabled true
# enabled false
for line in stdout.split("\n"):
if line.startswith("enabled"):
if "true" in line:
enabled = True
if "temporary" in line:
temporary = True
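        # The temporary flag inverts the persistent setting: enabled+temporary
        # means disabled at boot, and disabled+temporary means enabled at boot.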
startup_enabled = (enabled and not temporary) or (not enabled and temporary)
if self.enable and startup_enabled:
return
elif (not self.enable) and (not startup_enabled):
return
# Mark service as started or stopped (this will have the side effect of
# actually stopping or starting the service)
if self.enable:
subcmd = "enable -rs"
else:
subcmd = "disable -s"
rc, stdout, stderr = self.execute_command("%s %s %s" % (self.svcadm_cmd, subcmd, self.name))
if rc != 0:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg=stdout)
self.changed = True
def service_control(self):
status = self.get_sunos_svcs_status()
# if starting or reloading, clear maintenace states
if self.action in ['start', 'reload', 'restart'] and status in ['maintenance', 'degraded']:
rc, stdout, stderr = self.execute_command("%s clear %s" % (self.svcadm_cmd, self.name))
if rc != 0:
return rc, stdout, stderr
status = self.get_sunos_svcs_status()
if status in ['maintenance', 'degraded']:
self.module.fail_json(msg="Failed to bring service out of %s status." % status)
if self.action == 'start':
subcmd = "enable -rst"
elif self.action == 'stop':
subcmd = "disable -st"
elif self.action == 'reload':
subcmd = "refresh"
elif self.action == 'restart' and status == 'online':
subcmd = "restart"
elif self.action == 'restart' and status != 'online':
subcmd = "enable -rst"
return self.execute_command("%s %s %s" % (self.svcadm_cmd, subcmd, self.name))
# ===========================================
# Subclass: AIX
class AIX(Service):
"""
This is the AIX Service (SRC) manipulation class - it uses lssrc, startsrc, stopsrc
and refresh for service control. Enabling a service is currently not supported.
Would require to add an entry in the /etc/inittab file (mkitab, chitab and rmitab
commands)
"""
platform = 'AIX'
distribution = None
def get_service_tools(self):
self.lssrc_cmd = self.module.get_bin_path('lssrc', True)
if not self.lssrc_cmd:
self.module.fail_json(msg='unable to find lssrc binary')
self.startsrc_cmd = self.module.get_bin_path('startsrc', True)
if not self.startsrc_cmd:
self.module.fail_json(msg='unable to find startsrc binary')
self.stopsrc_cmd = self.module.get_bin_path('stopsrc', True)
if not self.stopsrc_cmd:
self.module.fail_json(msg='unable to find stopsrc binary')
self.refresh_cmd = self.module.get_bin_path('refresh', True)
if not self.refresh_cmd:
self.module.fail_json(msg='unable to find refresh binary')
def get_service_status(self):
status = self.get_aix_src_status()
# Only 'active' is considered properly running. Everything else is off
# or has some sort of problem.
if status == 'active':
self.running = True
else:
self.running = False
def get_aix_src_status(self):
rc, stdout, stderr = self.execute_command("%s -s %s" % (self.lssrc_cmd, self.name))
if rc == 1:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg=stdout)
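        # 'lssrc -s <name>' output ends with a line like (illustrative):
        #    sshd             ssh              123456       active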
lines = stdout.rstrip("\n").split("\n")
status = lines[-1].split(" ")[-1]
# status is one of: active, inoperative
return status
def service_control(self):
if self.action == 'start':
srccmd = self.startsrc_cmd
elif self.action == 'stop':
srccmd = self.stopsrc_cmd
elif self.action == 'reload':
srccmd = self.refresh_cmd
elif self.action == 'restart':
self.execute_command("%s -s %s" % (self.stopsrc_cmd, self.name))
srccmd = self.startsrc_cmd
if self.arguments and self.action == 'start':
return self.execute_command("%s -a \"%s\" -s %s" % (srccmd, self.arguments, self.name))
else:
return self.execute_command("%s -s %s" % (srccmd, self.name))
# ===========================================
# Main control flow
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
state = dict(choices=['running', 'started', 'stopped', 'restarted', 'reloaded']),
sleep = dict(required=False, type='int', default=None),
pattern = dict(required=False, default=None),
enabled = dict(type='bool'),
runlevel = dict(required=False, default='default'),
arguments = dict(aliases=['args'], default=''),
must_exist = dict(type='bool', default=True),
),
supports_check_mode=True
)
if module.params['state'] is None and module.params['enabled'] is None:
module.fail_json(msg="Neither 'state' nor 'enabled' set")
service = Service(module)
if service.syslogging:
syslog.openlog('ansible-%s' % os.path.basename(__file__))
syslog.syslog(syslog.LOG_NOTICE, 'Service instantiated - platform %s' % service.platform)
if service.distribution:
syslog.syslog(syslog.LOG_NOTICE, 'Service instantiated - distribution %s' % service.distribution)
rc = 0
out = ''
err = ''
result = {}
result['name'] = service.name
# Find service management tools
service.get_service_tools()
# Enable/disable service startup at boot if requested
if service.module.params['enabled'] is not None:
# FIXME: ideally this should detect if we need to toggle the enablement state, though
# it's unlikely the changed handler would need to fire in this case so it's a minor thing.
service.service_enable()
result['enabled'] = service.enable
if module.params['state'] is None:
# Not changing the running state, so bail out now.
result['changed'] = service.changed
module.exit_json(**result)
result['state'] = service.state
# Collect service status
if service.pattern:
service.check_ps()
else:
service.get_service_status()
# Calculate if request will change service state
service.check_service_changed()
# Modify service state if necessary
(rc, out, err) = service.modify_service_state()
if rc != 0:
if err and "Job is already running" in err:
# upstart got confused, one such possibility is MySQL on Ubuntu 12.04
# where status may report it has no start/stop links and we could
# not get accurate status
pass
else:
if err:
module.fail_json(msg=err)
else:
module.fail_json(msg=out)
result['changed'] = service.changed | service.svc_change
if service.module.params['enabled'] is not None:
result['enabled'] = service.module.params['enabled']
if not service.module.params['state']:
status = service.get_service_status()
if status is None:
result['state'] = 'absent'
        elif status is False:
            # get_service_status() returns self.running: False means stopped
            result['state'] = 'stopped'
        else:
            result['state'] = 'started'
else:
# as we may have just bounced the service the service command may not
# report accurate state at this moment so just show what we ran
if service.module.params['state'] in ['started','restarted','running','reloaded']:
result['state'] = 'started'
else:
result['state'] = 'stopped'
module.exit_json(**result)
from ansible.module_utils.basic import *
main()
| gpl-3.0 | -4,609,616,486,088,734,000 | 36.927767 | 150 | 0.553125 | false |
ominux/scikit-learn | examples/cluster/plot_adjusted_for_chance_measures.py | 1 | 4105 | """
==========================================================
Adjustment for chance in clustering performance evaluation
==========================================================
The following plots demonstrate the impact of the number of clusters and
number of samples on various clustering performance evaluation metrics.
Non-adjusted measures such as the V-Measure show a dependency between
the number of clusters and the number of samples: the mean V-Measure
of random labeling increases significantly as the number of clusters
gets closer to the total number of samples used to compute the measure.
Adjusted-for-chance measures such as ARI display random variations
centered on a mean score of 0.0 for any number of samples and
clusters.
Hence only adjusted measures can safely be used as a consensus index
to evaluate the average stability of clustering algorithms for a given
value of k on various overlapping sub-samples of the dataset.
"""
print __doc__
# Author: Olivier Grisel <olivier.grisel@ensta.org>
# License: Simplified BSD
import numpy as np
import pylab as pl
from sklearn import metrics
def uniform_labelings_scores(score_func, n_samples, n_clusters_range,
fixed_n_classes=None, n_runs=10, seed=42):
"""Compute score for 2 random uniform cluster labelings.
    Both random labelings have the same number of clusters for each
    possible value in ``n_clusters_range``.
    When fixed_n_classes is not None the first labeling is considered a ground
    truth class assignment with a fixed number of classes.
"""
random_labels = np.random.RandomState(seed).random_integers
scores = np.zeros((len(n_clusters_range), n_runs))
if fixed_n_classes is not None:
labels_a = random_labels(low=0, high=fixed_n_classes - 1,
size=n_samples)
for i, k in enumerate(n_clusters_range):
for j in range(n_runs):
if fixed_n_classes is None:
labels_a = random_labels(low=0, high=k - 1, size=n_samples)
labels_b = random_labels(low=0, high=k - 1, size=n_samples)
scores[i, j] = score_func(labels_a, labels_b)
return scores
score_funcs = [
metrics.adjusted_rand_score,
metrics.v_measure_score,
]
# 2 independent random clusterings with equal cluster number
n_samples = 100
n_clusters_range = np.linspace(2, n_samples, 10).astype(np.int)
pl.figure(1)
plots = []
names = []
for score_func in score_funcs:
print "Computing %s for %d values of n_clusters and n_samples=%d" % (
score_func.__name__, len(n_clusters_range), n_samples)
scores = uniform_labelings_scores(score_func, n_samples, n_clusters_range)
plots.append(pl.errorbar(
n_clusters_range, scores.mean(axis=1), scores.std(axis=1)))
names.append(score_func.__name__)
pl.title("Clustering measures for 2 random uniform labelings\n"
"with equal number of clusters")
pl.xlabel('Number of clusters (Number of samples is fixed to %d)' % n_samples)
pl.ylabel('Score value')
pl.legend(plots, names)
pl.ylim(ymin=-0.05, ymax=1.05)
pl.show()
# Random labeling with varying n_clusters against ground class labels
# with fixed number of clusters
n_samples = 1000
n_clusters_range = np.linspace(2, 100, 10).astype(np.int)
n_classes = 10
pl.figure(2)
plots = []
names = []
for score_func in score_funcs:
print "Computing %s for %d values of n_clusters and n_samples=%d" % (
score_func.__name__, len(n_clusters_range), n_samples)
scores = uniform_labelings_scores(score_func, n_samples, n_clusters_range,
fixed_n_classes=n_classes)
plots.append(pl.errorbar(
n_clusters_range, scores.mean(axis=1), scores.std(axis=1)))
names.append(score_func.__name__)
pl.title("Clustering measures for random uniform labeling\n"
"against reference assignement with %d classes" % n_classes)
pl.xlabel('Number of clusters (Number of samples is fixed to %d)' % n_samples)
pl.ylabel('Score value')
pl.ylim(ymin=-0.05, ymax=1.05)
pl.legend(plots, names)
pl.show()
| bsd-3-clause | 304,915,694,155,107,840 | 34.387931 | 78 | 0.677223 | false |