repo_name
stringlengths 6
100
| path
stringlengths 4
294
| copies
stringlengths 1
5
| size
stringlengths 4
6
| content
stringlengths 606
896k
| license
stringclasses 15
values |
---|---|---|---|---|---|
jmacmahon/invenio | modules/webstyle/lib/goto_plugins/goto_plugin_cern_hr_documents.py | 3 | 7382 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2012 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This implements a redirection for CERN HR Documents in the CERN Document
Server. It's useful as a reference on how goto plugins could be implemented.
"""
import time
import re
from invenio.search_engine import perform_request_search
from invenio.search_engine_utils import get_fieldvalues
from invenio.bibdocfile import BibRecDocs, InvenioBibDocFileError
def make_cern_ssr_docname(lang, edition, modif=0):
    """Build the docname of a CERN Staff Rules and Regulations file.

    @param lang: two-letter language code, e.g. 'en' or 'fr'
    @param edition: edition number, zero-padded to two digits
    @param modif: optional modification number; when non-zero a
        '_modifNN' suffix is appended
    @return: a docname such as 'CERN_SRR_en_ed08' or
        'CERN_SRR_en_ed08_modif02'
    """
    docname = "CERN_SRR_%s_ed%02d" % (lang, edition)
    if modif:
        docname += "_modif%02d" % modif
    return docname
_RE_REVISION = re.compile(r"rev(\d\d)")
def _get_revision(docname):
"""
Return the revision in a docname. E.g.:
CERN_Circ_Op_en_02_rev01_Implementation measures.pdf -> 1
CERN_Circ_Op_en_02_rev02_Implementation measures.PDF -> 2
"""
g = _RE_REVISION.search(docname)
if g:
return int(g.group(1))
return 0
def _register_document(documents, docname, key):
    """Map key -> docname in documents, keeping the highest revision.

    The docname is stored only when the key is new, or when its revision
    (see _get_revision) is strictly greater than that of the docname
    currently associated with the key.
    """
    current = documents.get(key)
    if current is None or _get_revision(docname) > _get_revision(current):
        documents[key] = docname
def _get_pdf_url(bibrecdocs, docname):
    """Return the URL of the PDF file attached under docname.

    Tries the lowercase '.pdf' extension first and falls back to the
    uppercase '.PDF' one, since both spellings occur among attached files.
    """
    try:
        return bibrecdocs.get_bibdoc(docname).get_file('.pdf').get_url()
    except InvenioBibDocFileError:
        return bibrecdocs.get_bibdoc(docname).get_file('.PDF').get_url()


def _collect_circular_documents(bibrecdocs):
    """Classify the docnames of a circular record by symbolic key.

    Returns a dict mapping keys such as 'en', 'fr', 'implementation-en',
    'annex-fr', ... to the docname with the highest revision for that key
    (see _register_document). The classification is based on substrings
    of the lowercased docname; branch order matters.
    """
    documents = {}
    for docname in bibrecdocs.get_bibdoc_names():
        ldocname = docname.lower()
        if 'implementation' in ldocname:
            _register_document(documents, docname, 'implementation-en')
        elif 'application' in ldocname:
            _register_document(documents, docname, 'implementation-fr')
        elif 'archiving' in ldocname:
            _register_document(documents, docname, 'archiving-en')
        elif 'archivage' in ldocname:
            _register_document(documents, docname, 'archiving-fr')
        elif 'annexes_en' in ldocname:
            ## BUGFIX: this test must come before the 'annexe' test below:
            ## 'annexe' is a substring of 'annexes_en', so English annexes
            ## would otherwise be registered under the French key.
            _register_document(documents, docname, 'annex-en')
        elif 'annexe' in ldocname or 'annexes_fr' in ldocname:
            _register_document(documents, docname, 'annex-fr')
        elif 'annex' in ldocname:
            _register_document(documents, docname, 'annex-en')
        elif '_en_' in ldocname or '_eng_' in ldocname or '_angl_' in ldocname:
            _register_document(documents, docname, 'en')
        elif '_fr_' in ldocname:
            _register_document(documents, docname, 'fr')
    return documents


def goto(type, document='', number=0, lang='en', modif=0):
    """Redirect to the PDF of the requested CERN HR document.

    @param type: 'SRR' (Staff Rules and Regulations), 'OPER-CIRC' or
        'ADMIN-CIRC'. NOTE: the parameter shadows the builtin ``type``;
        the name is kept because registered redirections pass it by name.
    @param document: symbolic document key for circulars ('en', 'fr',
        'implementation-en', 'annex-fr', ...)
    @param number: circular number (circular types only)
    @param lang: language of the SRR document
    @param modif: modification number of the SRR document
    @return: the URL of the matching PDF, or None for an unknown type
    """
    today = time.strftime('%Y-%m-%d')
    if type == 'SRR':
        ## We would like the CERN Staff Rules and Regulations valid today.
        recids = perform_request_search(cc='Staff Rules and Regulations', f="925__a:1996-01-01->%s 925__b:%s->9999-99-99" % (today, today))
        recid = recids[-1]
        reportnumber = get_fieldvalues(recid, '037__a')[0]
        edition = int(reportnumber[-2:]) ## e.g. CERN-STAFF-RULES-ED08
        ## Now also tolerates an uppercase '.PDF' attachment, like the
        ## circular branches below.
        return _get_pdf_url(BibRecDocs(recid),
                            make_cern_ssr_docname(lang, edition, modif))
    elif type == "OPER-CIRC":
        recids = perform_request_search(cc="Operational Circulars", p="reportnumber:\"CERN-OPER-CIRC-%s-*\"" % number, sf="925__a")
    elif type == 'ADMIN-CIRC':
        recids = perform_request_search(cc="Administrative Circulars", p='reportnumber:"CERN-ADMIN-CIRC-%s-*"' % number, sf="925__a")
    else:
        return None
    ## Both circular types share the same attachment layout, so the
    ## classification and the PDF lookup are handled by common helpers.
    recid = recids[-1]
    bibrecdocs = BibRecDocs(recid)
    documents = _collect_circular_documents(bibrecdocs)
    return _get_pdf_url(bibrecdocs, documents[document])
def register_hr_redirections():
    """
    Register every goto redirection handled by this plugin.

    Run this only once: each call registers the same fixed set of labels.
    """
    from invenio.goto_engine import register_redirection
    plugin = 'goto_plugin_cern_hr_documents'
    languages = ('en', 'fr')

    def register(label, parameters):
        ## Every redirection of this plugin shares the same plugin name.
        register_redirection(label, plugin, parameters=parameters)

    ## Staff rules and regulations
    for modif in range(1, 20):
        for lang in languages:
            register('hr-srr-modif%02d-%s' % (modif, lang),
                     {'type': 'SRR', 'lang': lang, 'modif': modif})
    for lang in languages:
        register('hr-srr-%s' % lang,
                 {'type': 'SRR', 'lang': lang, 'modif': 0})
    ## Operational Circulars
    for number in range(1, 10):
        for lang in languages:
            register('hr-oper-circ-%s-%s' % (number, lang),
                     {'type': 'OPER-CIRC', 'document': lang,
                      'number': number})
    for number, special_document in ((2, 'implementation'), (2, 'annex'),
                                     (3, 'archiving'), (3, 'annex')):
        for lang in languages:
            register('hr-circ-%s-%s-%s' % (number, special_document, lang),
                     {'type': 'OPER-CIRC',
                      'document': '%s-%s' % (special_document, lang),
                      'number': number})
    ## Administrative Circulars:
    for number in range(1, 32):
        for lang in languages:
            register('hr-admin-circ-%s-%s' % (number, lang),
                     {'type': 'ADMIN-CIRC', 'document': lang,
                      'number': number})


if __name__ == "__main__":
    register_hr_redirections()
| gpl-2.0 |
ankit318/appengine-mapreduce | python/test/mapreduce/status_test.py | 12 | 20599 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import tempfile
import time
import unittest
# json entered the standard library in Python 2.6; fall back to the
# external simplejson package (same API) on older runtimes.
try:
  import json
except ImportError:
  import simplejson as json
from google.appengine.api import yaml_errors
from google.appengine.ext import db
from mapreduce import errors
from mapreduce import handlers
from mapreduce import status
from testlib import testutil
from mapreduce import test_support
from google.appengine.ext.webapp import mock_webapp
class TestKind(db.Model):
  """Used for testing."""

  # Simple datastore kind the test jobs map over; value is irrelevant.
  foobar = db.StringProperty(default="meep")
def TestMap(entity):
  """Used for testing."""
  # No-op mapper handler: the jobs only need a resolvable callable.
  pass
class MapreduceYamlTest(unittest.TestCase):
  """Testing mapreduce.yaml-related functionality."""

  def set_up_directory_tree(self, dir_tree_contents):
    """Create directory tree from dict of path:contents entries."""
    for full_path, contents in dir_tree_contents.iteritems():
      dir_name = os.path.dirname(full_path)
      if not os.path.isdir(dir_name):
        os.makedirs(dir_name)
      f = open(full_path, 'w')
      f.write(contents)
      f.close()

  def setUp(self):
    """Initialize temporary application variable."""
    self.tempdir = tempfile.mkdtemp()

  def tearDown(self):
    """Remove temporary application directory."""
    if self.tempdir:
      shutil.rmtree(self.tempdir)

  def testFindYamlFile(self):
    """Test if mapreduce.yaml can be found with different app/library trees."""
    # Library installed outside the application root: the finder must
    # walk up from status.py into the app directory.
    test_status = os.path.join(self.tempdir, "library_root", "google",
                               "appengine", "ext", "mapreduce", "status.py")
    test_mapreduce_yaml = os.path.join(self.tempdir, "application_root",
                                       "mapreduce.yaml")
    test_dict = {
        test_status: "test",
        test_mapreduce_yaml: "test",
    }
    self.set_up_directory_tree(test_dict)
    os.chdir(os.path.dirname(test_mapreduce_yaml))
    yaml_loc = status.find_mapreduce_yaml(status_file=test_status)
    self.assertTrue(os.path.samefile(test_mapreduce_yaml, yaml_loc))

  def testFindYamlFileSameTree(self):
    """Test if mapreduce.yaml can be found with the same app/library tree."""
    test_status = os.path.join(self.tempdir, "application_root", "google",
                               "appengine", "ext", "mapreduce", "status.py")
    test_mapreduce_yaml = os.path.join(self.tempdir, "application_root",
                                       "mapreduce.yaml")
    test_dict = {
        test_status: "test",
        test_mapreduce_yaml: "test",
    }
    self.set_up_directory_tree(test_dict)
    os.chdir(os.path.dirname(test_mapreduce_yaml))
    yaml_loc = status.find_mapreduce_yaml(status_file=test_status)
    self.assertEqual(test_mapreduce_yaml, yaml_loc)

  def testParseEmptyFile(self):
    """Parsing empty mapreduce.yaml file."""
    self.assertRaises(errors.BadYamlError,
                      status.parse_mapreduce_yaml,
                      "")

  def testParse(self):
    """Parsing a single document in mapreduce.yaml."""
    mr_yaml = status.parse_mapreduce_yaml(
        "mapreduce:\n"
        "- name: Mapreduce1\n"
        "  mapper:\n"
        "    handler: Handler1\n"
        "    input_reader: Reader1\n"
        "    params_validator: Validator1\n"
        "    params:\n"
        "    - name: entity_kind\n"
        "      default: Kind1\n"
        "    - name: human_supplied1\n"
        "    - name: human_supplied2\n"
        "- name: Mapreduce2\n"
        "  mapper:\n"
        "    handler: Handler2\n"
        "    input_reader: Reader2\n")

    self.assertTrue(mr_yaml)
    self.assertEquals(2, len(mr_yaml.mapreduce))

    self.assertEquals("Mapreduce1", mr_yaml.mapreduce[0].name)
    self.assertEquals("Handler1", mr_yaml.mapreduce[0].mapper.handler)
    self.assertEquals("Reader1", mr_yaml.mapreduce[0].mapper.input_reader)
    self.assertEquals("Validator1",
                      mr_yaml.mapreduce[0].mapper.params_validator)
    self.assertEquals(3, len(mr_yaml.mapreduce[0].mapper.params))
    self.assertEquals("entity_kind", mr_yaml.mapreduce[0].mapper.params[0].name)
    self.assertEquals("Kind1", mr_yaml.mapreduce[0].mapper.params[0].default)
    self.assertEquals("human_supplied1",
                      mr_yaml.mapreduce[0].mapper.params[1].name)
    self.assertEquals("human_supplied2",
                      mr_yaml.mapreduce[0].mapper.params[2].name)

    self.assertEquals("Mapreduce2", mr_yaml.mapreduce[1].name)
    self.assertEquals("Handler2", mr_yaml.mapreduce[1].mapper.handler)
    self.assertEquals("Reader2", mr_yaml.mapreduce[1].mapper.input_reader)

  def testParseOutputWriter(self):
    """Parsing a single document in mapreduce.yaml with output writer."""
    mr_yaml = status.parse_mapreduce_yaml(
        "mapreduce:\n"
        "- name: Mapreduce1\n"
        "  mapper:\n"
        "    handler: Handler1\n"
        "    input_reader: Reader1\n"
        "    output_writer: Writer1\n"
        )

    self.assertTrue(mr_yaml)
    self.assertEquals(1, len(mr_yaml.mapreduce))

    self.assertEquals("Mapreduce1", mr_yaml.mapreduce[0].name)
    self.assertEquals("Handler1", mr_yaml.mapreduce[0].mapper.handler)
    self.assertEquals("Reader1", mr_yaml.mapreduce[0].mapper.input_reader)
    self.assertEquals("Writer1", mr_yaml.mapreduce[0].mapper.output_writer)

  def testParseMissingRequiredAttrs(self):
    """Test parsing with missing required attributes."""
    # A mapper needs both a handler and an input_reader.
    self.assertRaises(errors.BadYamlError,
                      status.parse_mapreduce_yaml,
                      "mapreduce:\n"
                      "- name: Mapreduce1\n"
                      "  mapper:\n"
                      "    handler: Handler1\n")
    self.assertRaises(errors.BadYamlError,
                      status.parse_mapreduce_yaml,
                      "mapreduce:\n"
                      "- name: Mapreduce1\n"
                      "  mapper:\n"
                      "    input_reader: Reader1\n")

  def testBadValues(self):
    """Tests when some yaml values are of the wrong type."""
    self.assertRaises(errors.BadYamlError,
                      status.parse_mapreduce_yaml,
                      "mapreduce:\n"
                      "- name: Mapreduce1\n"
                      "  mapper:\n"
                      "    handler: Handler1\n"
                      "    input_reader: Reader1\n"
                      "    params:\n"
                      "    - name: $$Invalid$$\n")

  def testMultipleDocuments(self):
    """Tests when multiple documents are present."""
    self.assertRaises(errors.BadYamlError,
                      status.parse_mapreduce_yaml,
                      "mapreduce:\n"
                      "- name: Mapreduce1\n"
                      "  mapper:\n"
                      "    handler: Handler1\n"
                      "    input_reader: Reader1\n"
                      "---")

  def testOverlappingNames(self):
    """Tests when there are jobs with the same name."""
    self.assertRaises(errors.BadYamlError,
                      status.parse_mapreduce_yaml,
                      "mapreduce:\n"
                      "- name: Mapreduce1\n"
                      "  mapper:\n"
                      "    handler: Handler1\n"
                      "    input_reader: Reader1\n"
                      "- name: Mapreduce1\n"
                      "  mapper:\n"
                      "    handler: Handler1\n"
                      "    input_reader: Reader1\n")

  def testToDict(self):
    """Tests encoding the MR document as JSON."""
    mr_yaml = status.parse_mapreduce_yaml(
        "mapreduce:\n"
        "- name: Mapreduce1\n"
        "  mapper:\n"
        "    handler: Handler1\n"
        "    input_reader: Reader1\n"
        "    params_validator: Validator1\n"
        "    params:\n"
        "    - name: entity_kind\n"
        "      default: Kind1\n"
        "    - name: human_supplied1\n"
        "    - name: human_supplied2\n"
        "- name: Mapreduce2\n"
        "  mapper:\n"
        "    handler: Handler2\n"
        "    input_reader: Reader2\n")
    all_configs = status.MapReduceYaml.to_dict(mr_yaml)

    # Params without a default are rendered as None.
    self.assertEquals(
        [
          {
            'name': 'Mapreduce1',
            'mapper_params_validator': 'Validator1',
            'mapper_params': {
                'entity_kind': 'Kind1',
                'human_supplied2': None,
                'human_supplied1': None},
            'mapper_handler': 'Handler1',
            'mapper_input_reader': 'Reader1'
          },
          {
            'mapper_input_reader': 'Reader2',
            'mapper_handler': 'Handler2',
            'name': 'Mapreduce2'
          }
        ], all_configs)

  def testToDictOutputWriter(self):
    """Tests encoding the MR document with output writer as JSON."""
    mr_yaml = status.parse_mapreduce_yaml(
        "mapreduce:\n"
        "- name: Mapreduce1\n"
        "  mapper:\n"
        "    handler: Handler1\n"
        "    input_reader: Reader1\n"
        "    output_writer: Writer1\n"
        )
    all_configs = status.MapReduceYaml.to_dict(mr_yaml)

    self.assertEquals(
        [
          {
            'name': 'Mapreduce1',
            'mapper_handler': 'Handler1',
            'mapper_input_reader': 'Reader1',
            'mapper_output_writer': 'Writer1',
          },
        ], all_configs)
class ResourceTest(testutil.HandlerTestBase):
  """Tests for the resource handler."""

  def setUp(self):
    """Sets up the test harness."""
    testutil.HandlerTestBase.setUp(self)
    self.handler = status.ResourceHandler()
    self.handler.initialize(mock_webapp.MockRequest(),
                            mock_webapp.MockResponse())
    self.handler.request.path = "/mapreduce/path"

  def testPaths(self):
    """Tests that paths are accessible."""
    self.handler.get("status")
    self.assertTrue(self.handler.response.out.getvalue().startswith(
        "<!DOCTYPE html>"))
    self.assertEquals("text/html",
                      self.handler.response.headers["Content-Type"])

    # Reset the buffer before fetching the second resource.
    self.handler.response.out.truncate(0)

    self.handler.get("jquery.js")
    self.assertTrue(self.handler.response.out.getvalue().startswith(
        "/*!"))
    self.assertEquals("text/javascript",
                      self.handler.response.headers["Content-Type"])

  def testCachingHeaders(self):
    """Tests that caching headers are correct."""
    self.handler.get("status")
    self.assertEquals("public; max-age=300",
                      self.handler.response.headers["Cache-Control"])

  def testMissing(self):
    """Tests when a resource is requested that doesn't exist."""
    self.handler.get("unknown")
    self.assertEquals(404, self.handler.response.status)
class ListConfigsTest(testutil.HandlerTestBase):
  """Tests for the ListConfigsHandler."""

  def setUp(self):
    """Sets up the test harness."""
    testutil.HandlerTestBase.setUp(self)
    self.handler = status.ListConfigsHandler()
    self.handler.initialize(mock_webapp.MockRequest(),
                            mock_webapp.MockResponse())
    self.handler.request.path = "/mapreduce/command/path"
    # The handler rejects requests without this AJAX marker (CSRF guard).
    self.handler.request.headers["X-Requested-With"] = "XMLHttpRequest"

  def testCSRF(self):
    """Test that we check the X-Requested-With header."""
    del self.handler.request.headers["X-Requested-With"]
    self.handler.get()
    self.assertEquals(403, self.handler.response.status)

  def testBasic(self):
    """Tests listing available configs."""
    # Temporarily stub out get_mapreduce_yaml; restored in the finally.
    old_get_yaml = status.get_mapreduce_yaml
    status.get_mapreduce_yaml = lambda: status.parse_mapreduce_yaml(
        "mapreduce:\n"
        "- name: Mapreduce1\n"
        "  mapper:\n"
        "    handler: Handler1\n"
        "    input_reader: Reader1\n"
        "    params_validator: Validator1\n"
        "    params:\n"
        "    - name: entity_kind\n"
        "      default: Kind1\n"
        "    - name: human_supplied1\n"
        "    - name: human_supplied2\n"
        "- name: Mapreduce2\n"
        "  mapper:\n"
        "    handler: Handler2\n"
        "    input_reader: Reader2\n"
        "    params_validator: MapreduceValidator\n"
        "    params:\n"
        "    - name: foo\n"
        "      value: bar\n")
    try:
      self.handler.get()
    finally:
      status.get_mapreduce_yaml = old_get_yaml

    self.assertEquals(
        {u'configs': [
          {u'mapper_params_validator': u'Validator1',
           u'mapper_params': {
               u'entity_kind': u'Kind1',
               u'human_supplied2': None,
               u'human_supplied1': None},
           u'mapper_input_reader': u'Reader1',
           u'mapper_handler': u'Handler1',
           u'name': u'Mapreduce1'},
          {u'mapper_input_reader': u'Reader2',
           u'mapper_handler': u'Handler2',
           u'name': u'Mapreduce2',
           u'params': {
               u'foo': u'bar',},
          }]},
        json.loads(self.handler.response.out.getvalue()))
    self.assertEquals("text/javascript",
                      self.handler.response.headers["Content-Type"])
class ListJobsTest(testutil.HandlerTestBase):
  """Tests listing active and inactive jobs."""

  def setUp(self):
    """Sets up the test harness."""
    testutil.HandlerTestBase.setUp(self)
    # Handler used to start jobs that the list handler will then report.
    self.start = handlers.StartJobHandler()
    self.start.initialize(mock_webapp.MockRequest(),
                          mock_webapp.MockResponse())
    self.start.request.path = "/mapreduce/command/start"
    self.start.request.set(
        "mapper_input_reader",
        "mapreduce.input_readers.DatastoreInputReader")
    self.start.request.set("mapper_handler", "__main__.TestMap")
    self.start.request.set("mapper_params.entity_kind", "__main__.TestKind")
    self.start.request.headers["X-Requested-With"] = "XMLHttpRequest"

    self.handler = status.ListJobsHandler()
    self.handler.initialize(mock_webapp.MockRequest(),
                            mock_webapp.MockResponse())
    self.handler.request.path = "/mapreduce/command/list"
    self.handler.request.headers["X-Requested-With"] = "XMLHttpRequest"

  def testCSRF(self):
    """Test that we check the X-Requested-With header."""
    TestKind().put()

    del self.start.request.headers["X-Requested-With"]
    self.start.post()
    self.assertEquals(403, self.start.response.status)

    del self.handler.request.headers["X-Requested-With"]
    self.handler.get()
    self.assertEquals(403, self.handler.response.status)

  def testBasic(self):
    """Tests when there are fewer than the max results to render."""
    TestKind().put()
    self.start.request.set("name", "my job 1")
    self.start.post()
    # Sleeps keep the three jobs' start timestamps distinct so the
    # listing order below is deterministic (newest first).
    time.sleep(.1)
    self.start.request.set("name", "my job 2")
    self.start.post()
    time.sleep(.1)
    self.start.request.set("name", "my job 3")
    self.start.post()

    self.handler.get()
    result = json.loads(self.handler.response.out.getvalue())
    expected_args = set([
        "active",
        "active_shards",
        "chart_url",
        "chart_width",
        "mapreduce_id",
        "name",
        "shards",
        "start_timestamp_ms",
        "updated_timestamp_ms",
    ])
    self.assertEquals(3, len(result["jobs"]))
    self.assertEquals("my job 3", result["jobs"][0]["name"])
    self.assertEquals("my job 2", result["jobs"][1]["name"])
    self.assertEquals("my job 1", result["jobs"][2]["name"])
    self.assertEquals(expected_args, set(result["jobs"][0].keys()))
    self.assertEquals(expected_args, set(result["jobs"][1].keys()))
    self.assertEquals(expected_args, set(result["jobs"][2].keys()))

  def testCursor(self):
    """Tests when a job cursor is present."""
    TestKind().put()
    self.start.request.set("name", "my job 1")
    self.start.post()
    time.sleep(.1)  # Can not start two jobs before time advances
    self.start.request.set("name", "my job 2")
    self.start.post()

    self.handler.request.set("count", "1")
    self.handler.get()
    result = json.loads(self.handler.response.out.getvalue())
    self.assertEquals(1, len(result["jobs"]))
    self.assertTrue("cursor" in result)

    # Second page: reuse the cursor; no further cursor is expected.
    self.handler.response.out.truncate(0)
    self.handler.request.set("count", "1")
    self.handler.request.set("cursor", result['cursor'])
    self.handler.get()
    result2 = json.loads(self.handler.response.out.getvalue())
    self.assertEquals(1, len(result2["jobs"]))
    self.assertFalse("cursor" in result2)

  def testNoJobs(self):
    """Tests when there are no jobs."""
    self.handler.get()
    result = json.loads(self.handler.response.out.getvalue())
    self.assertEquals({'jobs': []}, result)
class GetJobDetailTest(testutil.HandlerTestBase):
  """Tests listing job status detail."""

  def setUp(self):
    """Sets up the test harness."""
    testutil.HandlerTestBase.setUp(self)

    # Enough entities that the job is split over several shards.
    for _ in range(100):
      TestKind().put()

    self.start = handlers.StartJobHandler()
    self.start.initialize(mock_webapp.MockRequest(),
                          mock_webapp.MockResponse())
    self.start.request.path = "/mapreduce/command/start"
    self.start.request.set("name", "my job 1")
    self.start.request.set(
        "mapper_input_reader",
        "mapreduce.input_readers.DatastoreInputReader")
    self.start.request.set("mapper_handler", "__main__.TestMap")
    self.start.request.set("mapper_params.entity_kind", "__main__.TestKind")
    self.start.request.headers["X-Requested-With"] = "XMLHttpRequest"
    self.start.post()
    result = json.loads(self.start.response.out.getvalue())
    self.mapreduce_id = result["mapreduce_id"]

    self.handler = status.GetJobDetailHandler()
    self.handler.initialize(mock_webapp.MockRequest(),
                            mock_webapp.MockResponse())
    self.handler.request.path = "/mapreduce/command/list"
    self.handler.request.headers["X-Requested-With"] = "XMLHttpRequest"

  def KickOffMapreduce(self):
    """Executes pending kickoff task."""
    test_support.execute_all_tasks(self.taskqueue)

  def testCSRF(self):
    """Test that we check the X-Requested-With header."""
    del self.handler.request.headers["X-Requested-With"]
    self.handler.get()
    self.assertEquals(403, self.handler.response.status)

  def testBasic(self):
    """Tests getting the job details."""
    self.KickOffMapreduce()
    self.handler.request.set("mapreduce_id", self.mapreduce_id)
    self.handler.get()
    result = json.loads(self.handler.response.out.getvalue())

    expected_keys = set([
        "active", "chart_url", "counters", "mapper_spec", "mapreduce_id",
        "name", "result_status", "shards", "start_timestamp_ms",
        "updated_timestamp_ms", "params", "hooks_class_name", "chart_width"])
    expected_shard_keys = set([
        "active", "counters", "last_work_item", "result_status",
        "shard_description", "shard_id", "shard_number",
        "updated_timestamp_ms"])

    self.assertEquals(expected_keys, set(result.keys()))
    self.assertEquals(8, len(result["shards"]))
    self.assertEquals(expected_shard_keys, set(result["shards"][0].keys()))

  def testBeforeKickOff(self):
    """Tests getting the job details before the kickoff task has run."""
    self.handler.request.set("mapreduce_id", self.mapreduce_id)
    self.handler.get()
    result = json.loads(self.handler.response.out.getvalue())

    expected_keys = set([
        "active", "chart_url", "counters", "mapper_spec", "mapreduce_id",
        "name", "result_status", "shards", "start_timestamp_ms",
        "updated_timestamp_ms", "params", "hooks_class_name", "chart_width"])
    self.assertEquals(expected_keys, set(result.keys()))

  def testBadJobId(self):
    """Tests when an invalid job ID is supplied."""
    self.handler.request.set("mapreduce_id", "does not exist")
    self.handler.get()
    result = json.loads(self.handler.response.out.getvalue())
    self.assertEquals(
        {"error_message": "\"Could not find job with ID 'does not exist'\"",
         "error_class": "KeyError"},
        result)
# TODO(user): Add tests for abort
# TODO(user): Add tests for cleanup
# Allow running this test module directly.
if __name__ == "__main__":
  unittest.main()
| apache-2.0 |
katsikas/gnuradio | grc/gui/BlockTreeWindow.py | 7 | 7639 | """
Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
from Constants import DEFAULT_BLOCKS_WINDOW_WIDTH, DND_TARGETS
import Utils
import pygtk
pygtk.require('2.0')
import gtk
import gobject
# Column indices of the gtk.TreeStore backing the block tree.
NAME_INDEX = 0  # display name of the category or block
KEY_INDEX = 1   # unique block key (empty string for categories)
DOC_INDEX = 2   # tooltip markup

# Cheetah template rendering a block's tooltip from its doc string.
DOC_MARKUP_TMPL="""\
#if $doc
$encode($doc)#slurp
#else
undocumented#slurp
#end if"""

# Cheetah template rendering a category's tooltip.
CAT_MARKUP_TMPL="""Category: $cat"""
class BlockTreeWindow(gtk.VBox):
    """The block selection panel."""

    def __init__(self, platform, get_flow_graph):
        """
        BlockTreeWindow constructor.
        Create a tree view of the possible blocks in the platform.
        The tree view nodes will be category names, the leaves will be block names.
        A mouse double click or button press action will trigger the add block event.
        @param platform the particular platform will all block prototypes
        @param get_flow_graph get the selected flow graph
        """
        gtk.VBox.__init__(self)
        self.platform = platform
        self.get_flow_graph = get_flow_graph
        #make the tree model for holding blocks
        self.treestore = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING)
        self.treeview = gtk.TreeView(self.treestore)
        self.treeview.set_enable_search(False) #disable pop up search box
        self.treeview.add_events(gtk.gdk.BUTTON_PRESS_MASK)
        self.treeview.connect('button-press-event', self._handle_mouse_button_press)
        selection = self.treeview.get_selection()
        selection.set_mode('single')
        selection.connect('changed', self._handle_selection_change)
        renderer = gtk.CellRendererText()
        column = gtk.TreeViewColumn('Blocks', renderer, text=NAME_INDEX)
        self.treeview.append_column(column)
        #setup the search (re-enabled here with a custom match function)
        self.treeview.set_enable_search(True)
        self.treeview.set_search_equal_func(self._handle_search)
        #try to enable the tooltips (available in pygtk 2.12 and above)
        try: self.treeview.set_tooltip_column(DOC_INDEX)
        except: pass  # NOTE(review): bare except kept for pre-2.12 pygtk; AttributeError would be narrower
        #setup drag and drop
        self.treeview.enable_model_drag_source(gtk.gdk.BUTTON1_MASK, DND_TARGETS, gtk.gdk.ACTION_COPY)
        self.treeview.connect('drag-data-get', self._handle_drag_get_data)
        #make the scrolled window to hold the tree view
        scrolled_window = gtk.ScrolledWindow()
        scrolled_window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        scrolled_window.add_with_viewport(self.treeview)
        scrolled_window.set_size_request(DEFAULT_BLOCKS_WINDOW_WIDTH, -1)
        self.pack_start(scrolled_window)
        #add button
        self.add_button = gtk.Button(None, gtk.STOCK_ADD)
        self.add_button.connect('clicked', self._handle_add_button)
        self.pack_start(self.add_button, False)
        #map categories to iters, automatic mapping for root
        self._categories = {tuple(): None}
        #add blocks and categories
        self.platform.load_block_tree(self)
        #initialize
        self._update_add_button()

    ############################################################
    ## Block Tree Methods
    ############################################################
    def add_block(self, category, block=None):
        """
        Add a block with category to this selection window.
        Add only the category when block is None.
        @param category the category list or path string
        @param block the block object or None
        """
        if isinstance(category, str): category = category.split('/')
        category = tuple(filter(lambda x: x, category)) #tuple is hashable
        #add category and all sub categories
        for i, cat_name in enumerate(category):
            sub_category = category[:i+1]
            if sub_category not in self._categories:
                #insert the new category row under its parent category
                iter = self.treestore.insert_before(self._categories[sub_category[:-1]], None)
                self.treestore.set_value(iter, NAME_INDEX, '[ %s ]'%cat_name)
                self.treestore.set_value(iter, KEY_INDEX, '')
                self.treestore.set_value(iter, DOC_INDEX, Utils.parse_template(CAT_MARKUP_TMPL, cat=cat_name))
                self._categories[sub_category] = iter
        #add block
        if block is None: return
        iter = self.treestore.insert_before(self._categories[category], None)
        self.treestore.set_value(iter, NAME_INDEX, block.get_name())
        self.treestore.set_value(iter, KEY_INDEX, block.get_key())
        self.treestore.set_value(iter, DOC_INDEX, Utils.parse_template(DOC_MARKUP_TMPL, doc=block.get_doc()))

    ############################################################
    ## Helper Methods
    ############################################################
    def _get_selected_block_key(self):
        """
        Get the currently selected block key.
        @return the key of the selected block or a empty string
        """
        selection = self.treeview.get_selection()
        treestore, iter = selection.get_selected()
        return iter and treestore.get_value(iter, KEY_INDEX) or ''

    def _update_add_button(self):
        """
        Update the add button's sensitivity.
        The button should be active only if a block is selected.
        """
        key = self._get_selected_block_key()
        self.add_button.set_sensitive(bool(key))

    def _add_selected_block(self):
        """
        Add the selected block with the given key to the flow graph.
        """
        key = self._get_selected_block_key()
        if key: self.get_flow_graph().add_new_block(key)

    ############################################################
    ## Event Handlers
    ############################################################
    def _handle_search(self, model, column, key, iter):
        #determine which blocks match the search key
        blocks = self.get_flow_graph().get_parent().get_blocks()
        matching_blocks = filter(lambda b: key in b.get_key() or key in b.get_name().lower(), blocks)
        #remove the old search category (AttributeError: no search done yet)
        try: self.treestore.remove(self._categories.pop((self._search_category, )))
        except (KeyError, AttributeError): pass #nothing to remove
        #create a search category
        if not matching_blocks: return
        self._search_category = 'Search: %s'%key
        for block in matching_blocks: self.add_block(self._search_category, block)
        #expand the search category
        path = self.treestore.get_path(self._categories[(self._search_category, )])
        self.treeview.collapse_all()
        self.treeview.expand_row(path, open_all=False)

    def _handle_drag_get_data(self, widget, drag_context, selection_data, info, time):
        """
        Handle a drag and drop by setting the key to the selection object.
        This will call the destination handler for drag and drop.
        Only call set when the key is valid to ignore DND from categories.
        """
        key = self._get_selected_block_key()
        if key: selection_data.set(selection_data.target, 8, key)

    def _handle_mouse_button_press(self, widget, event):
        """
        Handle the mouse button press.
        If a left double click is detected, call add selected block.
        """
        if event.button == 1 and event.type == gtk.gdk._2BUTTON_PRESS:
            self._add_selected_block()

    def _handle_selection_change(self, selection):
        """
        Handle a selection change in the tree view.
        If a selection changes, set the add button sensitive.
        """
        self._update_add_button()

    def _handle_add_button(self, widget):
        """
        Handle the add button clicked signal.
        Call add selected block.
        """
        self._add_selected_block()
| gpl-3.0 |
tsgit/invenio | modules/bibauthorid/lib/bibauthorid_rabbit.py | 2 | 10561 | from operator import itemgetter
from itertools import cycle, imap, chain, izip
from invenio.bibauthorid_name_utils import compare_names as comp_names, \
create_matchable_name
from invenio import bibauthorid_config as bconfig
from invenio.bibauthorid_backinterface import get_authors_by_name, \
add_signature, get_signatures_of_paper, \
remove_signatures, modify_signature, filter_bibrecs_outside, get_deleted_papers, \
create_new_author_by_signature as new_person_from_signature, get_all_valid_bibrecs, \
remove_papers, get_author_refs_of_paper,\
get_coauthor_refs_of_paper, get_name_by_bibref, \
get_author_by_external_id, update_canonical_names_of_authors, \
update_external_ids_of_authors, remove_empty_authors
from invenio.bibauthorid_matrix_optimization import maximized_mapping
from invenio.bibauthorid_dbinterface import populate_partial_marc_caches
from invenio.bibauthorid_dbinterface import destroy_partial_marc_caches
from invenio.bibauthorid_general_utils import memoized
from invenio.bibtask import task_update_progress
from datetime import datetime
from invenio.dbquery import run_sql
from invenio.bibauthorid_logutils import Logger
# Shorthand used for timestamping throughout this module.
now = datetime.now

# Feature switches: whether to try matching signatures via external author
# identifiers, and which identifier systems to consult.
USE_EXT_IDS = bconfig.RABBIT_USE_EXTERNAL_IDS
EXT_IDS_TO_USE = bconfig.RABBIT_EXTERNAL_IDS_TO_USE

if USE_EXT_IDS:
    # One getter per enabled external-id system; each takes a signature
    # tuple and returns that signature's external id, if any.
    external_id_getters = list()

    if 'InspireID' in EXT_IDS_TO_USE:
        from invenio.bibauthorid_backinterface import get_inspire_id_of_signature
        external_id_getters.append(get_inspire_id_of_signature)

    if 'OrcidID' in EXT_IDS_TO_USE:
        from invenio.bibauthorid_backinterface import get_orcid_id_of_signature
        external_id_getters.append(get_orcid_id_of_signature)

    if 'KAKEN' in EXT_IDS_TO_USE:
        from invenio.bibauthorid_backinterface import get_kaken_id_of_signature
        external_id_getters.append(get_kaken_id_of_signature)

# In-memory cache mapping matchable name -> personid; populated by
# populate_mnames_pids_cache() and torn down by destroy_mnames_pids_cache().
M_NAME_PIDS_CACHE = None

# The first element of this list is the master function
M_NAME_FUNCTIONS = [create_matchable_name]
def populate_mnames_pids_cache():
    """Load the matchable-name -> personid mapping from the database into
    the module-level M_NAME_PIDS_CACHE dictionary."""
    global M_NAME_PIDS_CACHE
    rows = run_sql("select distinct(m_name), personid from aidPERSONIDPAPERS where flag>-2")
    M_NAME_PIDS_CACHE = dict(rows)
def destroy_mnames_pids_cache():
    """Discard the cached matchable-name -> personid mapping."""
    global M_NAME_PIDS_CACHE
    M_NAME_PIDS_CACHE = None
def rabbit(bibrecs=None, check_invalid_papers=False,
           personids_to_update_extids=None, verbose=False):
    """Synchronize author signatures with the current MARC metadata.

    For each record, compares the signatures present in the paper's MARC
    (100/700 fields) with the signatures already attributed to authors,
    moves renamed signatures to their best-matching author, removes stale
    ones and attributes brand-new ones — by external id, by matchable
    name, or by creating a new author as a last resort.

    :param bibrecs: record ids to process; falsy means all valid records
    :param check_invalid_papers: also purge signatures of records that no
        longer exist
    :param personids_to_update_extids: extra author ids whose external ids
        and canonical names should be refreshed at the end
    :param verbose: enable verbose logging
    """
    logger = Logger("Rabbit")

    if verbose:
        logger.verbose = True

    if not bibrecs:
        logger.log("Running on all records")
    else:
        logger.log("Running on %s " % (str(bibrecs)))

    populate_mnames_pids_cache()

    global M_NAME_PIDS_CACHE

    # Name comparison is expensive; memoize it, with the pair sorted so
    # (a, b) and (b, a) share one cache entry.
    memoized_compare_names = memoized(comp_names)
    compare_names = lambda x, y: memoized_compare_names(*sorted((x, y)))

    def find_pids_by_matchable_name_with_cache(matchable_name):
        # Look up the in-memory cache first; fall back to the database and
        # cache the first hit for subsequent lookups.
        try:
            matched_pids = [M_NAME_PIDS_CACHE[matchable_name]]
        except KeyError:
            matched_pids = get_authors_by_name(matchable_name,
                                               use_matchable_name=True)
            if matched_pids:
                M_NAME_PIDS_CACHE[matchable_name] = matched_pids[0]
        return matched_pids

    if USE_EXT_IDS:

        def get_matched_pids_by_external_ids(sig, rec, pids_having_rec):
            '''
            This function returns all the matched pids after iterating
            through all available external IDs of the system.
            '''
            for get_external_id_of_signature in external_id_getters:
                external_id = get_external_id_of_signature(sig + (rec,))
                if external_id:
                    matched_pids = list(get_author_by_external_id(external_id[0]))
                    # Discard the match if that author already owns another
                    # signature on this very paper.
                    if matched_pids and int(matched_pids[0][0]) in pids_having_rec:
                        matched_pids = list()
                    return matched_pids

    # Minimum name-similarity score for re-attributing a moved signature.
    threshold = 0.8

    if not bibrecs or check_invalid_papers:
        all_bibrecs = get_all_valid_bibrecs()

        if not bibrecs:
            bibrecs = all_bibrecs

        if check_invalid_papers:
            filter_bibrecs_outside(all_bibrecs)

    updated_pids = set()
    deleted = frozenset(p[0] for p in get_deleted_papers())

    bibrecs = list(bibrecs)
    for idx, rec in enumerate(bibrecs):

        logger.log("Considering %s" % str(rec))

        if idx % 100 == 0:
            task_update_progress("%d/%d current: %d" % (idx, len(bibrecs), rec))

        # Refresh the MARC cache in chunks of 1000 records.
        if idx % 1000 == 0:
            destroy_partial_marc_caches()
            populate_partial_marc_caches(bibrecs[idx: idx + 1000])

        logger.log(float(idx) / len(bibrecs), "%d/%d" % (idx, len(bibrecs)))

        if rec in deleted:
            remove_papers([rec])
            continue

        author_refs = get_author_refs_of_paper(rec)
        coauthor_refs = get_coauthor_refs_of_paper(rec)

        # Signatures currently present in the MARC: (100, ref) for first
        # authors, (700, ref) for coauthors.
        markrefs = frozenset(chain(izip(cycle([100]), imap(itemgetter(0),
                                                           author_refs)),
                                   izip(cycle([700]), imap(itemgetter(0),
                                                           coauthor_refs))))

        personid_rows = [map(int, row[:3]) + [row[4]]
                         for row in get_signatures_of_paper(rec)]
        personidrefs_names = dict(((row[1], row[2]), row[3])
                                  for row in personid_rows)

        personidrefs = frozenset(personidrefs_names.keys())
        new_signatures = list(markrefs - personidrefs)
        old_signatures = list(personidrefs - markrefs)

        new_signatures_names = dict((new, get_name_by_bibref(new))
                                    for new in new_signatures)

        # matrix |new_signatures| X |old_signatures|
        matrix = [[compare_names(new_signatures_names[new],
                                 personidrefs_names[old])
                   for old in old_signatures] for new in new_signatures]

        logger.log(" - Deleted signatures: %s" % str(old_signatures))
        logger.log(" - Added signatures: %s" % str(new_signatures))
        logger.log(" - Matrix: %s" % str(matrix))

        # Optimal assignment between new and old signatures; pairs scoring
        # above the threshold are treated as the same signature renamed.
        #[new_signatures, old_signatures]
        best_match = [(new_signatures[new], old_signatures[old])
                      for new, old, score in maximized_mapping(matrix)
                      if score > threshold]

        logger.log(" - Best match: %s " % str(best_match))

        for new, old in best_match:
            logger.log(" - - Moving signature: %s on %s to %s as %s" %
                       (old, rec, new, new_signatures_names[new]))
            modify_signature(old, rec, new, new_signatures_names[new])

        remove_signatures(tuple(list(old) + [rec]) for old in old_signatures)

        not_matched = frozenset(new_signatures) - frozenset(map(itemgetter(0),
                                                                best_match))

        remaining_personid_rows = ([x for x in personid_rows
                                    if x[1:3] in old_signatures])
        pids_having_rec = set([int(row[0]) for row in remaining_personid_rows])
        logger.log(" - Not matched: %s" % str(not_matched))

        if not_matched:
            used_pids = set(r[0] for r in personid_rows)

            for sig in not_matched:
                name = new_signatures_names[sig]
                matchable_name = create_matchable_name(name)
                matched_pids = list()
                # First attempt: match by external id (INSPIRE/ORCID/...).
                if USE_EXT_IDS:
                    matched_pids = get_matched_pids_by_external_ids(sig, rec, pids_having_rec)
                    if matched_pids:
                        add_signature(list(sig) + [rec], name,
                                      matched_pids[0][0], m_name=matchable_name)
                        M_NAME_PIDS_CACHE[matchable_name] = matched_pids[0][0]
                        updated_pids.add(matched_pids[0][0])
                        pids_having_rec.add(matched_pids[0][0])
                        continue

                # Second attempt: match by matchable name, trying each
                # registered name-normalization function in turn.
                matched_pids = find_pids_by_matchable_name_with_cache(matchable_name)
                if not matched_pids:
                    for matching_function in M_NAME_FUNCTIONS[1:]:
                        matchable_name = matching_function(name)
                        matched_pids = find_pids_by_matchable_name_with_cache(matchable_name)
                        if matched_pids:
                            break

                matched_pids = [p for p in matched_pids if int(p) not in used_pids]

                best_matched_pid = None
                for matched_pid in matched_pids:
                    # Because of the wrongly labeled data in the db, all
                    # of the possible choices have to be checked. If one of the
                    # coauthors, who had his signature already considered, claimed
                    # in the past one of the signatures of currently considered
                    # author, the algorithm will think that two signatures belong
                    # to the same person, and, will create an unnecessary new
                    # profile.
                    if not int(matched_pid) in pids_having_rec:
                        best_matched_pid = matched_pid
                        break

                # Last resort: no existing author fits, create a new one.
                if not best_matched_pid:
                    new_pid = new_person_from_signature(list(sig) + [rec],
                                                        name, matchable_name)
                    M_NAME_PIDS_CACHE[matchable_name] = new_pid
                    used_pids.add(new_pid)
                    updated_pids.add(new_pid)
                else:
                    add_signature(list(sig) + [rec], name,
                                  best_matched_pid, m_name=matchable_name)
                    M_NAME_PIDS_CACHE[matchable_name] = best_matched_pid
                    used_pids.add(best_matched_pid)
                    updated_pids.add(best_matched_pid)
                    pids_having_rec.add(best_matched_pid)

        logger.log('Finished with %s' % str(rec))

    logger.update_status_final()

    destroy_partial_marc_caches()

    if personids_to_update_extids:
        updated_pids |= set(personids_to_update_extids)
    if updated_pids:  # an empty set will update all canonical_names
        update_canonical_names_of_authors(updated_pids)
        update_external_ids_of_authors(updated_pids,
                                       limit_to_claimed_papers=bconfig.LIMIT_EXTERNAL_IDS_COLLECTION_TO_CLAIMED_PAPERS,
                                       force_cache_tables=True)

    destroy_partial_marc_caches()
    destroy_mnames_pids_cache()

    remove_empty_authors()

    task_update_progress("Done!")
| gpl-2.0 |
stosdev/zebra-supervisor | judge/models/profile.py | 1 | 1441 | # -*- coding: utf-8 -*-
"""Module containing judge user profiles and various utilities."""
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
@python_2_unicode_compatible
class Profile(models.Model):

    """The users profile class.

    Extends the Django auth user with contest-registration details; every
    field besides the user link is optional.
    """

    # One profile per user; reachable from the user side as ``user.profile``.
    user = models.OneToOneField(User, related_name='profile')
    institute_name = models.CharField(_("Institute name"), max_length=255,
                                      blank=True, null=True)
    team_name = models.CharField(_("Team name"), max_length=255, blank=True,
                                 null=True)
    room_number = models.CharField(_("Room number"), max_length=10, blank=True,
                                   null=True)
    computer_number = models.CharField(_("Computer number"), max_length=10,
                                       blank=True, null=True)

    class Meta:
        verbose_name = _("Profile")
        verbose_name_plural = _("Profiles")
        app_label = 'judge'

    def __str__(self):
        # Shown wherever the profile is rendered as text (e.g. the admin).
        return u"{}".format(self.user.username)
def create_profile(sender, instance, created, **kwargs):
    """Create an empty profile as soon as a user is created."""
    # post_save fires on every save; only act on the initial insert so an
    # update of an existing user does not try to create a duplicate profile.
    if created:
        Profile.objects.create(user=instance)

# Keep profiles in lock-step with user creation.
post_save.connect(create_profile, sender=User)
| gpl-3.0 |
MDPvis/rlpy | rlpy/Representations/IndependentDiscretization.py | 4 | 2174 | """Independent Discretization"""
from .Representation import Representation
import numpy as np
__copyright__ = "Copyright 2013, RLPy http://acl.mit.edu/RLPy"
__credits__ = ["Alborz Geramifard", "Robert H. Klein", "Christoph Dann",
"William Dabney", "Jonathan P. How"]
__license__ = "BSD 3-Clause"
__author__ = "Alborz Geramifard"
class IndependentDiscretization(Representation):

    """
    Creates a feature for each discrete bin in each dimension; the feature
    vector for a given state is comprised of binary features, where only the
    single feature in a particular dimension is 1, all others 0.
    I.e., in a particular state, the sum of all elements of a feature vector
    equals the number of dimensions in the state space.
    Note that This is the minimum number of binary features required to
    uniquely represent a state in a given finite discrete domain.
    """

    def __init__(self, domain, discretization=20):
        """
        :param domain: the problem domain whose states are represented
        :param discretization: number of bins per continuous dimension
        """
        self.setBinsPerDimension(domain, discretization)
        self.features_num = int(sum(self.bins_per_dim))
        # Highest feature id belonging to each dimension; used to map a
        # flat feature id back to its dimension via searchsorted.
        self.maxFeatureIDperDimension = np.cumsum(self.bins_per_dim) - 1
        super(IndependentDiscretization, self).__init__(domain, discretization)

    def phi_nonTerminal(self, s):
        """Return the binary feature vector for non-terminal state ``s``
        (exactly one active feature per dimension)."""
        F_s = np.zeros(self.features_num, 'bool')
        F_s[self.activeInitialFeatures(s)] = 1
        return F_s

    def getDimNumber(self, f):
        # Returns the dimension number corresponding to this feature
        dim = np.searchsorted(self.maxFeatureIDperDimension, f)
        return dim

    def getFeatureName(self, feat_id):
        """Return a human-readable name '<dimension>=<bin index>' for
        ``feat_id``, or None when the domain declares no DimNames.

        Fixed: the original computed the name but never returned it (and
        left a debug ``print`` statement behind), so callers always got
        None.
        """
        if hasattr(self.domain, 'DimNames'):
            dim = np.searchsorted(self.maxFeatureIDperDimension, feat_id)
            # Find the index of the feature in the corresponding dimension
            index_in_dim = feat_id
            if dim != 0:
                index_in_dim = feat_id - self.maxFeatureIDperDimension[dim - 1]
            f_name = self.domain.DimNames[dim] + '=' + str(index_in_dim)
            return f_name

    def featureType(self):
        """Features are binary."""
        return bool
| bsd-3-clause |
johndpope/tensorflow | tensorflow/python/summary/writer/event_file_writer.py | 104 | 5848 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Writes events to disk in a logdir."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import threading
import time
import six
from tensorflow.core.util import event_pb2
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.platform import gfile
from tensorflow.python.util import compat
class EventFileWriter(object):
  """Writes `Event` protocol buffers to an event file.

  The `EventFileWriter` class creates an event file in the specified directory,
  and asynchronously writes Event protocol buffers to the file. The Event file
  is encoded using the tfrecord format, which is similar to RecordIO.
  """

  def __init__(self, logdir, max_queue=10, flush_secs=120,
               filename_suffix=None):
    """Creates a `EventFileWriter` and an event file to write to.

    On construction the summary writer creates a new event file in `logdir`.
    This event file will contain `Event` protocol buffers, which are written to
    disk via the add_event method.

    The other arguments to the constructor control the asynchronous writes to
    the event file:

    *  `flush_secs`: How often, in seconds, to flush the added summaries
       and events to disk.
    *  `max_queue`: Maximum number of summaries or events pending to be
       written to disk before one of the 'add' calls block.

    Args:
      logdir: A string. Directory where event file will be written.
      max_queue: Integer. Size of the queue for pending events and summaries.
      flush_secs: Number. How often, in seconds, to flush the
        pending events and summaries to disk.
      filename_suffix: A string. Every event file's name is suffixed with
        `filename_suffix`.
    """
    self._logdir = logdir
    if not gfile.IsDirectory(self._logdir):
      gfile.MakeDirs(self._logdir)
    # Bounded queue: producers block in add_event() once max_queue events
    # are pending, providing back-pressure on summary-heavy callers.
    self._event_queue = six.moves.queue.Queue(max_queue)
    self._ev_writer = pywrap_tensorflow.EventsWriter(
        compat.as_bytes(os.path.join(self._logdir, "events")))
    self._flush_secs = flush_secs
    # The sentinel is a dedicated Event instance; the worker compares by
    # identity to know when to terminate.
    self._sentinel_event = self._get_sentinel_event()
    if filename_suffix:
      self._ev_writer.InitWithSuffix(compat.as_bytes(filename_suffix))
    self._closed = False
    self._worker = _EventLoggerThread(self._event_queue, self._ev_writer,
                                      self._flush_secs, self._sentinel_event)

    self._worker.start()

  def _get_sentinel_event(self):
    """Generate a sentinel event for terminating worker."""
    return event_pb2.Event()

  def get_logdir(self):
    """Returns the directory where event file will be written."""
    return self._logdir

  def reopen(self):
    """Reopens the EventFileWriter.

    Can be called after `close()` to add more events in the same directory.
    The events will go into a new events file.

    Does nothing if the EventFileWriter was not closed.
    """
    if self._closed:
      # A fresh worker is needed: the previous one exited on the sentinel.
      self._worker = _EventLoggerThread(self._event_queue, self._ev_writer,
                                        self._flush_secs, self._sentinel_event)
      self._worker.start()
      self._closed = False

  def add_event(self, event):
    """Adds an event to the event file.

    Args:
      event: An `Event` protocol buffer.
    """
    if not self._closed:
      self._event_queue.put(event)

  def flush(self):
    """Flushes the event file to disk.

    Call this method to make sure that all pending events have been written to
    disk.
    """
    # join() blocks until the worker has called task_done() for every
    # queued event, i.e. everything queued so far has reached the writer.
    self._event_queue.join()
    self._ev_writer.Flush()

  def close(self):
    """Flushes the event file to disk and close the file.

    Call this method when you do not need the summary writer anymore.
    """
    # The sentinel must be queued before flush() so the worker drains the
    # queue and terminates; only then is it safe to close the C++ writer.
    self.add_event(self._sentinel_event)
    self.flush()
    self._worker.join()
    self._ev_writer.Close()
    self._closed = True
class _EventLoggerThread(threading.Thread):
  """Thread that logs events."""

  def __init__(self, queue, ev_writer, flush_secs, sentinel_event):
    """Creates an _EventLoggerThread.

    Args:
      queue: A Queue from which to dequeue events.
      ev_writer: An event writer. Used to log brain events for
       the visualizer.
      flush_secs: How often, in seconds, to flush the
        pending file to disk.
      sentinel_event: A sentinel element in queue that tells this thread to
        terminate.
    """
    threading.Thread.__init__(self)
    # Daemon thread: never keeps the interpreter alive on shutdown.
    self.daemon = True
    self._queue = queue
    self._ev_writer = ev_writer
    self._flush_secs = flush_secs
    # The first event will be flushed immediately.
    self._next_event_flush_time = 0
    self._sentinel_event = sentinel_event

  def run(self):
    """Dequeue and write events until the sentinel is seen."""
    while True:
      event = self._queue.get()
      # Identity comparison: the sentinel is one specific Event object.
      if event is self._sentinel_event:
        self._queue.task_done()
        break
      try:
        self._ev_writer.WriteEvent(event)
        # Flush the event writer every so often.
        now = time.time()
        if now > self._next_event_flush_time:
          self._ev_writer.Flush()
          # Do it again in two minutes.
          self._next_event_flush_time = now + self._flush_secs
      finally:
        # task_done() must run even if WriteEvent raises, or a flush()
        # joining the queue would deadlock.
        self._queue.task_done()
| apache-2.0 |
backmari/moose | python/chigger/tests/line/line.py | 6 | 1115 | #!/usr/bin/env python
#pylint: disable=missing-docstring
#################################################################
# DO NOT MODIFY THIS HEADER #
# MOOSE - Multiphysics Object Oriented Simulation Environment #
# #
# (c) 2010 Battelle Energy Alliance, LLC #
# ALL RIGHTS RESERVED #
# #
# Prepared by Battelle Energy Alliance, LLC #
# Under Contract No. DE-AC07-05ID14517 #
# With the U. S. Department of Energy #
# #
# See COPYRIGHT for full restrictions #
#################################################################
import chigger
# Build a two-point line series and place it on a graph.
line = chigger.graphs.Line(x=[0,1], y=[2,4])
graph = chigger.graphs.Graph(line)
# Render in a 300x300 window (test mode), save a screenshot, then enter
# the interactive loop.
window = chigger.RenderWindow(graph, size=[300,300], test=True)
window.write('line.png')
window.start()
| lgpl-2.1 |
phobson/bokeh | scripts/version_update.py | 1 | 2576 | import os
import re
import sys
def check_input(new_ver):
    """ Ensure that user input matches the format X.X.X

    Returns True (truthy, so the caller aborts) when *new_ver* is not a
    dotted triple of integers such as '0.6.0'; returns None when valid.
    """
    # Anchored pattern with escaped dots. The previous r'\d+.\d+.\d+' was
    # unanchored and used '.' as "any character", so inputs like '1a2b3'
    # or '1.2.3.4' were accepted as valid version numbers.
    pat = r'^\d+\.\d+\.\d+$'
    if not re.match(pat, new_ver):
        print("The new version must be in the format X.X.X (ex. '0.6.0')")
        return True
def version_update(new_ver, file_array):
    """ Replace existing version/release number in an array of files
    with a user-supplied version number (new_ver)"""
    pat = r"""(release|version)([\" ][:=] [\"\'])(\d+.\d+.\d+)([\"\'])"""

    # List that will contain any files where the version number was successfully replaced
    replaced = []

    # Set as false until a match is found and replaced in the loop below
    early_ver = False

    for ver_file in file_array:
        # Use context managers so every handle is closed and writes are
        # flushed; the original leaked the write handle entirely.
        with open(ver_file) as f:
            text = f.read()
        match_obj = re.search(pat, text)

        if match_obj:
            early_ver = match_obj.group(3)
            with open(ver_file, 'w') as f:
                f.write(re.sub(pat, r'\g<1>\g<2>%s\g<4>' % new_ver, text))
            replaced.append(ver_file)
        else:
            print("Unable to find version number matching expected format 'X.X.X' in %s" % ver_file)

    if early_ver:
        print("Version number changed from %s to %s in \n%s" % (early_ver, new_ver, replaced))
def version_add(new_ver, file_array):
    """Add last version number in an array of files
    with a user-supplied last version number"""
    for ver_file in file_array:
        # Read the whole file, splice the new version into any line that
        # declares ALL_VERSIONS, then write everything back.
        with open(ver_file, "r") as handle:
            lines = handle.readlines()
        for idx, text in enumerate(lines):
            if "ALL_VERSIONS" in text:
                prefix, suffix = text.split("[")
                lines[idx] = prefix + "['{}', ".format(new_ver) + suffix
        with open(ver_file, "w") as handle:
            handle.writelines(lines)
        print("Version number {new_ver} added in {ver_file}".format(new_ver=new_ver, ver_file=ver_file))
if __name__ == '__main__':
    # Usage: version_update.py <new_version> <previous_version>
    if not len(sys.argv) == 3:
        print("Please provide the new version number and the previous one.")
        sys.exit(1)
    # Paths below are relative to the repository root; the script itself
    # lives one directory down (scripts/).
    os.chdir('../')
    files_to_update = ['bokehjs/src/coffee/version.coffee', 'bokehjs/package.json']
    files_to_add = ['sphinx/source/conf.py']
    updated_version = sys.argv[1]
    # NOTE(review): last_version is required on the command line but never
    # used below — confirm whether it was meant to feed version_add.
    last_version = sys.argv[2]
    # check_input returns True (truthy) on a malformed version string.
    if check_input(updated_version):
        sys.exit(1)
    version_update(updated_version, files_to_update)
    version_add(updated_version, files_to_add)
| bsd-3-clause |
pigeonflight/strider-plone | docker/appengine/lib/django-1.2/django/contrib/gis/tests/test_geoip.py | 290 | 4204 | import os, unittest
from django.db import settings
from django.contrib.gis.geos import GEOSGeometry
from django.contrib.gis.utils import GeoIP, GeoIPException
# Note: Requires use of both the GeoIP country and city datasets.
# The GEOIP_DATA path should be the only setting set (the directory
# should contain links or the actual database files 'GeoIP.dat' and
# 'GeoLiteCity.dat'.
class GeoIPTest(unittest.TestCase):
    """Tests for the GeoIP wrapper.

    NOTE(review): these tests appear to require the MaxMind country and
    city datasets on disk (settings.GEOIP_PATH) and working DNS for the
    hostname-based queries — confirm before running in CI.
    """

    def test01_init(self):
        "Testing GeoIP initialization."
        g1 = GeoIP()  # Everything inferred from GeoIP path
        path = settings.GEOIP_PATH
        g2 = GeoIP(path, 0)  # Passing in data path explicitly.
        g3 = GeoIP.open(path, 0)  # MaxMind Python API syntax.

        for g in (g1, g2, g3):
            self.assertEqual(True, bool(g._country))
            self.assertEqual(True, bool(g._city))

        # Only passing in the location of one database.
        city = os.path.join(path, 'GeoLiteCity.dat')
        cntry = os.path.join(path, 'GeoIP.dat')
        g4 = GeoIP(city, country='')
        self.assertEqual(None, g4._country)
        g5 = GeoIP(cntry, city='')
        self.assertEqual(None, g5._city)

        # Improper parameters.
        bad_params = (23, 'foo', 15.23)
        for bad in bad_params:
            self.assertRaises(GeoIPException, GeoIP, cache=bad)
            if isinstance(bad, basestring):
                e = GeoIPException
            else:
                e = TypeError
            self.assertRaises(e, GeoIP, bad, 0)

    def test02_bad_query(self):
        "Testing GeoIP query parameter checking."
        cntry_g = GeoIP(city='<foo>')
        # No city database available, these calls should fail.
        self.assertRaises(GeoIPException, cntry_g.city, 'google.com')
        self.assertRaises(GeoIPException, cntry_g.coords, 'yahoo.com')
        # Non-string query should raise TypeError
        self.assertRaises(TypeError, cntry_g.country_code, 17)
        self.assertRaises(TypeError, cntry_g.country_name, GeoIP)

    def test03_country(self):
        "Testing GeoIP country querying methods."
        g = GeoIP(city='<foo>')

        fqdn = 'www.google.com'
        addr = '12.215.42.19'

        for query in (fqdn, addr):
            for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
                self.assertEqual('US', func(query))
            for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
                self.assertEqual('United States', func(query))
            self.assertEqual({'country_code' : 'US', 'country_name' : 'United States'},
                             g.country(query))

    def test04_city(self):
        "Testing GeoIP city querying methods."
        g = GeoIP(country='<foo>')

        addr = '130.80.29.3'
        fqdn = 'chron.com'
        for query in (fqdn, addr):
            # Country queries should still work.
            for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
                self.assertEqual('US', func(query))
            for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
                self.assertEqual('United States', func(query))
            self.assertEqual({'country_code' : 'US', 'country_name' : 'United States'},
                             g.country(query))

            # City information dictionary.
            d = g.city(query)
            self.assertEqual('USA', d['country_code3'])
            self.assertEqual('Houston', d['city'])
            self.assertEqual('TX', d['region'])
            self.assertEqual(713, d['area_code'])
            geom = g.geos(query)
            self.failIf(not isinstance(geom, GEOSGeometry))
            # Coordinates come back (lon, lat); build a (lon, lat) tuple
            # from lat_lon for the uniform comparison below.
            lon, lat = (-95.3670, 29.7523)
            lat_lon = g.lat_lon(query)
            lat_lon = (lat_lon[1], lat_lon[0])
            for tup in (geom.tuple, g.coords(query), g.lon_lat(query), lat_lon):
                self.assertAlmostEqual(lon, tup[0], 4)
                self.assertAlmostEqual(lat, tup[1], 4)
def suite():
    """Assemble the test suite containing every GeoIP test case."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(GeoIPTest))
    return tests
def run(verbosity=2):
    """Execute the GeoIP suite with the requested runner verbosity."""
    runner = unittest.TextTestRunner(verbosity=verbosity)
    runner.run(suite())
| mit |
jonathan-beard/edx-platform | lms/djangoapps/courseware/tests/tests.py | 115 | 6821 | """
Test for LMS courseware app.
"""
from textwrap import dedent
from unittest import TestCase
from django.core.urlresolvers import reverse
import mock
from nose.plugins.attrib import attr
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from courseware.tests.helpers import LoginEnrollmentTestCase
from xmodule.modulestore.tests.django_utils import TEST_DATA_XML_MODULESTORE as XML_MODULESTORE
from xmodule.modulestore.tests.django_utils import TEST_DATA_MIXED_TOY_MODULESTORE as TOY_MODULESTORE
from lms.djangoapps.lms_xblock.field_data import LmsFieldData
from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
@attr('shard_1')
class ActivateLoginTest(LoginEnrollmentTestCase):
    """
    Test logging in and logging out.
    """
    def setUp(self):
        super(ActivateLoginTest, self).setUp()
        # setup_user() presumably registers, activates and logs in a test
        # user — the login itself is the behavior under test here.
        self.setup_user()

    def test_activate_login(self):
        """
        Test login -- the setup function does all the work.
        """
        pass

    def test_logout(self):
        """
        Test logout -- setup function does login.
        """
        self.logout()
class PageLoaderTestCase(LoginEnrollmentTestCase):
    """
    Base class that adds a function to load all pages in a modulestore.
    """

    def check_all_pages_load(self, course_key):
        """
        Assert that all pages in the course load correctly.
        `course_id` is the ID of the course to check.
        """
        store = modulestore()

        # Enroll in the course before trying to access pages
        course = store.get_course(course_key)
        self.enroll(course, True)

        # Search for items in the course
        items = store.get_items(course_key)
        if len(items) < 1:
            self.fail('Could not retrieve any items from course')

        # Try to load each item in the course, dispatching on the item's
        # category to the Django view that renders it.
        for descriptor in items:
            if descriptor.location.category == 'about':
                self._assert_loads('about_course',
                                   {'course_id': course_key.to_deprecated_string()},
                                   descriptor)
            elif descriptor.location.category == 'static_tab':
                kwargs = {'course_id': course_key.to_deprecated_string(),
                          'tab_slug': descriptor.location.name}
                self._assert_loads('static_tab', kwargs, descriptor)
            elif descriptor.location.category == 'course_info':
                self._assert_loads('info', {'course_id': course_key.to_deprecated_string()},
                                   descriptor)
            else:
                # Everything else is reached through the generic jump_to
                # view, which redirects to the courseware location.
                kwargs = {'course_id': course_key.to_deprecated_string(),
                          'location': descriptor.location.to_deprecated_string()}
                self._assert_loads('jump_to', kwargs, descriptor,
                                   expect_redirect=True,
                                   check_content=True)

    def _assert_loads(self, django_url, kwargs, descriptor,
                      expect_redirect=False,
                      check_content=False):
        """
        Assert that the url loads correctly.
        If expect_redirect, then also check that we were redirected.
        If check_content, then check that we don't get
        an error message about unavailable modules.
        """
        url = reverse(django_url, kwargs=kwargs)
        response = self.client.get(url, follow=True)

        if response.status_code != 200:
            self.fail('Status %d for page %s' %
                      (response.status_code, descriptor.location))

        if expect_redirect:
            # follow=True records each hop; the first hop must be a 302.
            self.assertEqual(response.redirect_chain[0][1], 302)

        if check_content:
            self.assertNotContains(response, "this module is temporarily unavailable")
            self.assertNotIsInstance(descriptor, ErrorDescriptor)
@attr('shard_1')
class TestXmlCoursesLoad(ModuleStoreTestCase, PageLoaderTestCase):
    """
    Check that all pages in test courses load properly from XML.
    """
    # Force the XML-backed modulestore for this test class.
    MODULESTORE = XML_MODULESTORE

    def setUp(self):
        super(TestXmlCoursesLoad, self).setUp()
        self.setup_user()

    def test_toy_course_loads(self):
        # Load one of the XML based courses
        # Our test mapping rules allow the MixedModuleStore
        # to load this course from XML, not Mongo.
        self.check_all_pages_load(SlashSeparatedCourseKey('edX', 'toy', '2012_Fall'))
@attr('shard_1')
class TestMongoCoursesLoad(ModuleStoreTestCase, PageLoaderTestCase):
    """
    Check that all pages in test courses load properly from Mongo.
    """
    # Force the Mongo-backed toy modulestore for this test class.
    MODULESTORE = TOY_MODULESTORE

    def setUp(self):
        super(TestMongoCoursesLoad, self).setUp()
        self.setup_user()

    @mock.patch('xmodule.course_module.requests.get')
    def test_toy_textbooks_loads(self, mock_get):
        # Stub the HTTP fetch of the textbook's table of contents so the
        # test never touches the network.
        mock_get.return_value.text = dedent("""
            <?xml version="1.0"?><table_of_contents>
            <entry page="5" page_label="ii" name="Table of Contents"/>
            </table_of_contents>
        """).strip()

        location = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall').make_usage_key('course', '2012_Fall')
        course = self.store.get_item(location)
        self.assertGreater(len(course.textbooks), 0)
@attr('shard_1')
class TestDraftModuleStore(ModuleStoreTestCase):
    """Regression test for the draft modulestore's get_items signature."""

    def test_get_items_with_course_items(self):
        store = modulestore()

        # fix was to allow get_items() to take the course_id parameter
        store.get_items(SlashSeparatedCourseKey('abc', 'def', 'ghi'), qualifiers={'category': 'vertical'})

        # test success is just getting through the above statement.
        # The bug was that 'course_id' argument was
        # not allowed to be passed in (i.e. was throwing exception)
@attr('shard_1')
class TestLmsFieldData(TestCase):
    """
    Tests of the LmsFieldData class
    """
    def test_lms_field_data_wont_nest(self):
        # Verify that if an LmsFieldData is passed into LmsFieldData as the
        # authored_data, that it doesn't produced a nested field data.
        #
        # This fixes a bug where re-use of the same descriptor for many modules
        # would cause more and more nesting, until the recursion depth would be
        # reached on any attribute access

        # pylint: disable=protected-access
        base_authored = mock.Mock()
        base_student = mock.Mock()
        first_level = LmsFieldData(base_authored, base_student)
        second_level = LmsFieldData(first_level, base_student)

        # The second wrapper must unwrap the first, not contain it.
        self.assertEquals(second_level._authored_data, first_level._authored_data)
        self.assertNotIsInstance(second_level._authored_data, LmsFieldData)
| agpl-3.0 |
hariseldon99/archives | dtwa_ising_longrange/dtwa_ising_longrange/redirect_stdout.py | 2 | 1292 | import os
import sys
import contextlib
def fileno(file_or_fd):
    """Return the integer file descriptor for *file_or_fd*.

    Accepts a file-like object exposing ``fileno()`` or a raw integer
    descriptor; anything else raises ``ValueError``.
    """
    getter = getattr(file_or_fd, 'fileno', None)
    fd = file_or_fd if getter is None else getter()
    if not isinstance(fd, int):
        raise ValueError("Expected a file (`.fileno()`) or a file descriptor")
    return fd
@contextlib.contextmanager
def stdout_redirected(to=os.devnull, stdout=None):
    """
    Temporarily redirect *stdout* (default sys.stdout) at the file
    descriptor level, so output from C extensions and subprocesses is
    captured too. *to* may be an open file, a raw descriptor, or a
    filename.

    http://stackoverflow.com/a/22434262/190597 (J.F. Sebastian)
    """
    if stdout is None:
        stdout = sys.stdout

    stdout_fd = fileno(stdout)
    # copy stdout_fd before it is overwritten
    # NOTE: `copied` is inheritable on Windows when duplicating a standard stream
    with os.fdopen(os.dup(stdout_fd), 'wb') as copied:
        stdout.flush()  # flush library buffers that dup2 knows nothing about
        try:
            os.dup2(fileno(to), stdout_fd)  # $ exec >&to
        except ValueError:  # `to` was a filename, not a file/descriptor
            with open(to, 'wb') as to_file:
                os.dup2(to_file.fileno(), stdout_fd)  # $ exec > to
        try:
            yield stdout  # allow code to be run with the redirected stdout
        finally:
            # restore stdout to its previous value
            # NOTE: dup2 makes stdout_fd inheritable unconditionally
            stdout.flush()
            os.dup2(copied.fileno(), stdout_fd)  # $ exec >&copied
| gpl-2.0 |
ashray/VTK-EVM | ThirdParty/Twisted/twisted/internet/wxsupport.py | 60 | 1445 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
"""Old method of wxPython support for Twisted.
twisted.internet.wxreactor is probably a better choice.
To use::
| # given a wxApp instance called myWxAppInstance:
| from twisted.internet import wxsupport
| wxsupport.install(myWxAppInstance)
Use Twisted's APIs for running and stopping the event loop, don't use
wxPython's methods.
On Windows the Twisted event loop might block when dialogs are open
or menus are selected.
Maintainer: Itamar Shtull-Trauring
"""
import warnings
warnings.warn("wxsupport is not fully functional on Windows, wxreactor is better.")
# wxPython imports
from wxPython.wx import wxApp
# twisted imports
from twisted.internet import reactor
from twisted.python.runtime import platformType
class wxRunner:
    """Make sure GUI events are handled.

    Periodically pumps the wx event queue from inside the Twisted reactor
    so both event loops make progress.
    """

    def __init__(self, app):
        # The wxApp whose events we dispatch on each tick.
        self.app = app

    def run(self):
        """
        Execute pending WX events followed by WX idle events and
        reschedule.
        """
        # run wx events
        while self.app.Pending():
            self.app.Dispatch()

        # run wx idle events
        self.app.ProcessIdle()
        # Re-arm ourselves on the reactor: ~50 polls per second.
        reactor.callLater(0.02, self.run)
def install(app):
    """Install the wxPython support, given a wxApp instance"""
    # Kick off the periodic wx event pump on the Twisted reactor; run()
    # reschedules itself from then on.
    reactor.callLater(0.02, wxRunner(app).run)
__all__ = ["install"]
| bsd-3-clause |
nwjs/chromium.src | third_party/android_platform/development/scripts/stack_core.py | 2 | 24484 | #!/usr/bin/env python
#
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""stack symbolizes native crash dumps."""
import itertools
import logging
import multiprocessing
import os
import re
import struct
import subprocess
import sys
import time
import zipfile
import symbol
from pylib import constants
UNKNOWN = '<unknown>'
HEAP = '[heap]'
STACK = '[stack]'
_DEFAULT_JOBS=8
_CHUNK_SIZE = 1000
_BASE_APK = 'base.apk'
_FALLBACK_SO = 'libchrome.so'
# pylint: disable=line-too-long
_ABI_LINE = re.compile('ABI: \'(?P<abi>[a-z0-9A-Z]+)\'')
_PROCESS_INFO_LINE = re.compile('(pid: [0-9]+, tid: [0-9]+.*)')
# Same as above, but used to extract the pid.
_PROCESS_INFO_PID = re.compile('pid: ([0-9]+)')
_SIGNAL_LINE = re.compile('(signal [0-9]+ \(.*\).*)')
_REGISTER_LINE = re.compile('(([ ]*[0-9a-z]{2} [0-9a-f]{8}){4})')
_THREAD_LINE = re.compile('(.*)(\-\-\- ){15}\-\-\-')
_DALVIK_JNI_THREAD_LINE = re.compile("(\".*\" prio=[0-9]+ tid=[0-9]+ NATIVE.*)")
_DALVIK_NATIVE_THREAD_LINE = re.compile("(\".*\" sysTid=[0-9]+ nice=[0-9]+.*)")
_JAVA_STDERR_LINE = re.compile("([0-9]+)\s+[0-9]+\s+.\s+System.err:\s*(.+)")
_MISC_HEADER = re.compile(
'(?:Tombstone written to:|Abort message:|Revision:|Build fingerprint:).*')
# Matches LOG(FATAL) lines, like the following example:
# [FATAL:source_file.cc(33)] Check failed: !instances_.empty()
_LOG_FATAL_LINE = re.compile('(\[FATAL\:.*\].*)$')
# Note that both trace and value line matching allow for variable amounts of
# whitespace (e.g. \t). This is because we want to allow for the stack
# tool to operate on AndroidFeedback provided system logs. AndroidFeedback
# strips out double spaces that are found in tombstone files and logcat output.
#
# Examples of matched trace lines include lines from tombstone files like:
# #00 pc 001cf42e /data/data/com.my.project/lib/libmyproject.so
# #00 pc 001cf42e /data/data/com.my.project/lib/libmyproject.so (symbol)
# Or lines from AndroidFeedback crash report system logs like:
# 03-25 00:51:05.520 I/DEBUG ( 65): #00 pc 001cf42e /data/data/com.my.project/lib/libmyproject.so
# Please note the spacing differences.
_TRACE_LINE = re.compile(
'(.*)\#(?P<frame>[0-9]+)[ \t]+(..)[ \t]+(0x)?(?P<address>[0-9a-f]{0,16})[ \t]+(?P<lib>[^\r\n \t]*)(?P<symbol_present> \((?P<symbol_name>.*)\))?'
)
# Matches lines emitted by src/base/debug/stack_trace_android.cc, like:
# #00 0x7324d92d /data/app-lib/org.chromium.native_test-1/libbase.cr.so+0x0006992d
# This pattern includes the unused named capture groups <symbol_present> and
# <symbol_name> so that it can interoperate with the |_TRACE_LINE| regex.
_DEBUG_TRACE_LINE = re.compile('(.*)(?P<frame>\#[0-9]+ 0x[0-9a-f]{8,16}) '
'(?P<lib>[^+]+)\+0x(?P<address>[0-9a-f]{8,16})'
'(?P<symbol_present>)(?P<symbol_name>)')
# Examples of matched value lines include:
# bea4170c 8018e4e9 /data/data/com.my.project/lib/libmyproject.so
# bea4170c 8018e4e9 /data/data/com.my.project/lib/libmyproject.so (symbol)
# 03-25 00:51:05.530 I/DEBUG ( 65): bea4170c 8018e4e9 /data/data/com.my.project/lib/libmyproject.so
# Again, note the spacing differences.
_VALUE_LINE = re.compile(
'(.*)([0-9a-f]{8,16})[ \t]+([0-9a-f]{8,16})[ \t]+([^\r\n \t]*)( \((.*)\))?')
# Lines from 'code around' sections of the output will be matched before
# value lines because otherwise the 'code around' sections will be confused as
# value lines.
#
# Examples include:
# 801cf40c ffffc4cc 00b2f2c5 00b2f1c7 00c1e1a8
# 03-25 00:51:05.530 I/DEBUG ( 65): 801cf40c ffffc4cc 00b2f2c5 00b2f1c7 00c1e1a8
_CODE_LINE = re.compile('(.*)[ \t]*[a-f0-9]{8,16}[ \t]*[a-f0-9]{8,16}' +
'[ \t]*[a-f0-9]{8,16}[ \t]*[a-f0-9]{8,16}' +
'[ \t]*[a-f0-9]{8,16}[ \t]*[ \r\n]')
# This pattern is used to find shared library offset in APK.
# Example:
# (offset 0x568000)
_SHARED_LIB_OFFSET_IN_APK = re.compile(' \(offset 0x(?P<offset>[0-9a-f]{0,16})\)')
# pylint: enable=line-too-long
def PrintTraceLines(trace_lines):
  """Print back trace.

  Args:
    trace_lines: list of (address, symbol_with_offset, location) tuples.
  """
  # Pad the symbol column to the longest symbol, capped at 80 characters.
  maxlen = min(80, max(map(lambda tl: len(tl[1]), trace_lines)))
  print
  print 'Stack Trace:'
  print ' RELADDR ' + 'FUNCTION'.ljust(maxlen) + ' FILE:LINE'
  for tl in trace_lines:
    (addr, symbol_with_offset, location) = tl
    normalized = os.path.normpath(location)
    print ' %8s %s %s' % (addr, symbol_with_offset.ljust(maxlen), normalized)
  return
def PrintValueLines(value_lines):
  """Print stack data values.

  Args:
    value_lines: list of (address, value, symbol_with_offset, location)
      tuples.
  """
  # Pad the symbol column to the longest symbol, capped at 80 characters.
  maxlen = min(80, max(map(lambda tl: len(tl[2]), value_lines)))
  print
  print 'Stack Data:'
  print ' ADDR VALUE ' + 'FUNCTION'.ljust(maxlen) + ' FILE:LINE'
  for vl in value_lines:
    (addr, value, symbol_with_offset, location) = vl
    print ' %8s %8s %s %s' % (addr, value, symbol_with_offset.ljust(maxlen),
                              location)
  return
def PrintJavaLines(java_lines):
  """Print java stderr lines from the crashing process.

  Stack-frame lines (those starting with 'at') are indented for readability.
  """
  print
  print('Java stderr from crashing pid '
        '(may identify underlying Java exception):')
  for l in java_lines:
    if l.startswith('at'):
      print ' ',
    print l
def PrintOutput(trace_lines, value_lines, java_lines, more_info):
  """Print the sections of one symbolized crash, skipping empty ones."""
  if trace_lines:
    PrintTraceLines(trace_lines)
  # TODO(cjhopman): it seems that symbol.SymbolInformation always fails to
  # find information for addresses in value_lines in chrome libraries, and so
  # value_lines have little value to us and merely clutter the output.
  # Since information is sometimes contained in these lines (from system
  # libraries), don't completely disable them: show them only on request.
  if value_lines and more_info:
    PrintValueLines(value_lines)
  if java_lines:
    PrintJavaLines(java_lines)
def PrintDivider():
  """Print a separator between two symbolized crash dumps."""
  print
  print '-----------------------------------------------------\n'
def StreamingConvertTrace(_, load_vaddrs, more_info, fallback_monochrome,
                          arch_defined, llvm_symbolizer, apks_directory):
  """Symbolize stacks on the fly as they are read from an input stream.

  Reads logcat lines from stdin, echoes each line, and whenever a native
  stack chunk ends, symbolizes and prints it.

  Args:
    _: unused (kept for signature parity with ConvertTrace).
    load_vaddrs: LOAD segment min_vaddrs keyed on mapped executable.
    more_info: when True, also print stack data (value) lines.
    fallback_monochrome: use libmonochrome.so as the fallback library name.
    arch_defined: True if the ABI was supplied on the command line.
    llvm_symbolizer: symbolizer used to resolve addresses.
    apks_directory: optional directory containing the APK(s) in question.
  """
  if fallback_monochrome:
    global _FALLBACK_SO
    _FALLBACK_SO = 'libmonochrome.so'
  useful_lines = []
  so_dirs = []
  in_stack = False
  def ConvertStreamingChunk():
    # Symbolize and print the stack chunk accumulated in useful_lines.
    logging.info("Stack found. Symbolizing...")
    if so_dirs:
      UpdateLibrarySearchPath(so_dirs)
    # if arch isn't defined in command line, find it from log
    if not arch_defined:
      arch = _FindAbi(useful_lines)
      if arch:
        print 'Symbolizing stack using ABI=' + arch
        symbol.ARCH = arch
    ResolveCrashSymbol(list(useful_lines), more_info, llvm_symbolizer)
  preprocessor = PreProcessLog(load_vaddrs, apks_directory)
  for line in iter(sys.stdin.readline, b''):
    # Echo the raw input line (trailing comma: no extra newline).
    print line,
    maybe_line, maybe_so_dir = preprocessor([line])
    useful_lines.extend(maybe_line)
    so_dirs.extend(maybe_so_dir)
    if in_stack:
      # A non-useful line terminates the current stack chunk.
      if not maybe_line:
        ConvertStreamingChunk()
        so_dirs = []
        useful_lines = []
        in_stack = False
    else:
      if _TRACE_LINE.match(line) or _DEBUG_TRACE_LINE.match(line) or \
         _VALUE_LINE.match(line) or _CODE_LINE.match(line):
        in_stack = True
  if in_stack:
    # Flush the final chunk when the stream ends mid-stack.
    ConvertStreamingChunk()
def ConvertTrace(lines, load_vaddrs, more_info, fallback_monochrome,
                 arch_defined, llvm_symbolizer, apks_directory):
  """Convert strings containing native crash to a stack.

  Args:
    lines: list of byte strings (e.g. from logcat) to symbolize.
    load_vaddrs: LOAD segment min_vaddrs keyed on mapped executable.
    more_info: when True, also print stack data (value) lines.
    fallback_monochrome: use libmonochrome.so as the fallback library name.
    arch_defined: True if the ABI was supplied on the command line.
    llvm_symbolizer: symbolizer used to resolve addresses.
    apks_directory: optional directory containing the APK(s) in question.
  """
  if fallback_monochrome:
    global _FALLBACK_SO
    _FALLBACK_SO = 'libmonochrome.so'
  start = time.time()
  # Split the input into fixed-size chunks so preprocessing can be farmed
  # out to a worker pool (unless disabled via STACK_DISABLE_ASYNC).
  chunks = [lines[i: i+_CHUNK_SIZE] for i in xrange(0, len(lines), _CHUNK_SIZE)]
  use_multiprocessing = len(chunks) > 1 and (
      os.environ.get('STACK_DISABLE_ASYNC') != '1')
  if use_multiprocessing:
    pool = multiprocessing.Pool(processes=_DEFAULT_JOBS)
    results = pool.map(PreProcessLog(load_vaddrs, apks_directory), chunks)
  else:
    results = map(PreProcessLog(load_vaddrs, apks_directory), chunks)
  # Each result is a (useful_lines, so_dirs) pair; merge them in order.
  useful_log = []
  so_dirs = []
  for result in results:
    useful_log += result[0]
    so_dirs += result[1]
  if use_multiprocessing:
    pool.close()
    pool.join()
  end = time.time()
  logging.debug('Finished processing. Elapsed time: %.4fs', (end - start))
  if so_dirs:
    UpdateLibrarySearchPath(so_dirs)
  # if arch isn't defined in command line, find it from log
  if not arch_defined:
    arch = _FindAbi(useful_log)
    if arch:
      print 'Symbolizing stack using ABI:', arch
      symbol.ARCH = arch
  ResolveCrashSymbol(list(useful_log), more_info, llvm_symbolizer)
  end = time.time()
  logging.debug('Finished resolving symbols. Elapsed time: %.4fs',
                (end - start))
class PreProcessLog:
  """Callable wrapper holding per-run state, for multiprocessing.Pool.map."""
  def __init__(self, load_vaddrs, apks_directory):
    """Bind load_vaddrs to the PreProcessLog closure.
    Args:
      load_vaddrs: LOAD segment min_vaddrs keyed on mapped executable
      apks_directory: optional directory containing the APK(s) in question;
        None falls back to <output dir>/apks in _FindSharedLibraryFromAPKs.
    """
    self._load_vaddrs = load_vaddrs
    self._apks_directory = apks_directory
    # This is mapping from apk's offset to shared libraries.
    self._shared_libraries_mapping = dict()
    # The list of directories in which, instead of the default output dir,
    # the shared libraries were found.
    self._so_dirs = []
  def _DetectSharedLibrary(self, lib, symbol_present):
    """Detect the possible shared library from the mapping offset of APK
    Return:
      the shared library in APK if only one is found.
    """
    offset_match = _SHARED_LIB_OFFSET_IN_APK.match(symbol_present)
    if not offset_match:
      return
    offset = offset_match.group('offset')
    # Cache results per (apk path, offset) so each library is resolved once.
    key = '%s:%s' % (lib, offset)
    if self._shared_libraries_mapping.has_key(key):
      soname = self._shared_libraries_mapping[key]
    else:
      soname, host_so = _FindSharedLibraryFromAPKs(constants.GetOutDirectory(),
                                                   self._apks_directory,
                                                   int(offset, 16))
      if soname:
        self._shared_libraries_mapping[key] = soname
        so_dir = os.path.dirname(host_so)
        # Store the directory if it is not the default output dir, so
        # we can update library search path in main process.
        if not os.path.samefile(constants.GetOutDirectory(), so_dir):
          self._so_dirs.append(so_dir)
        logging.info('Detected: %s is %s which is loaded directly from APK.',
                     host_so, soname)
    return soname
  def _AdjustAddress(self, address, lib):
    """Add the vaddr of the library's first LOAD segment to address.
    Args:
      address: symbol address as a hexadecimal string
      lib: path to loaded library
    Returns:
      address+load_vaddrs[key] if lib ends with /key, otherwise address
    """
    for key, offset in self._load_vaddrs.iteritems():
      if lib.endswith('/' + key):
        # Add offset to address, and return the result as a hexadecimal string
        # with the same number of digits as the original. This allows the
        # caller to make a direct textual substitution.
        return ('%%0%dx' % len(address)) % (int(address, 16) + offset)
    return address
  def __call__(self, lines):
    """Preprocess the strings, only keep the useful ones.
    Args:
      lines: a list of byte strings read from logcat
    Returns:
      A (useful_log, so_dirs) pair: the unicode strings related to the
      native crash, and any non-default directories libraries were found in.
    """
    useful_log = []
    for ln in lines:
      # Decode defensively; logcat bytes may contain invalid sequences.
      line = unicode(ln, errors='ignore')
      if (_PROCESS_INFO_LINE.search(line)
          or _SIGNAL_LINE.search(line)
          or _REGISTER_LINE.search(line)
          or _THREAD_LINE.search(line)
          or _DALVIK_JNI_THREAD_LINE.search(line)
          or _DALVIK_NATIVE_THREAD_LINE.search(line)
          or _LOG_FATAL_LINE.search(line)
          or _DEBUG_TRACE_LINE.search(line)
          or _ABI_LINE.search(line)
          or _JAVA_STDERR_LINE.search(line)
          or _MISC_HEADER.search(line)):
        useful_log.append(line)
        continue
      match = _TRACE_LINE.match(line)
      if match:
        lib, symbol_present = match.group('lib', 'symbol_present')
        extension = os.path.splitext(lib)[1]
        if extension == '.so' and '.apk!' in lib:
          # For Android Q+, where trace lines have "...base.apk!libchrome.so",
          # convert the ! to a / so that the line parses like a conventional
          # library line.
          line = line.replace('.apk!', '.apk/')
        elif extension == '.apk' and symbol_present:
          soname = self._DetectSharedLibrary(lib, symbol_present)
          if soname:
            line = line.replace('/' + os.path.basename(lib), '/' + soname)
          elif not self._apks_directory:
            # If the trace line suggests a direct load from APK, replace the
            # APK name with _FALLBACK_SO, unless an APKs directory was
            # explicitly specified (in which case, the correct .so should always
            # be identified, and using a fallback could be misleading).
            line = line.replace('/' + _BASE_APK, '/' + _FALLBACK_SO)
            logging.debug("Can't detect shared library in APK, fallback to" +
                          " library " + _FALLBACK_SO)
        # For trace lines specifically, the address may need to be adjusted
        # to account for relocation packing. This is because debuggerd on
        # pre-M platforms does not understand non-zero vaddr LOAD segments.
        address, lib = match.group('address', 'lib')
        adjusted_address = self._AdjustAddress(address, lib)
        useful_log.append(line.replace(address, adjusted_address, 1))
        continue
      if _CODE_LINE.match(line):
        # Code lines should be ignored. If this were excluded the 'code around'
        # sections would trigger value_line matches.
        continue
      if _VALUE_LINE.match(line):
        useful_log.append(line)
    return useful_log, self._so_dirs
def ResolveCrashSymbol(lines, more_info, llvm_symbolizer):
  """Convert unicode strings which contain a native crash into symbolized
  stack traces, printing each stack as it is completed.

  Args:
    lines: preprocessed unicode log lines (see PreProcessLog.__call__).
    more_info: when True, also print stack data (value) lines.
    llvm_symbolizer: symbolizer used to map (library, address) pairs to
      source symbols and locations.
  """
  trace_lines = []
  value_lines = []
  last_frame = -1
  pid = -1
  # Collects all java exception lines, keyed by pid for later output during
  # native crash handling.
  java_stderr_by_pid = {}
  for line in lines:
    java_stderr_match = _JAVA_STDERR_LINE.search(line)
    if java_stderr_match:
      pid, msg = java_stderr_match.groups()
      java_stderr_by_pid.setdefault(pid, []).append(msg)
  for line in lines:
    # AndroidFeedback adds zero width spaces into its crash reports. These
    # should be removed or the regular expressions will fail to match.
    process_header = _PROCESS_INFO_LINE.search(line)
    signal_header = _SIGNAL_LINE.search(line)
    register_header = _REGISTER_LINE.search(line)
    thread_header = _THREAD_LINE.search(line)
    dalvik_jni_thread_header = _DALVIK_JNI_THREAD_LINE.search(line)
    dalvik_native_thread_header = _DALVIK_NATIVE_THREAD_LINE.search(line)
    log_fatal_header = _LOG_FATAL_LINE.search(line)
    misc_header = _MISC_HEADER.search(line)
    if (process_header or signal_header or register_header or thread_header or
        dalvik_jni_thread_header or dalvik_native_thread_header or
        log_fatal_header or misc_header):
      # A new header means the previous stack (if any) is complete: flush it.
      if trace_lines or value_lines:
        java_lines = []
        if pid != -1 and pid in java_stderr_by_pid:
          java_lines = java_stderr_by_pid[pid]
        PrintOutput(trace_lines, value_lines, java_lines, more_info)
        PrintDivider()
        trace_lines = []
        value_lines = []
        last_frame = -1
        pid = -1
      if process_header:
        # Track the last reported pid to find java exceptions.
        pid = _PROCESS_INFO_PID.search(process_header.group(1)).group(1)
        print process_header.group(1)
      if signal_header:
        print signal_header.group(1)
      if register_header:
        print register_header.group(1)
      if thread_header:
        print thread_header.group(1)
      if dalvik_jni_thread_header:
        print dalvik_jni_thread_header.group(1)
      if dalvik_native_thread_header:
        print dalvik_native_thread_header.group(1)
      if log_fatal_header:
        print log_fatal_header.group(1)
      if misc_header:
        print misc_header.group(0)
      continue
    match = _TRACE_LINE.match(line) or _DEBUG_TRACE_LINE.match(line)
    if match:
      frame, code_addr, area, _, symbol_name = match.group(
          'frame', 'address', 'lib', 'symbol_present', 'symbol_name')
      logging.debug('Found trace line: %s' % line.strip())
      # NOTE(review): |frame| is a string from the regex while |last_frame|
      # starts as the int -1; this mixed-type comparison relies on Python 2
      # ordering semantics and would raise TypeError on Python 3.
      if frame <= last_frame and (trace_lines or value_lines):
        # Frame number reset: a new stack started without a header.
        java_lines = []
        if pid != -1 and pid in java_stderr_by_pid:
          java_lines = java_stderr_by_pid[pid]
        PrintOutput(trace_lines, value_lines, java_lines, more_info)
        PrintDivider()
        trace_lines = []
        value_lines = []
        pid = -1
      last_frame = frame
      if area == UNKNOWN or area == HEAP or area == STACK:
        trace_lines.append((code_addr, '', area))
      else:
        logging.debug('Identified lib: %s' % area)
        # If a calls b which further calls c and c is inlined to b, we want to
        # display "a -> b -> c" in the stack trace instead of just "a -> c"
        # To use llvm symbolizer, the hexadecimal address has to start with 0x.
        info = llvm_symbolizer.GetSymbolInformation(
            os.path.join(symbol.SYMBOLS_DIR, symbol.TranslateLibPath(area)),
            '0x' + code_addr)
        logging.debug('symbol information: %s' % info)
        nest_count = len(info) - 1
        for source_symbol, source_location in info:
          if nest_count > 0:
            # Inlined frames are marked with an arrow instead of an address.
            nest_count = nest_count - 1
            trace_lines.append(('v------>', source_symbol, source_location))
          elif '<UNKNOWN>' in source_symbol and symbol_name:
            # If the symbolizer couldn't find a symbol name, but the trace had
            # one, use what the trace had.
            trace_lines.append((code_addr, symbol_name, source_location))
          else:
            trace_lines.append((code_addr,
                                source_symbol,
                                source_location))
    match = _VALUE_LINE.match(line)
    if match:
      (_, addr, value, area, _, symbol_name) = match.groups()
      if area == UNKNOWN or area == HEAP or area == STACK or not area:
        value_lines.append((addr, value, '', area))
      else:
        info = llvm_symbolizer.GetSymbolInformation(
            os.path.join(symbol.SYMBOLS_DIR, symbol.TranslateLibPath(area)),
            '0x' + value)
        source_symbol, source_location = info.pop()
        value_lines.append((addr,
                            value,
                            source_symbol,
                            source_location))
  # Flush the final stack after the input is exhausted.
  java_lines = []
  if pid != -1 and pid in java_stderr_by_pid:
    java_lines = java_stderr_by_pid[pid]
  PrintOutput(trace_lines, value_lines, java_lines, more_info)
def UpdateLibrarySearchPath(so_dirs):
  """Point the symbolizer at the directory the shared libraries came from.

  All entries of so_dirs must name the same directory, since a single
  directory represents the CPU architecture of the crash.

  Args:
    so_dirs: list of directories in which shared libraries were found;
      may contain duplicates and may be empty (no-op).

  Raises:
    Exception: if so_dirs contains more than one distinct directory.
  """
  unique_dirs = set(so_dirs)
  if not unique_dirs:
    return
  if len(unique_dirs) > 1:
    # Bug fix: actually %-format the message. Previously the format string
    # and repr() were passed as two separate Exception args, so the message
    # was never interpolated.
    raise Exception("Found different so dirs, they are %s" % repr(unique_dirs))
  search_path = unique_dirs.pop()
  logging.info("Search libraries in %s", search_path)
  symbol.SetSecondaryAbiOutputPath(search_path)
def GetUncompressedSharedLibraryFromAPK(apkname, offset):
  """Check if there is an uncompressed shared library at a specific APK offset.

  Args:
    apkname: path to the APK (zip) file.
    offset: file offset, in bytes, at which the library data is expected
      to start (as extracted from the crash trace).

  Returns:
    (soname, sosize) of the stored (uncompressed) .so entry whose data
    starts exactly at |offset| and begins with the ELF magic, or ("", 0)
    if no entry matches.
  """
  # Offsets of fields inside the ZIP local file header (see the ZIP spec):
  # file-name length lives at +26; the name itself starts at +30.
  FILE_NAME_LEN_OFFSET = 26
  FILE_NAME_OFFSET = 30
  soname = ""
  sosize = 0
  with zipfile.ZipFile(apkname, 'r') as apk:
    for infoList in apk.infolist():
      _, file_extension = os.path.splitext(infoList.filename)
      # Only stored entries (file_size == compress_size) can be mapped
      # directly out of the APK.
      if (file_extension == '.so' and
          infoList.file_size == infoList.compress_size):
        with open(apkname, 'rb') as f:
          f.seek(infoList.header_offset + FILE_NAME_LEN_OFFSET)
          file_name_len = struct.unpack('H', f.read(2))[0]
          extra_field_len = struct.unpack('H', f.read(2))[0]
          file_offset = (infoList.header_offset + FILE_NAME_OFFSET +
                         file_name_len + extra_field_len)
          f.seek(file_offset)
          # Bug fix: compare against a bytes literal. The file is opened in
          # binary mode, so under Python 3 a str literal ("\x7fELF") never
          # compares equal to the bytes read; b"\x7fELF" behaves identically
          # on Python 2 and correctly on Python 3.
          if offset == file_offset and f.read(4) == b"\x7fELF":
            soname = infoList.filename.replace('crazy.', '')
            sosize = infoList.file_size
            break
  return soname, sosize
def _GetSharedLibraryInHost(soname, sosize, dirs):
"""Find a shared library by name in a list of directories.
Args:
soname: library name (e.g. libfoo.so)
sosize: library file size to match.
dirs: list of directories to look for the corresponding file.
Returns:
host library path if found, or None
"""
for d in dirs:
host_so_file = os.path.join(d, os.path.basename(soname))
if not os.path.isfile(host_so_file):
continue
if os.path.getsize(host_so_file) != sosize:
continue
logging.debug("%s match to the one in APK" % host_so_file)
return host_so_file
def _FindSharedLibraryFromAPKs(output_directory, apks_directory, offset):
  """Find the shared library at the specific offset of an APK file.
  WARNING: This function will look at *all* the apks under
  $output_directory/apks/ looking for native libraries they may contain at
  |offset|, unless an APKs directory is explicitly specified.
  This is error-prone, since a typical full Chrome build has more than a
  hundred APKs these days, meaning that several APKs might actually match
  the offset.
  The function tries to detect this by looking at the names of the
  extracted libraries. If they are all the same, it will consider that
  as a success, and return its name, even if the APKs embed the same
  library at different offsets!!
  If there are more than one library at offset from the pool of all APKs,
  the function prints an error message and fails.
  Args:
    output_directory: Chromium output directory.
    apks_directory: A optional directory containing (only) the APK in question,
      or in the case of a bundle, all split APKs. This overrides the default
      apks directory derived from the output directory, and allows for
      disambiguation.
    offset: APK file offset, as extracted from the stack trace.
  Returns:
    A (library_name, host_library_path) tuple on success, or (None, None)
    in case of failure.
  """
  if apks_directory:
    if not os.path.isdir(apks_directory):
      # NOTE(review): the format string and repr() are passed as separate
      # Exception args here, so the message is never %-formatted.
      raise Exception('Explicit APKs directory does not exist: %s',
                      repr(apks_directory))
  else:
    apks_directory = os.path.join(output_directory, 'apks')
    if not os.path.isdir(apks_directory):
      return (None, None)
  apks = []
  # Walk subdirectories here, in case the directory contains an unzipped bundle
  # .apks file, with splits in it.
  for d, _, files in os.walk(apks_directory):
    apks.extend(
        os.path.join(d, f) for f in files if os.path.splitext(f)[1] == '.apk')
  shared_libraries = []
  for apk in apks:
    soname, sosize = GetUncompressedSharedLibraryFromAPK(apk, offset)
    if soname == "":
      continue
    # Candidate host dirs: the output dir itself plus any secondary-ABI
    # subdirectory that contains a lib.unstripped folder.
    dirs = [output_directory] + [
        os.path.join(output_directory, x)
        for x in os.listdir(output_directory)
        if os.path.exists(os.path.join(output_directory, x, 'lib.unstripped'))
    ]
    host_so_file = _GetSharedLibraryInHost(soname, sosize, dirs)
    if host_so_file:
      shared_libraries += [(soname, host_so_file)]
  # If there are more than one libraries found, it means detecting
  # library failed.
  number_of_library = len(shared_libraries)
  if number_of_library == 1:
    return shared_libraries[0]
  elif number_of_library > 1:
    logging.warning("More than one libraries could be loaded from APK.")
  return (None, None)
def _FindAbi(lines):
  """Return the ABI string from the first "ABI:" log line, or None."""
  for log_line in lines:
    found = _ABI_LINE.search(log_line)
    if found:
      return found.group('abi')
  return None
| bsd-3-clause |
ibinti/intellij-community | python/helpers/py2only/docutils/utils/math/tex2unichar.py | 120 | 35109 | # -*- coding: utf-8 -*-
# LaTeX math to Unicode symbols translation dictionaries.
# Generated with ``write_tex2unichar.py`` from the data in
# http://milde.users.sourceforge.net/LUCR/Math/
# Includes commands from: wasysym, stmaryrd, mathdots, mathabx, esint, bbold, amsxtra, amsmath, amssymb, standard LaTeX
mathaccent = {
'acute': u'\u0301', # x́ COMBINING ACUTE ACCENT
'bar': u'\u0304', # x̄ COMBINING MACRON
'breve': u'\u0306', # x̆ COMBINING BREVE
'check': u'\u030c', # x̌ COMBINING CARON
'ddddot': u'\u20dc', # x⃜ COMBINING FOUR DOTS ABOVE
'dddot': u'\u20db', # x⃛ COMBINING THREE DOTS ABOVE
'ddot': u'\u0308', # ẍ COMBINING DIAERESIS
'dot': u'\u0307', # ẋ COMBINING DOT ABOVE
'grave': u'\u0300', # x̀ COMBINING GRAVE ACCENT
'hat': u'\u0302', # x̂ COMBINING CIRCUMFLEX ACCENT
'mathring': u'\u030a', # x̊ COMBINING RING ABOVE
'not': u'\u0338', # x̸ COMBINING LONG SOLIDUS OVERLAY
'overleftarrow': u'\u20d6', # x⃖ COMBINING LEFT ARROW ABOVE
'overleftrightarrow': u'\u20e1', # x⃡ COMBINING LEFT RIGHT ARROW ABOVE
'overline': u'\u0305', # x̅ COMBINING OVERLINE
'overrightarrow': u'\u20d7', # x⃗ COMBINING RIGHT ARROW ABOVE
'tilde': u'\u0303', # x̃ COMBINING TILDE
'underbar': u'\u0331', # x̱ COMBINING MACRON BELOW
'underleftarrow': u'\u20ee', # x⃮ COMBINING LEFT ARROW BELOW
'underline': u'\u0332', # x̲ COMBINING LOW LINE
'underrightarrow': u'\u20ef', # x⃯ COMBINING RIGHT ARROW BELOW
'vec': u'\u20d7', # x⃗ COMBINING RIGHT ARROW ABOVE
'widehat': u'\u0302', # x̂ COMBINING CIRCUMFLEX ACCENT
'widetilde': u'\u0303', # x̃ COMBINING TILDE
}
mathalpha = {
'Bbbk': u'\U0001d55c', # 𝕜 MATHEMATICAL DOUBLE-STRUCK SMALL K
'Delta': u'\u0394', # Δ GREEK CAPITAL LETTER DELTA
'Gamma': u'\u0393', # Γ GREEK CAPITAL LETTER GAMMA
'Im': u'\u2111', # ℑ BLACK-LETTER CAPITAL I
'Lambda': u'\u039b', # Λ GREEK CAPITAL LETTER LAMDA
'Omega': u'\u03a9', # Ω GREEK CAPITAL LETTER OMEGA
'Phi': u'\u03a6', # Φ GREEK CAPITAL LETTER PHI
'Pi': u'\u03a0', # Π GREEK CAPITAL LETTER PI
'Psi': u'\u03a8', # Ψ GREEK CAPITAL LETTER PSI
'Re': u'\u211c', # ℜ BLACK-LETTER CAPITAL R
'Sigma': u'\u03a3', # Σ GREEK CAPITAL LETTER SIGMA
'Theta': u'\u0398', # Θ GREEK CAPITAL LETTER THETA
'Upsilon': u'\u03a5', # Υ GREEK CAPITAL LETTER UPSILON
'Xi': u'\u039e', # Ξ GREEK CAPITAL LETTER XI
'aleph': u'\u2135', # ℵ ALEF SYMBOL
'alpha': u'\u03b1', # α GREEK SMALL LETTER ALPHA
'beta': u'\u03b2', # β GREEK SMALL LETTER BETA
'beth': u'\u2136', # ℶ BET SYMBOL
'chi': u'\u03c7', # χ GREEK SMALL LETTER CHI
'daleth': u'\u2138', # ℸ DALET SYMBOL
'delta': u'\u03b4', # δ GREEK SMALL LETTER DELTA
'digamma': u'\u03dc', # Ϝ GREEK LETTER DIGAMMA
'ell': u'\u2113', # ℓ SCRIPT SMALL L
'epsilon': u'\u03f5', # ϵ GREEK LUNATE EPSILON SYMBOL
'eta': u'\u03b7', # η GREEK SMALL LETTER ETA
'eth': u'\xf0', # ð LATIN SMALL LETTER ETH
'gamma': u'\u03b3', # γ GREEK SMALL LETTER GAMMA
'gimel': u'\u2137', # ℷ GIMEL SYMBOL
'hbar': u'\u210f', # ℏ PLANCK CONSTANT OVER TWO PI
'hslash': u'\u210f', # ℏ PLANCK CONSTANT OVER TWO PI
'imath': u'\u0131', # ı LATIN SMALL LETTER DOTLESS I
'iota': u'\u03b9', # ι GREEK SMALL LETTER IOTA
'jmath': u'\u0237', # ȷ LATIN SMALL LETTER DOTLESS J
'kappa': u'\u03ba', # κ GREEK SMALL LETTER KAPPA
'lambda': u'\u03bb', # λ GREEK SMALL LETTER LAMDA
'mu': u'\u03bc', # μ GREEK SMALL LETTER MU
'nu': u'\u03bd', # ν GREEK SMALL LETTER NU
'omega': u'\u03c9', # ω GREEK SMALL LETTER OMEGA
'phi': u'\u03d5', # ϕ GREEK PHI SYMBOL
'pi': u'\u03c0', # π GREEK SMALL LETTER PI
'psi': u'\u03c8', # ψ GREEK SMALL LETTER PSI
'rho': u'\u03c1', # ρ GREEK SMALL LETTER RHO
'sigma': u'\u03c3', # σ GREEK SMALL LETTER SIGMA
'tau': u'\u03c4', # τ GREEK SMALL LETTER TAU
'theta': u'\u03b8', # θ GREEK SMALL LETTER THETA
'upsilon': u'\u03c5', # υ GREEK SMALL LETTER UPSILON
'varDelta': u'\U0001d6e5', # 𝛥 MATHEMATICAL ITALIC CAPITAL DELTA
'varGamma': u'\U0001d6e4', # 𝛤 MATHEMATICAL ITALIC CAPITAL GAMMA
'varLambda': u'\U0001d6ec', # 𝛬 MATHEMATICAL ITALIC CAPITAL LAMDA
'varOmega': u'\U0001d6fa', # 𝛺 MATHEMATICAL ITALIC CAPITAL OMEGA
'varPhi': u'\U0001d6f7', # 𝛷 MATHEMATICAL ITALIC CAPITAL PHI
'varPi': u'\U0001d6f1', # 𝛱 MATHEMATICAL ITALIC CAPITAL PI
'varPsi': u'\U0001d6f9', # 𝛹 MATHEMATICAL ITALIC CAPITAL PSI
'varSigma': u'\U0001d6f4', # 𝛴 MATHEMATICAL ITALIC CAPITAL SIGMA
'varTheta': u'\U0001d6e9', # 𝛩 MATHEMATICAL ITALIC CAPITAL THETA
'varUpsilon': u'\U0001d6f6', # 𝛶 MATHEMATICAL ITALIC CAPITAL UPSILON
'varXi': u'\U0001d6ef', # 𝛯 MATHEMATICAL ITALIC CAPITAL XI
'varepsilon': u'\u03b5', # ε GREEK SMALL LETTER EPSILON
'varkappa': u'\U0001d718', # 𝜘 MATHEMATICAL ITALIC KAPPA SYMBOL
'varphi': u'\u03c6', # φ GREEK SMALL LETTER PHI
'varpi': u'\u03d6', # ϖ GREEK PI SYMBOL
'varrho': u'\u03f1', # ϱ GREEK RHO SYMBOL
'varsigma': u'\u03c2', # ς GREEK SMALL LETTER FINAL SIGMA
'vartheta': u'\u03d1', # ϑ GREEK THETA SYMBOL
'wp': u'\u2118', # ℘ SCRIPT CAPITAL P
'xi': u'\u03be', # ξ GREEK SMALL LETTER XI
'zeta': u'\u03b6', # ζ GREEK SMALL LETTER ZETA
}
mathbin = {
'Cap': u'\u22d2', # ⋒ DOUBLE INTERSECTION
'Circle': u'\u25cb', # ○ WHITE CIRCLE
'Cup': u'\u22d3', # ⋓ DOUBLE UNION
'LHD': u'\u25c0', # ◀ BLACK LEFT-POINTING TRIANGLE
'RHD': u'\u25b6', # ▶ BLACK RIGHT-POINTING TRIANGLE
'amalg': u'\u2a3f', # ⨿ AMALGAMATION OR COPRODUCT
'ast': u'\u2217', # ∗ ASTERISK OPERATOR
'barwedge': u'\u22bc', # ⊼ NAND
'bigtriangledown': u'\u25bd', # ▽ WHITE DOWN-POINTING TRIANGLE
'bigtriangleup': u'\u25b3', # △ WHITE UP-POINTING TRIANGLE
'bindnasrepma': u'\u214b', # ⅋ TURNED AMPERSAND
'blacklozenge': u'\u29eb', # ⧫ BLACK LOZENGE
'blacktriangledown': u'\u25be', # ▾ BLACK DOWN-POINTING SMALL TRIANGLE
'blacktriangleleft': u'\u25c2', # ◂ BLACK LEFT-POINTING SMALL TRIANGLE
'blacktriangleright': u'\u25b8', # ▸ BLACK RIGHT-POINTING SMALL TRIANGLE
'blacktriangleup': u'\u25b4', # ▴ BLACK UP-POINTING SMALL TRIANGLE
'boxast': u'\u29c6', # ⧆ SQUARED ASTERISK
'boxbar': u'\u25eb', # ◫ WHITE SQUARE WITH VERTICAL BISECTING LINE
'boxbox': u'\u29c8', # ⧈ SQUARED SQUARE
'boxbslash': u'\u29c5', # ⧅ SQUARED FALLING DIAGONAL SLASH
'boxcircle': u'\u29c7', # ⧇ SQUARED SMALL CIRCLE
'boxdot': u'\u22a1', # ⊡ SQUARED DOT OPERATOR
'boxminus': u'\u229f', # ⊟ SQUARED MINUS
'boxplus': u'\u229e', # ⊞ SQUARED PLUS
'boxslash': u'\u29c4', # ⧄ SQUARED RISING DIAGONAL SLASH
'boxtimes': u'\u22a0', # ⊠ SQUARED TIMES
'bullet': u'\u2219', # ∙ BULLET OPERATOR
'cap': u'\u2229', # ∩ INTERSECTION
'cdot': u'\u22c5', # ⋅ DOT OPERATOR
'circ': u'\u2218', # ∘ RING OPERATOR
'circledast': u'\u229b', # ⊛ CIRCLED ASTERISK OPERATOR
'circledcirc': u'\u229a', # ⊚ CIRCLED RING OPERATOR
'circleddash': u'\u229d', # ⊝ CIRCLED DASH
'cup': u'\u222a', # ∪ UNION
'curlyvee': u'\u22ce', # ⋎ CURLY LOGICAL OR
'curlywedge': u'\u22cf', # ⋏ CURLY LOGICAL AND
'dagger': u'\u2020', # † DAGGER
'ddagger': u'\u2021', # ‡ DOUBLE DAGGER
'diamond': u'\u22c4', # ⋄ DIAMOND OPERATOR
'div': u'\xf7', # ÷ DIVISION SIGN
'divideontimes': u'\u22c7', # ⋇ DIVISION TIMES
'dotplus': u'\u2214', # ∔ DOT PLUS
'doublebarwedge': u'\u2a5e', # ⩞ LOGICAL AND WITH DOUBLE OVERBAR
'intercal': u'\u22ba', # ⊺ INTERCALATE
'interleave': u'\u2af4', # ⫴ TRIPLE VERTICAL BAR BINARY RELATION
'land': u'\u2227', # ∧ LOGICAL AND
'leftthreetimes': u'\u22cb', # ⋋ LEFT SEMIDIRECT PRODUCT
'lhd': u'\u25c1', # ◁ WHITE LEFT-POINTING TRIANGLE
'lor': u'\u2228', # ∨ LOGICAL OR
'ltimes': u'\u22c9', # ⋉ LEFT NORMAL FACTOR SEMIDIRECT PRODUCT
'mp': u'\u2213', # ∓ MINUS-OR-PLUS SIGN
'odot': u'\u2299', # ⊙ CIRCLED DOT OPERATOR
'ominus': u'\u2296', # ⊖ CIRCLED MINUS
'oplus': u'\u2295', # ⊕ CIRCLED PLUS
'oslash': u'\u2298', # ⊘ CIRCLED DIVISION SLASH
'otimes': u'\u2297', # ⊗ CIRCLED TIMES
'pm': u'\xb1', # ± PLUS-MINUS SIGN
'rhd': u'\u25b7', # ▷ WHITE RIGHT-POINTING TRIANGLE
'rightthreetimes': u'\u22cc', # ⋌ RIGHT SEMIDIRECT PRODUCT
'rtimes': u'\u22ca', # ⋊ RIGHT NORMAL FACTOR SEMIDIRECT PRODUCT
'setminus': u'\u29f5', # ⧵ REVERSE SOLIDUS OPERATOR
'slash': u'\u2215', # ∕ DIVISION SLASH
'smallsetminus': u'\u2216', # ∖ SET MINUS
'smalltriangledown': u'\u25bf', # ▿ WHITE DOWN-POINTING SMALL TRIANGLE
'smalltriangleleft': u'\u25c3', # ◃ WHITE LEFT-POINTING SMALL TRIANGLE
'smalltriangleright': u'\u25b9', # ▹ WHITE RIGHT-POINTING SMALL TRIANGLE
'smalltriangleup': u'\u25b5', # ▵ WHITE UP-POINTING SMALL TRIANGLE
'sqcap': u'\u2293', # ⊓ SQUARE CAP
'sqcup': u'\u2294', # ⊔ SQUARE CUP
'sslash': u'\u2afd', # ⫽ DOUBLE SOLIDUS OPERATOR
'star': u'\u22c6', # ⋆ STAR OPERATOR
'talloblong': u'\u2afe', # ⫾ WHITE VERTICAL BAR
'times': u'\xd7', # × MULTIPLICATION SIGN
'triangle': u'\u25b3', # △ WHITE UP-POINTING TRIANGLE
'triangledown': u'\u25bf', # ▿ WHITE DOWN-POINTING SMALL TRIANGLE
'triangleleft': u'\u25c3', # ◃ WHITE LEFT-POINTING SMALL TRIANGLE
'triangleright': u'\u25b9', # ▹ WHITE RIGHT-POINTING SMALL TRIANGLE
'uplus': u'\u228e', # ⊎ MULTISET UNION
'vartriangle': u'\u25b3', # △ WHITE UP-POINTING TRIANGLE
'vee': u'\u2228', # ∨ LOGICAL OR
'veebar': u'\u22bb', # ⊻ XOR
'wedge': u'\u2227', # ∧ LOGICAL AND
'wr': u'\u2240', # ≀ WREATH PRODUCT
}
mathclose = {
'Rbag': u'\u27c6', # ⟆ RIGHT S-SHAPED BAG DELIMITER
'lrcorner': u'\u231f', # ⌟ BOTTOM RIGHT CORNER
'rangle': u'\u27e9', # ⟩ MATHEMATICAL RIGHT ANGLE BRACKET
'rbag': u'\u27c6', # ⟆ RIGHT S-SHAPED BAG DELIMITER
'rbrace': u'}', # } RIGHT CURLY BRACKET
'rbrack': u']', # ] RIGHT SQUARE BRACKET
'rceil': u'\u2309', # ⌉ RIGHT CEILING
'rfloor': u'\u230b', # ⌋ RIGHT FLOOR
'rgroup': u'\u27ef', # ⟯ MATHEMATICAL RIGHT FLATTENED PARENTHESIS
'rrbracket': u'\u27e7', # ⟧ MATHEMATICAL RIGHT WHITE SQUARE BRACKET
'rrparenthesis': u'\u2988', # ⦈ Z NOTATION RIGHT IMAGE BRACKET
'urcorner': u'\u231d', # ⌝ TOP RIGHT CORNER
'}': u'}', # } RIGHT CURLY BRACKET
}
mathfence = {
'Vert': u'\u2016', # ‖ DOUBLE VERTICAL LINE
'vert': u'|', # | VERTICAL LINE
'|': u'\u2016', # ‖ DOUBLE VERTICAL LINE
}
mathop = {
'Join': u'\u2a1d', # ⨝ JOIN
'bigcap': u'\u22c2', # ⋂ N-ARY INTERSECTION
'bigcup': u'\u22c3', # ⋃ N-ARY UNION
'biginterleave': u'\u2afc', # ⫼ LARGE TRIPLE VERTICAL BAR OPERATOR
'bigodot': u'\u2a00', # ⨀ N-ARY CIRCLED DOT OPERATOR
'bigoplus': u'\u2a01', # ⨁ N-ARY CIRCLED PLUS OPERATOR
'bigotimes': u'\u2a02', # ⨂ N-ARY CIRCLED TIMES OPERATOR
'bigsqcup': u'\u2a06', # ⨆ N-ARY SQUARE UNION OPERATOR
'biguplus': u'\u2a04', # ⨄ N-ARY UNION OPERATOR WITH PLUS
'bigvee': u'\u22c1', # ⋁ N-ARY LOGICAL OR
'bigwedge': u'\u22c0', # ⋀ N-ARY LOGICAL AND
'coprod': u'\u2210', # ∐ N-ARY COPRODUCT
'fatsemi': u'\u2a1f', # ⨟ Z NOTATION SCHEMA COMPOSITION
'fint': u'\u2a0f', # ⨏ INTEGRAL AVERAGE WITH SLASH
'iiiint': u'\u2a0c', # ⨌ QUADRUPLE INTEGRAL OPERATOR
'iiint': u'\u222d', # ∭ TRIPLE INTEGRAL
'iint': u'\u222c', # ∬ DOUBLE INTEGRAL
'int': u'\u222b', # ∫ INTEGRAL
'oiint': u'\u222f', # ∯ SURFACE INTEGRAL
'oint': u'\u222e', # ∮ CONTOUR INTEGRAL
'ointctrclockwise': u'\u2233', # ∳ ANTICLOCKWISE CONTOUR INTEGRAL
'prod': u'\u220f', # ∏ N-ARY PRODUCT
'sqint': u'\u2a16', # ⨖ QUATERNION INTEGRAL OPERATOR
'sum': u'\u2211', # ∑ N-ARY SUMMATION
'varointclockwise': u'\u2232', # ∲ CLOCKWISE CONTOUR INTEGRAL
}
mathopen = {
'Lbag': u'\u27c5', # ⟅ LEFT S-SHAPED BAG DELIMITER
'langle': u'\u27e8', # ⟨ MATHEMATICAL LEFT ANGLE BRACKET
'lbag': u'\u27c5', # ⟅ LEFT S-SHAPED BAG DELIMITER
'lbrace': u'{', # { LEFT CURLY BRACKET
'lbrack': u'[', # [ LEFT SQUARE BRACKET
'lceil': u'\u2308', # ⌈ LEFT CEILING
'lfloor': u'\u230a', # ⌊ LEFT FLOOR
'lgroup': u'\u27ee', # ⟮ MATHEMATICAL LEFT FLATTENED PARENTHESIS
'llbracket': u'\u27e6', # ⟦ MATHEMATICAL LEFT WHITE SQUARE BRACKET
'llcorner': u'\u231e', # ⌞ BOTTOM LEFT CORNER
'llparenthesis': u'\u2987', # ⦇ Z NOTATION LEFT IMAGE BRACKET
'ulcorner': u'\u231c', # ⌜ TOP LEFT CORNER
'{': u'{', # { LEFT CURLY BRACKET
}
mathord = {
'#': u'#', # # NUMBER SIGN
'$': u'$', # $ DOLLAR SIGN
'%': u'%', # % PERCENT SIGN
'&': u'&', # & AMPERSAND
'AC': u'\u223f', # ∿ SINE WAVE
'APLcomment': u'\u235d', # ⍝ APL FUNCTIONAL SYMBOL UP SHOE JOT
'APLdownarrowbox': u'\u2357', # ⍗ APL FUNCTIONAL SYMBOL QUAD DOWNWARDS ARROW
'APLinput': u'\u235e', # ⍞ APL FUNCTIONAL SYMBOL QUOTE QUAD
'APLinv': u'\u2339', # ⌹ APL FUNCTIONAL SYMBOL QUAD DIVIDE
'APLleftarrowbox': u'\u2347', # ⍇ APL FUNCTIONAL SYMBOL QUAD LEFTWARDS ARROW
'APLlog': u'\u235f', # ⍟ APL FUNCTIONAL SYMBOL CIRCLE STAR
'APLrightarrowbox': u'\u2348', # ⍈ APL FUNCTIONAL SYMBOL QUAD RIGHTWARDS ARROW
'APLuparrowbox': u'\u2350', # ⍐ APL FUNCTIONAL SYMBOL QUAD UPWARDS ARROW
'Aries': u'\u2648', # ♈ ARIES
'CIRCLE': u'\u25cf', # ● BLACK CIRCLE
'CheckedBox': u'\u2611', # ☑ BALLOT BOX WITH CHECK
'Diamond': u'\u25c7', # ◇ WHITE DIAMOND
'Finv': u'\u2132', # Ⅎ TURNED CAPITAL F
'Game': u'\u2141', # ⅁ TURNED SANS-SERIF CAPITAL G
'Gemini': u'\u264a', # ♊ GEMINI
'Jupiter': u'\u2643', # ♃ JUPITER
'LEFTCIRCLE': u'\u25d6', # ◖ LEFT HALF BLACK CIRCLE
'LEFTcircle': u'\u25d0', # ◐ CIRCLE WITH LEFT HALF BLACK
'Leo': u'\u264c', # ♌ LEO
'Libra': u'\u264e', # ♎ LIBRA
'Mars': u'\u2642', # ♂ MALE SIGN
'Mercury': u'\u263f', # ☿ MERCURY
'Neptune': u'\u2646', # ♆ NEPTUNE
'Pluto': u'\u2647', # ♇ PLUTO
'RIGHTCIRCLE': u'\u25d7', # ◗ RIGHT HALF BLACK CIRCLE
'RIGHTcircle': u'\u25d1', # ◑ CIRCLE WITH RIGHT HALF BLACK
'Saturn': u'\u2644', # ♄ SATURN
'Scorpio': u'\u264f', # ♏ SCORPIUS
'Square': u'\u2610', # ☐ BALLOT BOX
'Sun': u'\u2609', # ☉ SUN
'Taurus': u'\u2649', # ♉ TAURUS
'Uranus': u'\u2645', # ♅ URANUS
'Venus': u'\u2640', # ♀ FEMALE SIGN
'XBox': u'\u2612', # ☒ BALLOT BOX WITH X
'Yup': u'\u2144', # ⅄ TURNED SANS-SERIF CAPITAL Y
'_': u'_', # _ LOW LINE
'angle': u'\u2220', # ∠ ANGLE
'aquarius': u'\u2652', # ♒ AQUARIUS
'aries': u'\u2648', # ♈ ARIES
'ast': u'*', # * ASTERISK
'backepsilon': u'\u03f6', # ϶ GREEK REVERSED LUNATE EPSILON SYMBOL
'backprime': u'\u2035', # ‵ REVERSED PRIME
'backslash': u'\\', # \ REVERSE SOLIDUS
'because': u'\u2235', # ∵ BECAUSE
'bigstar': u'\u2605', # ★ BLACK STAR
'binampersand': u'&', # & AMPERSAND
'blacklozenge': u'\u2b27', # ⬧ BLACK MEDIUM LOZENGE
'blacksmiley': u'\u263b', # ☻ BLACK SMILING FACE
'blacksquare': u'\u25fc', # ◼ BLACK MEDIUM SQUARE
'bot': u'\u22a5', # ⊥ UP TACK
'boy': u'\u2642', # ♂ MALE SIGN
'cancer': u'\u264b', # ♋ CANCER
'capricornus': u'\u2651', # ♑ CAPRICORN
'cdots': u'\u22ef', # ⋯ MIDLINE HORIZONTAL ELLIPSIS
'cent': u'\xa2', # ¢ CENT SIGN
'centerdot': u'\u2b1d', # ⬝ BLACK VERY SMALL SQUARE
'checkmark': u'\u2713', # ✓ CHECK MARK
'circlearrowleft': u'\u21ba', # ↺ ANTICLOCKWISE OPEN CIRCLE ARROW
'circlearrowright': u'\u21bb', # ↻ CLOCKWISE OPEN CIRCLE ARROW
'circledR': u'\xae', # ® REGISTERED SIGN
'circledcirc': u'\u25ce', # ◎ BULLSEYE
'clubsuit': u'\u2663', # ♣ BLACK CLUB SUIT
'complement': u'\u2201', # ∁ COMPLEMENT
'dasharrow': u'\u21e2', # ⇢ RIGHTWARDS DASHED ARROW
'dashleftarrow': u'\u21e0', # ⇠ LEFTWARDS DASHED ARROW
'dashrightarrow': u'\u21e2', # ⇢ RIGHTWARDS DASHED ARROW
'diameter': u'\u2300', # ⌀ DIAMETER SIGN
'diamondsuit': u'\u2662', # ♢ WHITE DIAMOND SUIT
'earth': u'\u2641', # ♁ EARTH
'exists': u'\u2203', # ∃ THERE EXISTS
'female': u'\u2640', # ♀ FEMALE SIGN
'flat': u'\u266d', # ♭ MUSIC FLAT SIGN
'forall': u'\u2200', # ∀ FOR ALL
'fourth': u'\u2057', # ⁗ QUADRUPLE PRIME
'frownie': u'\u2639', # ☹ WHITE FROWNING FACE
'gemini': u'\u264a', # ♊ GEMINI
'girl': u'\u2640', # ♀ FEMALE SIGN
'heartsuit': u'\u2661', # ♡ WHITE HEART SUIT
'infty': u'\u221e', # ∞ INFINITY
'invneg': u'\u2310', # ⌐ REVERSED NOT SIGN
'jupiter': u'\u2643', # ♃ JUPITER
'ldots': u'\u2026', # … HORIZONTAL ELLIPSIS
'leftmoon': u'\u263e', # ☾ LAST QUARTER MOON
'leftturn': u'\u21ba', # ↺ ANTICLOCKWISE OPEN CIRCLE ARROW
'leo': u'\u264c', # ♌ LEO
'libra': u'\u264e', # ♎ LIBRA
'lnot': u'\xac', # ¬ NOT SIGN
'lozenge': u'\u25ca', # ◊ LOZENGE
'male': u'\u2642', # ♂ MALE SIGN
'maltese': u'\u2720', # ✠ MALTESE CROSS
'mathdollar': u'$', # $ DOLLAR SIGN
'measuredangle': u'\u2221', # ∡ MEASURED ANGLE
'mercury': u'\u263f', # ☿ MERCURY
'mho': u'\u2127', # ℧ INVERTED OHM SIGN
'nabla': u'\u2207', # ∇ NABLA
'natural': u'\u266e', # ♮ MUSIC NATURAL SIGN
'neg': u'\xac', # ¬ NOT SIGN
'neptune': u'\u2646', # ♆ NEPTUNE
'nexists': u'\u2204', # ∄ THERE DOES NOT EXIST
'notbackslash': u'\u2340', # ⍀ APL FUNCTIONAL SYMBOL BACKSLASH BAR
'partial': u'\u2202', # ∂ PARTIAL DIFFERENTIAL
'pisces': u'\u2653', # ♓ PISCES
'pluto': u'\u2647', # ♇ PLUTO
'pounds': u'\xa3', # £ POUND SIGN
'prime': u'\u2032', # ′ PRIME
'quarternote': u'\u2669', # ♩ QUARTER NOTE
'rightmoon': u'\u263d', # ☽ FIRST QUARTER MOON
'rightturn': u'\u21bb', # ↻ CLOCKWISE OPEN CIRCLE ARROW
'sagittarius': u'\u2650', # ♐ SAGITTARIUS
'saturn': u'\u2644', # ♄ SATURN
'scorpio': u'\u264f', # ♏ SCORPIUS
'second': u'\u2033', # ″ DOUBLE PRIME
'sharp': u'\u266f', # ♯ MUSIC SHARP SIGN
'sim': u'~', # ~ TILDE
'slash': u'/', # / SOLIDUS
'smiley': u'\u263a', # ☺ WHITE SMILING FACE
'spadesuit': u'\u2660', # ♠ BLACK SPADE SUIT
'spddot': u'\xa8', # ¨ DIAERESIS
'sphat': u'^', # ^ CIRCUMFLEX ACCENT
'sphericalangle': u'\u2222', # ∢ SPHERICAL ANGLE
'sptilde': u'~', # ~ TILDE
'square': u'\u25fb', # ◻ WHITE MEDIUM SQUARE
'sun': u'\u263c', # ☼ WHITE SUN WITH RAYS
'taurus': u'\u2649', # ♉ TAURUS
'therefore': u'\u2234', # ∴ THEREFORE
'third': u'\u2034', # ‴ TRIPLE PRIME
'top': u'\u22a4', # ⊤ DOWN TACK
'triangleleft': u'\u25c5', # ◅ WHITE LEFT-POINTING POINTER
'triangleright': u'\u25bb', # ▻ WHITE RIGHT-POINTING POINTER
'twonotes': u'\u266b', # ♫ BEAMED EIGHTH NOTES
'uranus': u'\u2645', # ♅ URANUS
'varEarth': u'\u2641', # ♁ EARTH
'varnothing': u'\u2205', # ∅ EMPTY SET
'virgo': u'\u264d', # ♍ VIRGO
'wasylozenge': u'\u2311', # ⌑ SQUARE LOZENGE
'wasytherefore': u'\u2234', # ∴ THEREFORE
'yen': u'\xa5', # ¥ YEN SIGN
}
mathover = {
'overbrace': u'\u23de', # ⏞ TOP CURLY BRACKET
'wideparen': u'\u23dc', # ⏜ TOP PARENTHESIS
}
mathradical = {
'sqrt': u'\u221a', # √ SQUARE ROOT
'sqrt[3]': u'\u221b', # ∛ CUBE ROOT
'sqrt[4]': u'\u221c', # ∜ FOURTH ROOT
}
mathrel = {
'Bumpeq': u'\u224e', # ≎ GEOMETRICALLY EQUIVALENT TO
'Doteq': u'\u2251', # ≑ GEOMETRICALLY EQUAL TO
'Downarrow': u'\u21d3', # ⇓ DOWNWARDS DOUBLE ARROW
'Leftarrow': u'\u21d0', # ⇐ LEFTWARDS DOUBLE ARROW
'Leftrightarrow': u'\u21d4', # ⇔ LEFT RIGHT DOUBLE ARROW
'Lleftarrow': u'\u21da', # ⇚ LEFTWARDS TRIPLE ARROW
'Longleftarrow': u'\u27f8', # ⟸ LONG LEFTWARDS DOUBLE ARROW
'Longleftrightarrow': u'\u27fa', # ⟺ LONG LEFT RIGHT DOUBLE ARROW
'Longmapsfrom': u'\u27fd', # ⟽ LONG LEFTWARDS DOUBLE ARROW FROM BAR
'Longmapsto': u'\u27fe', # ⟾ LONG RIGHTWARDS DOUBLE ARROW FROM BAR
'Longrightarrow': u'\u27f9', # ⟹ LONG RIGHTWARDS DOUBLE ARROW
'Lsh': u'\u21b0', # ↰ UPWARDS ARROW WITH TIP LEFTWARDS
'Mapsfrom': u'\u2906', # ⤆ LEFTWARDS DOUBLE ARROW FROM BAR
'Mapsto': u'\u2907', # ⤇ RIGHTWARDS DOUBLE ARROW FROM BAR
'Rightarrow': u'\u21d2', # ⇒ RIGHTWARDS DOUBLE ARROW
'Rrightarrow': u'\u21db', # ⇛ RIGHTWARDS TRIPLE ARROW
'Rsh': u'\u21b1', # ↱ UPWARDS ARROW WITH TIP RIGHTWARDS
'Subset': u'\u22d0', # ⋐ DOUBLE SUBSET
'Supset': u'\u22d1', # ⋑ DOUBLE SUPERSET
'Uparrow': u'\u21d1', # ⇑ UPWARDS DOUBLE ARROW
'Updownarrow': u'\u21d5', # ⇕ UP DOWN DOUBLE ARROW
'VDash': u'\u22ab', # ⊫ DOUBLE VERTICAL BAR DOUBLE RIGHT TURNSTILE
'Vdash': u'\u22a9', # ⊩ FORCES
'Vvdash': u'\u22aa', # ⊪ TRIPLE VERTICAL BAR RIGHT TURNSTILE
'apprge': u'\u2273', # ≳ GREATER-THAN OR EQUIVALENT TO
'apprle': u'\u2272', # ≲ LESS-THAN OR EQUIVALENT TO
'approx': u'\u2248', # ≈ ALMOST EQUAL TO
'approxeq': u'\u224a', # ≊ ALMOST EQUAL OR EQUAL TO
'asymp': u'\u224d', # ≍ EQUIVALENT TO
'backsim': u'\u223d', # ∽ REVERSED TILDE
'backsimeq': u'\u22cd', # ⋍ REVERSED TILDE EQUALS
'barin': u'\u22f6', # ⋶ ELEMENT OF WITH OVERBAR
'barleftharpoon': u'\u296b', # ⥫ LEFTWARDS HARPOON WITH BARB DOWN BELOW LONG DASH
'barrightharpoon': u'\u296d', # ⥭ RIGHTWARDS HARPOON WITH BARB DOWN BELOW LONG DASH
'between': u'\u226c', # ≬ BETWEEN
'bowtie': u'\u22c8', # ⋈ BOWTIE
'bumpeq': u'\u224f', # ≏ DIFFERENCE BETWEEN
'circeq': u'\u2257', # ≗ RING EQUAL TO
'coloneq': u'\u2254', # ≔ COLON EQUALS
'cong': u'\u2245', # ≅ APPROXIMATELY EQUAL TO
'corresponds': u'\u2259', # ≙ ESTIMATES
'curlyeqprec': u'\u22de', # ⋞ EQUAL TO OR PRECEDES
'curlyeqsucc': u'\u22df', # ⋟ EQUAL TO OR SUCCEEDS
'curvearrowleft': u'\u21b6', # ↶ ANTICLOCKWISE TOP SEMICIRCLE ARROW
'curvearrowright': u'\u21b7', # ↷ CLOCKWISE TOP SEMICIRCLE ARROW
'dashv': u'\u22a3', # ⊣ LEFT TACK
'ddots': u'\u22f1', # ⋱ DOWN RIGHT DIAGONAL ELLIPSIS
'dlsh': u'\u21b2', # ↲ DOWNWARDS ARROW WITH TIP LEFTWARDS
'doteq': u'\u2250', # ≐ APPROACHES THE LIMIT
'doteqdot': u'\u2251', # ≑ GEOMETRICALLY EQUAL TO
'downarrow': u'\u2193', # ↓ DOWNWARDS ARROW
'downdownarrows': u'\u21ca', # ⇊ DOWNWARDS PAIRED ARROWS
'downdownharpoons': u'\u2965', # ⥥ DOWNWARDS HARPOON WITH BARB LEFT BESIDE DOWNWARDS HARPOON WITH BARB RIGHT
'downharpoonleft': u'\u21c3', # ⇃ DOWNWARDS HARPOON WITH BARB LEFTWARDS
'downharpoonright': u'\u21c2', # ⇂ DOWNWARDS HARPOON WITH BARB RIGHTWARDS
'downuparrows': u'\u21f5', # ⇵ DOWNWARDS ARROW LEFTWARDS OF UPWARDS ARROW
'downupharpoons': u'\u296f', # ⥯ DOWNWARDS HARPOON WITH BARB LEFT BESIDE UPWARDS HARPOON WITH BARB RIGHT
'drsh': u'\u21b3', # ↳ DOWNWARDS ARROW WITH TIP RIGHTWARDS
'eqcirc': u'\u2256', # ≖ RING IN EQUAL TO
'eqcolon': u'\u2255', # ≕ EQUALS COLON
'eqsim': u'\u2242', # ≂ MINUS TILDE
'eqslantgtr': u'\u2a96', # ⪖ SLANTED EQUAL TO OR GREATER-THAN
'eqslantless': u'\u2a95', # ⪕ SLANTED EQUAL TO OR LESS-THAN
'equiv': u'\u2261', # ≡ IDENTICAL TO
'fallingdotseq': u'\u2252', # ≒ APPROXIMATELY EQUAL TO OR THE IMAGE OF
'frown': u'\u2322', # ⌢ FROWN
'ge': u'\u2265', # ≥ GREATER-THAN OR EQUAL TO
'geq': u'\u2265', # ≥ GREATER-THAN OR EQUAL TO
'geqq': u'\u2267', # ≧ GREATER-THAN OVER EQUAL TO
'geqslant': u'\u2a7e', # ⩾ GREATER-THAN OR SLANTED EQUAL TO
'gets': u'\u2190', # ← LEFTWARDS ARROW
'gg': u'\u226b', # ≫ MUCH GREATER-THAN
'ggcurly': u'\u2abc', # ⪼ DOUBLE SUCCEEDS
'ggg': u'\u22d9', # ⋙ VERY MUCH GREATER-THAN
'gnapprox': u'\u2a8a', # ⪊ GREATER-THAN AND NOT APPROXIMATE
'gneq': u'\u2a88', # ⪈ GREATER-THAN AND SINGLE-LINE NOT EQUAL TO
'gneqq': u'\u2269', # ≩ GREATER-THAN BUT NOT EQUAL TO
'gnsim': u'\u22e7', # ⋧ GREATER-THAN BUT NOT EQUIVALENT TO
'gtrapprox': u'\u2a86', # ⪆ GREATER-THAN OR APPROXIMATE
'gtrdot': u'\u22d7', # ⋗ GREATER-THAN WITH DOT
'gtreqless': u'\u22db', # ⋛ GREATER-THAN EQUAL TO OR LESS-THAN
'gtreqqless': u'\u2a8c', # ⪌ GREATER-THAN ABOVE DOUBLE-LINE EQUAL ABOVE LESS-THAN
'gtrless': u'\u2277', # ≷ GREATER-THAN OR LESS-THAN
'gtrsim': u'\u2273', # ≳ GREATER-THAN OR EQUIVALENT TO
'hash': u'\u22d5', # ⋕ EQUAL AND PARALLEL TO
'hookleftarrow': u'\u21a9', # ↩ LEFTWARDS ARROW WITH HOOK
'hookrightarrow': u'\u21aa', # ↪ RIGHTWARDS ARROW WITH HOOK
'iddots': u'\u22f0', # ⋰ UP RIGHT DIAGONAL ELLIPSIS
'impliedby': u'\u27f8', # ⟸ LONG LEFTWARDS DOUBLE ARROW
'implies': u'\u27f9', # ⟹ LONG RIGHTWARDS DOUBLE ARROW
'in': u'\u2208', # ∈ ELEMENT OF
'le': u'\u2264', # ≤ LESS-THAN OR EQUAL TO
'leftarrow': u'\u2190', # ← LEFTWARDS ARROW
'leftarrowtail': u'\u21a2', # ↢ LEFTWARDS ARROW WITH TAIL
'leftarrowtriangle': u'\u21fd', # ⇽ LEFTWARDS OPEN-HEADED ARROW
'leftbarharpoon': u'\u296a', # ⥪ LEFTWARDS HARPOON WITH BARB UP ABOVE LONG DASH
'leftharpoondown': u'\u21bd', # ↽ LEFTWARDS HARPOON WITH BARB DOWNWARDS
'leftharpoonup': u'\u21bc', # ↼ LEFTWARDS HARPOON WITH BARB UPWARDS
'leftleftarrows': u'\u21c7', # ⇇ LEFTWARDS PAIRED ARROWS
'leftleftharpoons': u'\u2962', # ⥢ LEFTWARDS HARPOON WITH BARB UP ABOVE LEFTWARDS HARPOON WITH BARB DOWN
'leftrightarrow': u'\u2194', # ↔ LEFT RIGHT ARROW
'leftrightarrows': u'\u21c6', # ⇆ LEFTWARDS ARROW OVER RIGHTWARDS ARROW
'leftrightarrowtriangle': u'\u21ff', # ⇿ LEFT RIGHT OPEN-HEADED ARROW
'leftrightharpoon': u'\u294a', # ⥊ LEFT BARB UP RIGHT BARB DOWN HARPOON
'leftrightharpoons': u'\u21cb', # ⇋ LEFTWARDS HARPOON OVER RIGHTWARDS HARPOON
'leftrightsquigarrow': u'\u21ad', # ↭ LEFT RIGHT WAVE ARROW
'leftslice': u'\u2aa6', # ⪦ LESS-THAN CLOSED BY CURVE
'leftsquigarrow': u'\u21dc', # ⇜ LEFTWARDS SQUIGGLE ARROW
'leq': u'\u2264', # ≤ LESS-THAN OR EQUAL TO
'leqq': u'\u2266', # ≦ LESS-THAN OVER EQUAL TO
'leqslant': u'\u2a7d', # ⩽ LESS-THAN OR SLANTED EQUAL TO
'lessapprox': u'\u2a85', # ⪅ LESS-THAN OR APPROXIMATE
'lessdot': u'\u22d6', # ⋖ LESS-THAN WITH DOT
'lesseqgtr': u'\u22da', # ⋚ LESS-THAN EQUAL TO OR GREATER-THAN
'lesseqqgtr': u'\u2a8b', # ⪋ LESS-THAN ABOVE DOUBLE-LINE EQUAL ABOVE GREATER-THAN
'lessgtr': u'\u2276', # ≶ LESS-THAN OR GREATER-THAN
'lesssim': u'\u2272', # ≲ LESS-THAN OR EQUIVALENT TO
'lightning': u'\u21af', # ↯ DOWNWARDS ZIGZAG ARROW
'll': u'\u226a', # ≪ MUCH LESS-THAN
'llcurly': u'\u2abb', # ⪻ DOUBLE PRECEDES
'lll': u'\u22d8', # ⋘ VERY MUCH LESS-THAN
'lnapprox': u'\u2a89', # ⪉ LESS-THAN AND NOT APPROXIMATE
'lneq': u'\u2a87', # ⪇ LESS-THAN AND SINGLE-LINE NOT EQUAL TO
'lneqq': u'\u2268', # ≨ LESS-THAN BUT NOT EQUAL TO
'lnsim': u'\u22e6', # ⋦ LESS-THAN BUT NOT EQUIVALENT TO
'longleftarrow': u'\u27f5', # ⟵ LONG LEFTWARDS ARROW
'longleftrightarrow': u'\u27f7', # ⟷ LONG LEFT RIGHT ARROW
'longmapsfrom': u'\u27fb', # ⟻ LONG LEFTWARDS ARROW FROM BAR
'longmapsto': u'\u27fc', # ⟼ LONG RIGHTWARDS ARROW FROM BAR
'longrightarrow': u'\u27f6', # ⟶ LONG RIGHTWARDS ARROW
'looparrowleft': u'\u21ab', # ↫ LEFTWARDS ARROW WITH LOOP
'looparrowright': u'\u21ac', # ↬ RIGHTWARDS ARROW WITH LOOP
'mapsfrom': u'\u21a4', # ↤ LEFTWARDS ARROW FROM BAR
'mapsto': u'\u21a6', # ↦ RIGHTWARDS ARROW FROM BAR
'mid': u'\u2223', # ∣ DIVIDES
'models': u'\u22a7', # ⊧ MODELS
'multimap': u'\u22b8', # ⊸ MULTIMAP
'nLeftarrow': u'\u21cd', # ⇍ LEFTWARDS DOUBLE ARROW WITH STROKE
'nLeftrightarrow': u'\u21ce', # ⇎ LEFT RIGHT DOUBLE ARROW WITH STROKE
'nRightarrow': u'\u21cf', # ⇏ RIGHTWARDS DOUBLE ARROW WITH STROKE
'nVDash': u'\u22af', # ⊯ NEGATED DOUBLE VERTICAL BAR DOUBLE RIGHT TURNSTILE
'nVdash': u'\u22ae', # ⊮ DOES NOT FORCE
'ncong': u'\u2247', # ≇ NEITHER APPROXIMATELY NOR ACTUALLY EQUAL TO
'ne': u'\u2260', # ≠ NOT EQUAL TO
'nearrow': u'\u2197', # ↗ NORTH EAST ARROW
'neq': u'\u2260', # ≠ NOT EQUAL TO
'ngeq': u'\u2271', # ≱ NEITHER GREATER-THAN NOR EQUAL TO
'ngtr': u'\u226f', # ≯ NOT GREATER-THAN
'ni': u'\u220b', # ∋ CONTAINS AS MEMBER
'nleftarrow': u'\u219a', # ↚ LEFTWARDS ARROW WITH STROKE
'nleftrightarrow': u'\u21ae', # ↮ LEFT RIGHT ARROW WITH STROKE
'nleq': u'\u2270', # ≰ NEITHER LESS-THAN NOR EQUAL TO
'nless': u'\u226e', # ≮ NOT LESS-THAN
'nmid': u'\u2224', # ∤ DOES NOT DIVIDE
'notasymp': u'\u226d', # ≭ NOT EQUIVALENT TO
'notin': u'\u2209', # ∉ NOT AN ELEMENT OF
'notowner': u'\u220c', # ∌ DOES NOT CONTAIN AS MEMBER
'notslash': u'\u233f', # ⌿ APL FUNCTIONAL SYMBOL SLASH BAR
'nparallel': u'\u2226', # ∦ NOT PARALLEL TO
'nprec': u'\u2280', # ⊀ DOES NOT PRECEDE
'npreceq': u'\u22e0', # ⋠ DOES NOT PRECEDE OR EQUAL
'nrightarrow': u'\u219b', # ↛ RIGHTWARDS ARROW WITH STROKE
'nsim': u'\u2241', # ≁ NOT TILDE
'nsubseteq': u'\u2288', # ⊈ NEITHER A SUBSET OF NOR EQUAL TO
'nsucc': u'\u2281', # ⊁ DOES NOT SUCCEED
'nsucceq': u'\u22e1', # ⋡ DOES NOT SUCCEED OR EQUAL
'nsupseteq': u'\u2289', # ⊉ NEITHER A SUPERSET OF NOR EQUAL TO
'ntriangleleft': u'\u22ea', # ⋪ NOT NORMAL SUBGROUP OF
'ntrianglelefteq': u'\u22ec', # ⋬ NOT NORMAL SUBGROUP OF OR EQUAL TO
'ntriangleright': u'\u22eb', # ⋫ DOES NOT CONTAIN AS NORMAL SUBGROUP
'ntrianglerighteq': u'\u22ed', # ⋭ DOES NOT CONTAIN AS NORMAL SUBGROUP OR EQUAL
'nvDash': u'\u22ad', # ⊭ NOT TRUE
'nvdash': u'\u22ac', # ⊬ DOES NOT PROVE
'nwarrow': u'\u2196', # ↖ NORTH WEST ARROW
'owns': u'\u220b', # ∋ CONTAINS AS MEMBER
'parallel': u'\u2225', # ∥ PARALLEL TO
'perp': u'\u27c2', # ⟂ PERPENDICULAR
'pitchfork': u'\u22d4', # ⋔ PITCHFORK
'prec': u'\u227a', # ≺ PRECEDES
'precapprox': u'\u2ab7', # ⪷ PRECEDES ABOVE ALMOST EQUAL TO
'preccurlyeq': u'\u227c', # ≼ PRECEDES OR EQUAL TO
'preceq': u'\u2aaf', # ⪯ PRECEDES ABOVE SINGLE-LINE EQUALS SIGN
'precnapprox': u'\u2ab9', # ⪹ PRECEDES ABOVE NOT ALMOST EQUAL TO
'precnsim': u'\u22e8', # ⋨ PRECEDES BUT NOT EQUIVALENT TO
'precsim': u'\u227e', # ≾ PRECEDES OR EQUIVALENT TO
'propto': u'\u221d', # ∝ PROPORTIONAL TO
'restriction': u'\u21be', # ↾ UPWARDS HARPOON WITH BARB RIGHTWARDS
'rightarrow': u'\u2192', # → RIGHTWARDS ARROW
'rightarrowtail': u'\u21a3', # ↣ RIGHTWARDS ARROW WITH TAIL
'rightarrowtriangle': u'\u21fe', # ⇾ RIGHTWARDS OPEN-HEADED ARROW
'rightbarharpoon': u'\u296c', # ⥬ RIGHTWARDS HARPOON WITH BARB UP ABOVE LONG DASH
'rightharpoondown': u'\u21c1', # ⇁ RIGHTWARDS HARPOON WITH BARB DOWNWARDS
'rightharpoonup': u'\u21c0', # ⇀ RIGHTWARDS HARPOON WITH BARB UPWARDS
'rightleftarrows': u'\u21c4', # ⇄ RIGHTWARDS ARROW OVER LEFTWARDS ARROW
'rightleftharpoon': u'\u294b', # ⥋ LEFT BARB DOWN RIGHT BARB UP HARPOON
'rightleftharpoons': u'\u21cc', # ⇌ RIGHTWARDS HARPOON OVER LEFTWARDS HARPOON
'rightrightarrows': u'\u21c9', # ⇉ RIGHTWARDS PAIRED ARROWS
'rightrightharpoons': u'\u2964', # ⥤ RIGHTWARDS HARPOON WITH BARB UP ABOVE RIGHTWARDS HARPOON WITH BARB DOWN
'rightslice': u'\u2aa7', # ⪧ GREATER-THAN CLOSED BY CURVE
'rightsquigarrow': u'\u21dd', # ⇝ RIGHTWARDS SQUIGGLE ARROW
'risingdotseq': u'\u2253', # ≓ IMAGE OF OR APPROXIMATELY EQUAL TO
'searrow': u'\u2198', # ↘ SOUTH EAST ARROW
'sim': u'\u223c', # ∼ TILDE OPERATOR
'simeq': u'\u2243', # ≃ ASYMPTOTICALLY EQUAL TO
'smallfrown': u'\u2322', # ⌢ FROWN
'smallsmile': u'\u2323', # ⌣ SMILE
'smile': u'\u2323', # ⌣ SMILE
'sqsubset': u'\u228f', # ⊏ SQUARE IMAGE OF
'sqsubseteq': u'\u2291', # ⊑ SQUARE IMAGE OF OR EQUAL TO
'sqsupset': u'\u2290', # ⊐ SQUARE ORIGINAL OF
'sqsupseteq': u'\u2292', # ⊒ SQUARE ORIGINAL OF OR EQUAL TO
'subset': u'\u2282', # ⊂ SUBSET OF
'subseteq': u'\u2286', # ⊆ SUBSET OF OR EQUAL TO
'subseteqq': u'\u2ac5', # ⫅ SUBSET OF ABOVE EQUALS SIGN
'subsetneq': u'\u228a', # ⊊ SUBSET OF WITH NOT EQUAL TO
'subsetneqq': u'\u2acb', # ⫋ SUBSET OF ABOVE NOT EQUAL TO
'succ': u'\u227b', # ≻ SUCCEEDS
'succapprox': u'\u2ab8', # ⪸ SUCCEEDS ABOVE ALMOST EQUAL TO
'succcurlyeq': u'\u227d', # ≽ SUCCEEDS OR EQUAL TO
'succeq': u'\u2ab0', # ⪰ SUCCEEDS ABOVE SINGLE-LINE EQUALS SIGN
'succnapprox': u'\u2aba', # ⪺ SUCCEEDS ABOVE NOT ALMOST EQUAL TO
'succnsim': u'\u22e9', # ⋩ SUCCEEDS BUT NOT EQUIVALENT TO
'succsim': u'\u227f', # ≿ SUCCEEDS OR EQUIVALENT TO
'supset': u'\u2283', # ⊃ SUPERSET OF
'supseteq': u'\u2287', # ⊇ SUPERSET OF OR EQUAL TO
'supseteqq': u'\u2ac6', # ⫆ SUPERSET OF ABOVE EQUALS SIGN
'supsetneq': u'\u228b', # ⊋ SUPERSET OF WITH NOT EQUAL TO
'supsetneqq': u'\u2acc', # ⫌ SUPERSET OF ABOVE NOT EQUAL TO
'swarrow': u'\u2199', # ↙ SOUTH WEST ARROW
'to': u'\u2192', # → RIGHTWARDS ARROW
'trianglelefteq': u'\u22b4', # ⊴ NORMAL SUBGROUP OF OR EQUAL TO
'triangleq': u'\u225c', # ≜ DELTA EQUAL TO
'trianglerighteq': u'\u22b5', # ⊵ CONTAINS AS NORMAL SUBGROUP OR EQUAL TO
'twoheadleftarrow': u'\u219e', # ↞ LEFTWARDS TWO HEADED ARROW
'twoheadrightarrow': u'\u21a0', # ↠ RIGHTWARDS TWO HEADED ARROW
'uparrow': u'\u2191', # ↑ UPWARDS ARROW
'updownarrow': u'\u2195', # ↕ UP DOWN ARROW
'updownarrows': u'\u21c5', # ⇅ UPWARDS ARROW LEFTWARDS OF DOWNWARDS ARROW
'updownharpoons': u'\u296e', # ⥮ UPWARDS HARPOON WITH BARB LEFT BESIDE DOWNWARDS HARPOON WITH BARB RIGHT
'upharpoonleft': u'\u21bf', # ↿ UPWARDS HARPOON WITH BARB LEFTWARDS
'upharpoonright': u'\u21be', # ↾ UPWARDS HARPOON WITH BARB RIGHTWARDS
'upuparrows': u'\u21c8', # ⇈ UPWARDS PAIRED ARROWS
'upupharpoons': u'\u2963', # ⥣ UPWARDS HARPOON WITH BARB LEFT BESIDE UPWARDS HARPOON WITH BARB RIGHT
'vDash': u'\u22a8', # ⊨ TRUE
'varpropto': u'\u221d', # ∝ PROPORTIONAL TO
'vartriangleleft': u'\u22b2', # ⊲ NORMAL SUBGROUP OF
'vartriangleright': u'\u22b3', # ⊳ CONTAINS AS NORMAL SUBGROUP
'vdash': u'\u22a2', # ⊢ RIGHT TACK
'vdots': u'\u22ee', # ⋮ VERTICAL ELLIPSIS
}
mathunder = {
'underbrace': u'\u23df', # ⏟ BOTTOM CURLY BRACKET
}
space = {
':': u'\u205f', # MEDIUM MATHEMATICAL SPACE
'medspace': u'\u205f', # MEDIUM MATHEMATICAL SPACE
'quad': u'\u2001', # EM QUAD
}
| apache-2.0 |
pmaigutyak/mp-shop | delivery/models.py | 1 | 2389 |
from django.db import models
from django.utils.translation import ugettext_lazy as _
class DeliveryMethod(models.Model):
name = models.CharField(_('Name'), max_length=255)
code = models.CharField(_('Code'), max_length=255, unique=True)
def __str__(self):
return self.name
class Meta:
verbose_name = _('Delivery method')
verbose_name_plural = _('Delivery methods')
class DeliveryMethodField(models.ForeignKey):
def __init__(
self,
to=DeliveryMethod,
verbose_name=_('Delivery method'),
on_delete=models.CASCADE,
null=True,
*args, **kwargs):
super().__init__(
to,
verbose_name=verbose_name,
on_delete=on_delete,
null=null,
*args, **kwargs)
class Region(models.Model):
name = models.CharField(_('Name'), max_length=255)
reference = models.CharField(_('Reference'), max_length=255)
def __str__(self):
if self.reference == '71508128-9b87-11de-822f-000c2965ae0e':
return self.name
return '{} {}'.format(self.name, _('region'))
class Meta:
verbose_name = _('Region')
verbose_name_plural = _('Regions')
class City(models.Model):
region = models.ForeignKey(
Region,
verbose_name=_('Region'),
related_name='cities',
on_delete=models.CASCADE)
name = models.CharField(_('Name'), max_length=255)
reference = models.CharField(_('Reference'), max_length=255)
def __str__(self):
return '{} - {}'.format(self.name, self.region)
class Meta:
verbose_name = _('City')
verbose_name_plural = _('Cities')
class Warehouse(models.Model):
city = models.ForeignKey(
City,
verbose_name=_('City'),
related_name='warehouses',
on_delete=models.CASCADE)
delivery_method = models.ForeignKey(
DeliveryMethod,
verbose_name=_('Delivery method'),
on_delete=models.CASCADE)
name = models.CharField(_('Name'), max_length=255, db_index=True)
reference = models.CharField(_('Reference'), max_length=255)
def __str__(self):
return '{}, {}, {}'.format(self.delivery_method, self.city, self.name)
class Meta:
verbose_name = _('Warehouse')
verbose_name_plural = _('Warehouses')
| isc |
mabotech/mabo.task | py/report/docx_gen.py | 2 | 1230 | # -*- coding: utf-8 -*-
from docx import Document
from docx.shared import Inches
document = Document()
document.add_heading(u'FT汽车', 0)
p = document.add_paragraph(u'汽车工程研究院 ')
p.add_run(u'试验中心').bold = True
p.add_run(u'试验数据管理系统')
p.add_run(u'项目二期。').italic = True
document.add_heading(u'报告说明Heading, level 2', level=2)
document.add_paragraph('Intense quote', style='IntenseQuote')
document.add_paragraph(
u'数据项1 first item in unordered list', style='ListBullet'
)
document.add_paragraph(
u'有序数据项1 first item in ordered list', style='ListNumber'
)
document.add_picture('foton.png', width=Inches(1.25))
document.add_picture('report1.png', width=Inches(3.25))
document.add_heading(
u'数据项', level=2
)
table = document.add_table(rows=1, cols=3, style='TableGrid')
table.autofit = True
hdr_cells = table.rows[0].cells
hdr_cells[0].text = 'Qty'
hdr_cells[1].text = 'Id'
hdr_cells[2].text = 'Desc'
for item in xrange(0,4):
row_cells = table.add_row().cells
row_cells[0].text = str(item)
row_cells[1].text = str(item)
row_cells[2].text = "item.desc"
document.add_page_break()
document.save('foton_test2.docx') | mit |
jcfr/mystic | examples/constraint1_example01.py | 1 | 1202 | #!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2015 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
"""
Example:
- Minimize Rosenbrock's Function with Powell's method.
Demonstrates:
- standard models
- minimal solver interface
- parameter constraints solver
- customized monitors
"""
# Powell's Directonal solver
from mystic.solvers import fmin_powell
# Rosenbrock function
from mystic.models import rosen
# tools
from mystic.monitors import VerboseMonitor
if __name__ == '__main__':
print "Powell's Method"
print "==============="
# initial guess
x0 = [0.8,1.2,0.7]
# define constraints function
def constraints(x):
# constrain the last x_i to be the same value as the first x_i
x[-1] = x[0]
return x
# configure monitor
stepmon = VerboseMonitor(1)
# use Powell's method to minimize the Rosenbrock function
solution = fmin_powell(rosen,x0,constraints=constraints,itermon=stepmon)
print solution
# end of file
| bsd-3-clause |
leiferikb/bitpop | src/tools/telemetry/telemetry/core/forwarders/cros_forwarder.py | 46 | 2295 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import subprocess
from telemetry.core import forwarders
from telemetry.core import util
from telemetry.core.forwarders import do_nothing_forwarder
class CrOsForwarderFactory(forwarders.ForwarderFactory):
def __init__(self, cri):
super(CrOsForwarderFactory, self).__init__()
self._cri = cri
def Create(self, port_pairs, forwarding_flag='R'): # pylint: disable=W0221
if self._cri.local:
return do_nothing_forwarder.DoNothingForwarder(port_pairs)
return CrOsSshForwarder(self._cri, forwarding_flag, port_pairs)
class CrOsSshForwarder(forwarders.Forwarder):
def __init__(self, cri, forwarding_flag, port_pairs):
super(CrOsSshForwarder, self).__init__(port_pairs)
self._cri = cri
self._proc = None
self._forwarding_flag = forwarding_flag
if self._forwarding_flag == 'R':
command_line = ['-%s%i:%s:%i' % (self._forwarding_flag,
port_pair.remote_port,
self.host_ip,
port_pair.local_port)
for port_pair in port_pairs if port_pair]
else:
command_line = ['-%s%i:%s:%i' % (self._forwarding_flag,
port_pair.local_port,
self.host_ip,
port_pair.remote_port)
for port_pair in port_pairs if port_pair]
logging.debug('Forwarding to localhost:%d', port_pairs[0].local_port)
self._proc = subprocess.Popen(
self._cri.FormSSHCommandLine(['sleep', '999999999'], command_line),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE,
shell=False)
util.WaitFor(
lambda: self._cri.IsHTTPServerRunningOnPort(self.host_port), 60)
logging.debug('Server started on %s:%d', self.host_ip, self.host_port)
@property
def host_port(self):
return self._port_pairs.http.remote_port
def Close(self):
if self._proc:
self._proc.kill()
self._proc = None
super(CrOsSshForwarder, self).Close()
| gpl-3.0 |
joachimmetz/plaso | tests/parsers/winreg_plugins/windows_version.py | 2 | 7444 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the WinVer Windows Registry plugin."""
import unittest
from dfdatetime import filetime as dfdatetime_filetime
from dfwinreg import definitions as dfwinreg_definitions
from dfwinreg import fake as dfwinreg_fake
from plaso.lib import definitions
from plaso.parsers.winreg_plugins import windows_version
from tests import test_lib as shared_test_lib
from tests.parsers.winreg_plugins import test_lib
class WindowsRegistryInstallationEventDataTest(shared_test_lib.BaseTestCase):
"""Tests for the Windows installation event data attribute container."""
def testGetAttributeNames(self):
"""Tests the GetAttributeNames function."""
attribute_container = windows_version.WindowsRegistryInstallationEventData()
expected_attribute_names = [
'_event_data_stream_row_identifier', 'build_number', 'data_type',
'key_path', 'owner', 'parser', 'product_name', 'service_pack',
'version']
attribute_names = sorted(attribute_container.GetAttributeNames())
self.assertEqual(attribute_names, expected_attribute_names)
class WindowsVersionPluginTest(test_lib.RegistryPluginTestCase):
"""Tests for the Windows version Windows Registry plugin."""
def _CreateTestKey(self, key_path, time_string):
"""Creates Registry keys and values for testing.
Args:
key_path (str): Windows Registry key path.
time_string (str): key last written date and time.
Returns:
dfwinreg.WinRegistryKey: a Windows Registry key.
"""
filetime = dfdatetime_filetime.Filetime()
filetime.CopyFromDateTimeString(time_string)
registry_key = dfwinreg_fake.FakeWinRegistryKey(
'CurrentVersion', key_path=key_path,
last_written_time=filetime.timestamp, offset=153)
value_data = 'Service Pack 1'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'CSDVersion', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=1892)
registry_key.AddValue(registry_value)
value_data = '5.1'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'CurrentVersion', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=1121)
registry_key.AddValue(registry_value)
value_data = b'\x13\x1aAP'
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'InstallDate', data=value_data,
data_type=dfwinreg_definitions.REG_DWORD_LITTLE_ENDIAN, offset=1001)
registry_key.AddValue(registry_value)
value_data = 'MyTestOS'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'ProductName', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
offset=123)
registry_key.AddValue(registry_value)
value_data = 'A Concerned Citizen'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'RegisteredOwner', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=612)
registry_key.AddValue(registry_value)
return registry_key
def testFilters(self):
"""Tests the FILTERS class attribute."""
plugin = windows_version.WindowsVersionPlugin()
key_path = (
'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\CurrentVersion')
self._AssertFiltersOnKeyPath(plugin, key_path)
self._AssertNotFiltersOnKeyPath(plugin, 'HKEY_LOCAL_MACHINE\\Bogus')
def testProcess(self):
"""Tests the Process function."""
key_path = (
'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\CurrentVersion')
registry_key = self._CreateTestKey(key_path, '2012-08-31 20:09:55.123521')
plugin = windows_version.WindowsVersionPlugin()
storage_writer = self._ParseKeyWithPlugin(registry_key, plugin)
self.assertEqual(storage_writer.number_of_events, 2)
self.assertEqual(storage_writer.number_of_extraction_warnings, 0)
self.assertEqual(storage_writer.number_of_recovery_warnings, 0)
events = list(storage_writer.GetEvents())
expected_values = (
'CSDVersion: [REG_SZ] Service Pack 1 '
'CurrentVersion: [REG_SZ] 5.1 '
'ProductName: [REG_SZ] MyTestOS '
'RegisteredOwner: [REG_SZ] A Concerned Citizen')
expected_event_values = {
'date_time': '2012-08-31 20:09:55.1235210',
'data_type': 'windows:registry:key_value',
'key_path': key_path,
# This should just be the plugin name, as we're invoking it directly,
# and not through the parser.
'parser': plugin.NAME,
'timestamp_desc': definitions.TIME_DESCRIPTION_WRITTEN,
'values': expected_values}
self.CheckEventValues(storage_writer, events[1], expected_event_values)
expected_event_values = {
'date_time': '2012-08-31 20:09:55',
'data_type': 'windows:registry:installation',
'key_path': key_path,
'owner': 'A Concerned Citizen',
'product_name': 'MyTestOS',
'service_pack': 'Service Pack 1',
'timestamp_desc': definitions.TIME_DESCRIPTION_INSTALLATION,
'version': '5.1'}
self.CheckEventValues(storage_writer, events[0], expected_event_values)
  def testProcessFile(self):
    """Tests the Process function on a Windows Registry file."""
    test_file_entry = self._GetTestFileEntry(['SOFTWARE-RunTests'])
    key_path = (
        'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\CurrentVersion')
    win_registry = self._GetWinRegistryFromFileEntry(test_file_entry)
    registry_key = win_registry.GetKeyByPath(key_path)
    plugin = windows_version.WindowsVersionPlugin()
    storage_writer = self._ParseKeyWithPlugin(
        registry_key, plugin, file_entry=test_file_entry)
    self.assertEqual(storage_writer.number_of_events, 2)
    self.assertEqual(storage_writer.number_of_extraction_warnings, 0)
    self.assertEqual(storage_writer.number_of_recovery_warnings, 0)
    events = list(storage_writer.GetEvents())
    # Expected flattened value string for the CurrentVersion key in the
    # SOFTWARE-RunTests fixture file.
    expected_values = (
        'BuildGUID: [REG_SZ] f4bf21b9-55fe-4ee8-a84b-0e91cbd5fe5d '
        'BuildLab: [REG_SZ] 7601.win7sp1_gdr.111118-2330 '
        'BuildLabEx: [REG_SZ] 7601.17727.amd64fre.win7sp1_gdr.111118-2330 '
        'CSDBuildNumber: [REG_SZ] 1130 '
        'CSDVersion: [REG_SZ] Service Pack 1 '
        'CurrentBuild: [REG_SZ] 7601 '
        'CurrentBuildNumber: [REG_SZ] 7601 '
        'CurrentType: [REG_SZ] Multiprocessor Free '
        'CurrentVersion: [REG_SZ] 6.1 '
        'DigitalProductId: [REG_BINARY] (164 bytes) '
        'DigitalProductId4: [REG_BINARY] (1272 bytes) '
        'EditionID: [REG_SZ] Ultimate '
        'InstallationType: [REG_SZ] Client '
        'PathName: [REG_SZ] C:\\Windows '
        'ProductId: [REG_SZ] 00426-065-0381817-86216 '
        'ProductName: [REG_SZ] Windows 7 Ultimate '
        'RegisteredOrganization: [REG_SZ] '
        'RegisteredOwner: [REG_SZ] Windows User '
        'SoftwareType: [REG_SZ] System '
        'SystemRoot: [REG_SZ] C:\\Windows')
    expected_event_values = {
        'date_time': '2012-03-15 07:09:20.6718750',
        'data_type': 'windows:registry:key_value',
        'key_path': key_path,
        # This should just be the plugin name, as we're invoking it directly,
        # and not through the parser.
        'parser': plugin.NAME,
        'values': expected_values}
    self.CheckEventValues(storage_writer, events[1], expected_event_values)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
msiedlarek/grpc | src/python/grpcio/tests/unit/_links/_transmission_test.py | 9 | 10243 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests transmission of tickets across gRPC-on-the-wire."""
import unittest
from grpc._adapter import _intermediary_low
from grpc._links import invocation
from grpc._links import service
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.interfaces.links import links
from tests.unit import test_common
from tests.unit._links import _proto_scenarios
from tests.unit.framework.common import test_constants
from tests.unit.framework.interfaces.links import test_cases
from tests.unit.framework.interfaces.links import test_utilities
_IDENTITY = lambda x: x
class TransmissionTest(test_cases.TransmissionTest, unittest.TestCase):
  """Concrete TransmissionTest run over real gRPC-on-the-wire links."""
  def create_transmitting_links(self):
    """Creates a started (invocation link, service link) pair on localhost."""
    service_link = service.service_link(
        {self.group_and_method(): self.deserialize_request},
        {self.group_and_method(): self.serialize_response})
    # Bind to an ephemeral port, then point the invocation link at it.
    port = service_link.add_port('[::]:0', None)
    service_link.start()
    channel = _intermediary_low.Channel('localhost:%d' % port, None)
    invocation_link = invocation.invocation_link(
        channel, 'localhost', None,
        {self.group_and_method(): self.serialize_request},
        {self.group_and_method(): self.deserialize_response})
    invocation_link.start()
    return invocation_link, service_link
  def destroy_transmitting_links(self, invocation_side_link, service_side_link):
    """Stops both links; the service side uses the two-phase stop protocol."""
    invocation_side_link.stop()
    service_side_link.begin_stop()
    service_side_link.end_stop()
  def create_invocation_initial_metadata(self):
    # Includes a '-bin' suffixed key to exercise binary-valued metadata.
    return (
        ('first_invocation_initial_metadata_key', 'just a string value'),
        ('second_invocation_initial_metadata_key', '0123456789'),
        ('third_invocation_initial_metadata_key-bin', '\x00\x57' * 100),
    )
  def create_invocation_terminal_metadata(self):
    return None
  def create_service_initial_metadata(self):
    return (
        ('first_service_initial_metadata_key', 'just another string value'),
        ('second_service_initial_metadata_key', '9876543210'),
        ('third_service_initial_metadata_key-bin', '\x00\x59\x02' * 100),
    )
  def create_service_terminal_metadata(self):
    return (
        ('first_service_terminal_metadata_key', 'yet another string value'),
        ('second_service_terminal_metadata_key', 'abcdefghij'),
        ('third_service_terminal_metadata_key-bin', '\x00\x37' * 100),
    )
  def create_invocation_completion(self):
    return None, None
  def create_service_completion(self):
    return (
        beta_interfaces.StatusCode.OK, b'An exuberant test "details" message!')
  def assertMetadataTransmitted(self, original_metadata, transmitted_metadata):
    """Asserts that metadata survived transmission (order-insensitive)."""
    self.assertTrue(
        test_common.metadata_transmitted(
            original_metadata, transmitted_metadata),
        '%s erroneously transmitted as %s' % (
            original_metadata, transmitted_metadata))
class RoundTripTest(unittest.TestCase):
  """End-to-end round trips of tickets between invocation and service links."""
  def testZeroMessageRoundTrip(self):
    """Round-trips a complete operation that carries no payload messages."""
    test_operation_id = object()
    test_group = 'test package.Test Group'
    test_method = 'test method'
    identity_transformation = {(test_group, test_method): _IDENTITY}
    test_code = beta_interfaces.StatusCode.OK
    test_message = 'a test message'
    service_link = service.service_link(
        identity_transformation, identity_transformation)
    # RecordingLinks capture every ticket that crosses each side.
    service_mate = test_utilities.RecordingLink()
    service_link.join_link(service_mate)
    port = service_link.add_port('[::]:0', None)
    service_link.start()
    channel = _intermediary_low.Channel('localhost:%d' % port, None)
    invocation_link = invocation.invocation_link(
        channel, None, None, identity_transformation, identity_transformation)
    invocation_mate = test_utilities.RecordingLink()
    invocation_link.join_link(invocation_mate)
    invocation_link.start()
    # Sequence number 0, immediately terminated with COMPLETION: an
    # operation with no payloads at all.
    invocation_ticket = links.Ticket(
        test_operation_id, 0, test_group, test_method,
        links.Ticket.Subscription.FULL, test_constants.LONG_TIMEOUT, None, None,
        None, None, None, None, links.Ticket.Termination.COMPLETION, None)
    invocation_link.accept_ticket(invocation_ticket)
    service_mate.block_until_tickets_satisfy(test_cases.terminated)
    # The service completes with an OK code and a details message.
    service_ticket = links.Ticket(
        service_mate.tickets()[-1].operation_id, 0, None, None, None, None,
        None, None, None, None, test_code, test_message,
        links.Ticket.Termination.COMPLETION, None)
    service_link.accept_ticket(service_ticket)
    invocation_mate.block_until_tickets_satisfy(test_cases.terminated)
    invocation_link.stop()
    service_link.begin_stop()
    service_link.end_stop()
    self.assertIs(
        service_mate.tickets()[-1].termination,
        links.Ticket.Termination.COMPLETION)
    self.assertIs(
        invocation_mate.tickets()[-1].termination,
        links.Ticket.Termination.COMPLETION)
    self.assertIs(invocation_mate.tickets()[-1].code, test_code)
    self.assertEqual(invocation_mate.tickets()[-1].message, test_message)
  def _perform_scenario_test(self, scenario):
    """Runs one request/response scenario through real links and verifies it."""
    test_operation_id = object()
    test_group, test_method = scenario.group_and_method()
    test_code = beta_interfaces.StatusCode.OK
    test_message = 'a scenario test message'
    service_link = service.service_link(
        {(test_group, test_method): scenario.deserialize_request},
        {(test_group, test_method): scenario.serialize_response})
    service_mate = test_utilities.RecordingLink()
    service_link.join_link(service_mate)
    port = service_link.add_port('[::]:0', None)
    service_link.start()
    channel = _intermediary_low.Channel('localhost:%d' % port, None)
    invocation_link = invocation.invocation_link(
        channel, 'localhost', None,
        {(test_group, test_method): scenario.serialize_request},
        {(test_group, test_method): scenario.deserialize_response})
    invocation_mate = test_utilities.RecordingLink()
    invocation_link.join_link(invocation_mate)
    invocation_link.start()
    # Commencement ticket (sequence number 0) opens the operation but does
    # not terminate it; requests follow with increasing sequence numbers.
    invocation_ticket = links.Ticket(
        test_operation_id, 0, test_group, test_method,
        links.Ticket.Subscription.FULL, test_constants.LONG_TIMEOUT, None, None,
        None, None, None, None, None, None)
    invocation_link.accept_ticket(invocation_ticket)
    requests = scenario.requests()
    for request_index, request in enumerate(requests):
      # Send one request payload, wait until the service has seen it,
      # then answer it with one response payload and wait for that too.
      request_ticket = links.Ticket(
          test_operation_id, 1 + request_index, None, None, None, None, 1, None,
          request, None, None, None, None, None)
      invocation_link.accept_ticket(request_ticket)
      service_mate.block_until_tickets_satisfy(
          test_cases.at_least_n_payloads_received_predicate(1 + request_index))
      response_ticket = links.Ticket(
          service_mate.tickets()[0].operation_id, request_index, None, None,
          None, None, 1, None, scenario.response_for_request(request), None,
          None, None, None, None)
      service_link.accept_ticket(response_ticket)
      invocation_mate.block_until_tickets_satisfy(
          test_cases.at_least_n_payloads_received_predicate(1 + request_index))
    request_count = len(requests)
    # Both sides finish with COMPLETION tickets; the service side carries
    # the status code and details message.
    invocation_completion_ticket = links.Ticket(
        test_operation_id, request_count + 1, None, None, None, None, None,
        None, None, None, None, None, links.Ticket.Termination.COMPLETION,
        None)
    invocation_link.accept_ticket(invocation_completion_ticket)
    service_mate.block_until_tickets_satisfy(test_cases.terminated)
    service_completion_ticket = links.Ticket(
        service_mate.tickets()[0].operation_id, request_count, None, None, None,
        None, None, None, None, None, test_code, test_message,
        links.Ticket.Termination.COMPLETION, None)
    service_link.accept_ticket(service_completion_ticket)
    invocation_mate.block_until_tickets_satisfy(test_cases.terminated)
    invocation_link.stop()
    service_link.begin_stop()
    service_link.end_stop()
    # Collect just the payload-bearing tickets observed on each side and
    # let the scenario object judge them.
    observed_requests = tuple(
        ticket.payload for ticket in service_mate.tickets()
        if ticket.payload is not None)
    observed_responses = tuple(
        ticket.payload for ticket in invocation_mate.tickets()
        if ticket.payload is not None)
    self.assertTrue(scenario.verify_requests(observed_requests))
    self.assertTrue(scenario.verify_responses(observed_responses))
  def testEmptyScenario(self):
    self._perform_scenario_test(_proto_scenarios.EmptyScenario())
  def testBidirectionallyUnaryScenario(self):
    self._perform_scenario_test(_proto_scenarios.BidirectionallyUnaryScenario())
  def testBidirectionallyStreamingScenario(self):
    self._perform_scenario_test(
        _proto_scenarios.BidirectionallyStreamingScenario())
if __name__ == '__main__':
unittest.main(verbosity=2)
| bsd-3-clause |
sahpat229/POLLUTION | POLLUTION/settings.py | 1 | 3175 | """
Django settings for POLLUTION project.
Generated by 'django-admin startproject' using Django 1.9.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'egppn2gm^$=yub$1y*co6(#cb9=st%youf!=5@_p92%j^vxjbr'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list: with DEBUG = True Django only serves localhost-style hosts.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'POLLUTION.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'POLLUTION.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# SQLite file in the project root; suitable for development only.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| mit |
joshblum/django-with-audit | django/contrib/gis/geometry/test_data.py | 364 | 2994 | """
This module has the mock object definitions used to hold reference geometry
for the GEOS and GDAL tests.
"""
import gzip
import os
from django.contrib import gis
from django.utils import simplejson
# This global used to store reference geometry data; populated lazily by
# TestDataMixin.geometries on first access (see below).
GEOMETRIES = None
# Path where reference test data is located.
TEST_DATA = os.path.join(os.path.dirname(gis.__file__), 'tests', 'data')
def tuplize(seq):
    """Recursively convert every list/tuple nested in *seq* into a tuple.

    Non-sequence values are returned unchanged.
    """
    if not isinstance(seq, (list, tuple)):
        return seq
    return tuple(tuplize(item) for item in seq)
def strconvert(d):
    """Return a copy of dictionary *d* with every key coerced to ``str``.

    Uses ``items()`` rather than the Python 2-only ``iteritems()`` so the
    helper also runs under Python 3; behaviour on Python 2 is unchanged.
    """
    return dict((str(k), v) for k, v in d.items())
def get_ds_file(name, ext):
    """Return the on-disk path of test data source *name* with extension *ext*.

    Data source files live in a directory named after the data source, e.g.
    ``<TEST_DATA>/cities/cities.shp``.
    """
    filename = '%s.%s' % (name, ext)
    return os.path.join(TEST_DATA, name, filename)
class TestObj(object):
    """
    Base testing object; every keyword argument becomes an attribute.
    """
    def __init__(self, **kwargs):
        for attr in kwargs:
            setattr(self, attr, kwargs[attr])
class TestDS(TestObj):
    """
    Object for testing GDAL data sources.
    """
    def __init__(self, name, **kwargs):
        # A shapefile is assumed when no explicit extension is supplied.
        extension = kwargs.pop('ext', 'shp')
        self.ds = get_ds_file(name, extension)
        super(TestDS, self).__init__(**kwargs)
class TestGeom(TestObj):
    """
    Reference geometry wrapper used in GEOS/GDAL tests.

    Coordinate-bearing keyword arguments arrive as (JSON) lists and are
    normalised into tuples here so they compare equal to the tuple
    coordinate sequences the geometry libraries return.
    """
    def __init__(self, **kwargs):
        coords = kwargs.pop('coords', None)
        centroid = kwargs.pop('centroid', None)
        ring_cs = kwargs.pop('ext_ring_cs', None)
        if coords:
            self.coords = tuplize(coords)
        if centroid:
            self.centroid = tuple(centroid)
        # Unlike coords/centroid, ext_ring_cs is always assigned -- it is
        # simply None (or any falsy value, untouched) when absent.
        self.ext_ring_cs = tuplize(ring_cs) if ring_cs else ring_cs
        super(TestGeom, self).__init__(**kwargs)
class TestGeomSet(object):
    """
    Each attribute of this object is a list of `TestGeom` instances.
    """
    def __init__(self, **kwargs):
        for name in kwargs:
            geoms = [TestGeom(**strconvert(params)) for params in kwargs[name]]
            setattr(self, name, geoms)
class TestDataMixin(object):
    """
    Mixin used for GEOS/GDAL test cases that defines a `geometries`
    property, which returns and/or loads the reference geometry data.
    """
    @property
    def geometries(self):
        # Lazily populate the module-level GEOMETRIES cache so the
        # gzip + JSON decoding work happens at most once per process.
        global GEOMETRIES
        if GEOMETRIES is None:
            # Load up the test geometry data from fixture into global.
            gzf = gzip.GzipFile(os.path.join(TEST_DATA, 'geometries.json.gz'))
            geometries = simplejson.loads(gzf.read())
            GEOMETRIES = TestGeomSet(**strconvert(geometries))
        return GEOMETRIES
| bsd-3-clause |
typesupply/dialogKit | examples/GlyphViewDemo.py | 3 | 3938 | from FL import *
from dialogKit import *
class GlyphViewDemo(object):
    """Modal dialog demonstrating the dialogKit GlyphView control.

    The current FontLab font's glyph names are listed on the left;
    selecting a name renders that glyph in the GlyphView, and a column of
    check boxes toggles the view's individual display options.
    """

    # (check box attribute on self.w, GlyphView getter name, setter name)
    # for every display option the dialog exposes, in layout order.
    _VIEW_OPTIONS = (
        ('fillCheckBox', 'getShowFill', 'setShowFill'),
        ('outlineCheckBox', 'getShowOutline', 'setShowOutline'),
        ('pointsCheckBox', 'getShowOnCurvePoints', 'setShowOnCurvePoints'),
        ('descenderCheckBox', 'getShowDescender', 'setShowDescender'),
        ('baselineCheckBox', 'getShowBaseline', 'setShowBaseline'),
        ('xHeightCheckBox', 'getShowXHeight', 'setShowXHeight'),
        ('ascenderCheckBox', 'getShowAscender', 'setShowAscender'),
        ('capHeightCheckBox', 'getShowCapHeight', 'setShowCapHeight'),
        ('upmTopCheckBox', 'getShowUPMTop', 'setShowUPMTop'),
        ('leftCheckBox', 'getShowLeftSidebearing', 'setShowLeftSidebearing'),
        ('rightCheckBox', 'getShowRightSidebearing', 'setShowRightSidebearing'),
    )

    def __init__(self):
        self.font = fl.font
        # Index glyphs by name for quick lookup from the list selection.
        self.glyphs = {}
        for glyph in self.font.glyphs:
            self.glyphs[glyph.name] = glyph
        glyphNames = self.glyphs.keys()
        glyphNames.sort()
        #
        self.w = ModalDialog((700, 500), 'GlyphView Demo')
        self.w.glyphList = List((10, 10, 150, -60), glyphNames, callback=self.glyphListCallback)
        self.w.view = GlyphView((170, 10, 400, -60), None, None)
        # One check box per display option; the initial values mirror the
        # GlyphView defaults shown when the dialog opens.
        self.w.fillCheckBox = CheckBox((580, 10, -10, 20), 'Fill', value=True, callback=self.viewOptionsCallback)
        self.w.outlineCheckBox = CheckBox((580, 35, -10, 20), 'Outline', value=False, callback=self.viewOptionsCallback)
        self.w.pointsCheckBox = CheckBox((580, 60, -10, 20), 'Points', value=True, callback=self.viewOptionsCallback)
        self.w.descenderCheckBox = CheckBox((580, 85, -10, 20), 'Descender', value=True, callback=self.viewOptionsCallback)
        self.w.baselineCheckBox = CheckBox((580, 110, -10, 20), 'Baseline', value=True, callback=self.viewOptionsCallback)
        self.w.xHeightCheckBox = CheckBox((580, 135, -10, 20), 'X Height', value=True, callback=self.viewOptionsCallback)
        self.w.ascenderCheckBox = CheckBox((580, 160, -10, 20), 'Ascender', value=True, callback=self.viewOptionsCallback)
        self.w.capHeightCheckBox = CheckBox((580, 185, -10, 20), 'Cap Height', value=True, callback=self.viewOptionsCallback)
        self.w.upmTopCheckBox = CheckBox((580, 210, -10, 20), 'UPM Top', value=False, callback=self.viewOptionsCallback)
        self.w.leftCheckBox = CheckBox((580, 235, -10, 20), 'Left', value=True, callback=self.viewOptionsCallback)
        self.w.rightCheckBox = CheckBox((580, 260, -10, 20), 'Right', value=True, callback=self.viewOptionsCallback)
        #
        self.w.open()

    def glyphListCallback(self, sender):
        """Show the newly selected glyph (or clear the view) and redraw."""
        selection = sender.getSelection()
        if not selection:
            font = glyph = None
        else:
            glyphName = sender[selection[0]]
            glyph = self.glyphs[glyphName]
            font = self.font
        self.w.view.set(font, glyph)
        self.w.view.update()

    def viewOptionsCallback(self, sender):
        """Sync every check box value into the GlyphView, then redraw.

        Replaces the original eleven near-identical if-blocks with one
        data-driven loop over _VIEW_OPTIONS; each option is written to the
        view only when its value actually changed, as before.
        """
        view = self.w.view
        for checkBoxName, getterName, setterName in self._VIEW_OPTIONS:
            value = getattr(self.w, checkBoxName).get()
            if value != getattr(view, getterName)():
                getattr(view, setterName)(value)
        view.update()
GlyphViewDemo() | mit |
vaygr/ansible | lib/ansible/module_utils/facts/system/chroot.py | 40 | 1029 | # Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.module_utils.facts.collector import BaseFactCollector
def is_chroot():
    """Best-effort detection of whether this process runs inside a chroot.

    Returns True when the Debian chroot marker environment variable is set,
    or when '/' does not match the root seen by PID 1; when PID 1's root
    cannot be inspected (not root, or no /proc), falls back to checking
    that '/' is not the conventional root inode #2.
    """
    if os.environ.get('debian_chroot', False):
        return True
    my_root = os.stat('/')
    try:
        # Check if my file system is the root one: a mismatch in inode or
        # device with the root of PID 1 means we are chrooted.
        proc_root = os.stat('/proc/1/root/.')
        return my_root.st_ino != proc_root.st_ino or my_root.st_dev != proc_root.st_dev
    except OSError:
        # I'm not root or no proc; fall back to checking the root inode.
        # (Narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.)
        return my_root.st_ino != 2
class ChrootFactCollector(BaseFactCollector):
    # Fact collector exposing the single boolean fact 'is_chroot'.
    name = 'chroot'
    _fact_ids = set(['is_chroot'])
    def collect(self, module=None, collected_facts=None):
        # `module` and `collected_facts` are part of the collector API but
        # are not needed to detect a chroot.
        return {'is_chroot': is_chroot()}
Stanford-Online/edx-platform | openedx/core/djangoapps/oauth_dispatch/dot_overrides/validators.py | 10 | 5245 | """
Classes that override default django-oauth-toolkit behavior
"""
from __future__ import unicode_literals
from datetime import datetime
from django.contrib.auth import authenticate, get_user_model
from django.contrib.auth.backends import AllowAllUsersModelBackend as UserModelBackend
from django.db.models.signals import pre_save
from django.dispatch import receiver
from oauth2_provider.models import AccessToken
from oauth2_provider.oauth2_validators import OAuth2Validator
from oauth2_provider.scopes import get_scopes_backend
from pytz import utc
from ratelimitbackend.backends import RateLimitMixin
from ..models import RestrictedApplication
@receiver(pre_save, sender=AccessToken)
def on_access_token_presave(sender, instance, *args, **kwargs):  # pylint: disable=unused-argument
    """
    Mark AccessTokens as expired for 'restricted applications' if required.
    """
    # Force the expiry to the Unix epoch so the token is always treated as
    # already expired for RestrictedApplication clients.
    if RestrictedApplication.should_expire_access_token(instance.application):
        instance.expires = datetime(1970, 1, 1, tzinfo=utc)
class EdxRateLimitedAllowAllUsersModelBackend(RateLimitMixin, UserModelBackend):
    """
    Authentication backend needed to incorporate rate limiting of login attempts - but also
    enabling users with is_active of False in the Django auth_user model to still authenticate.
    This is necessary for mobile users using 3rd party auth who have not activated their accounts,
    Inactive users who use 1st party auth (username/password auth) will still fail login attempts,
    just at a higher layer, in the login_user view.
    See: https://openedx.atlassian.net/browse/TNL-4516
    """
    # All behavior comes from RateLimitMixin and AllowAllUsersModelBackend;
    # this class exists only to combine them.
    pass
class EdxOAuth2Validator(OAuth2Validator):
    """
    Validator class that implements edX-specific custom behavior:
     * It allows users to log in with their email or username.
     * It does not require users to be active before logging in.
    """
    def validate_user(self, username, password, client, request, *args, **kwargs):
        """
        Authenticate users, but allow inactive users (with u.is_active == False)
        to authenticate.
        """
        user = self._authenticate(username=username, password=password)
        if user is not None:
            request.user = user
            return True
        return False
    def _authenticate(self, username, password):
        """
        Authenticate the user, allowing the user to identify themselves either
        by username or email.
        """
        authenticated_user = authenticate(username=username, password=password)
        if authenticated_user is None:
            # Username authentication failed; retry treating the supplied
            # identifier as an email address.
            UserModel = get_user_model()  # pylint: disable=invalid-name
            try:
                email_user = UserModel.objects.get(email=username)
            except UserModel.DoesNotExist:
                authenticated_user = None
            else:
                authenticated_user = authenticate(username=email_user.username, password=password)
        return authenticated_user
    def save_bearer_token(self, token, request, *args, **kwargs):
        """
        Ensure that access tokens issued via client credentials grant are
        associated with the owner of the ``Application``.
        Also, update the `expires_in` value in the token response for
        RestrictedApplications.
        """
        grant_type = request.grant_type
        user = request.user
        if grant_type == 'client_credentials':
            # Temporarily remove the grant type to avoid triggering the super
            # method's code that removes request.user.
            request.grant_type = None
            # Ensure the tokens get associated with the correct user since DOT
            # does not normally associate access tokens issued with the
            # client_credentials grant to users.
            request.user = request.client.user
        super(EdxOAuth2Validator, self).save_bearer_token(token, request, *args, **kwargs)
        if RestrictedApplication.should_expire_access_token(request.client):
            # Since RestrictedApplications will override the DOT defined expiry, so that access_tokens
            # are always expired, we need to re-read the token from the database and then calculate the
            # expires_in (in seconds) from what we stored in the database. This value should be a
            # negative value, meaning that it is already expired.
            access_token = AccessToken.objects.get(token=token['access_token'])
            utc_now = datetime.utcnow().replace(tzinfo=utc)
            expires_in = (access_token.expires - utc_now).total_seconds()
            # assert that RestrictedApplications only issue expired tokens;
            # blow up processing if we see otherwise.
            # NOTE(review): `assert` statements are stripped under `python -O`;
            # an explicit check-and-raise would be safer if -O is ever used.
            assert expires_in < 0
            token['expires_in'] = expires_in
        # Restore the original request attributes
        request.grant_type = grant_type
        request.user = user
    def validate_scopes(self, client_id, scopes, request, *args, **kwargs):
        """
        Ensure required scopes are permitted (as specified in the settings file)
        """
        available_scopes = get_scopes_backend().get_available_scopes(application=client, request=request)
        return set(scopes).issubset(set(available_scopes))
abligh/xen4.2-minideb | tools/python/xen/xend/server/BlktapController.py | 26 | 10719 | # Copyright (c) 2005, XenSource Ltd.
import string, re, os
from xen.xend.server.blkif import BlkifController
from xen.xend.XendLogging import log
from xen.util.xpopen import xPopen3
# Next loopback device letter index / id to try when allocating phantom
# block devices for HVM guests (see BlktapController.getDeviceDetails).
phantomDev = 0;
phantomId = 0;
# Disk back-end types handled by the original blktap1 implementation.
blktap1_disk_types = [
    'aio',
    'sync',
    'vmdk',
    'ram',
    'qcow',
    'qcow2',
    'ioemu',
    ]
# Disk back-end types supported by tapdisk2 (blktap2).
blktap2_disk_types = [
    'aio',
    'ram',
    'qcow',
    'vhd',
    'remus',
    ]
# Every type the 'tap:' uname prefix may specify.
blktap_disk_types = blktap1_disk_types + blktap2_disk_types
def doexec(args, inputtext=None):
    """Execute a subprocess, then return its return code, stdout and stderr"""
    # NOTE(review): proc.wait() runs before the returned stdout/stderr file
    # objects have been drained; a child producing more output than the pipe
    # buffer holds could block forever here. The visible caller
    # (TapdiskController.exc, via tap-ctl) produces small output -- confirm
    # that holds for any new callers.
    proc = xPopen3(args, True)
    if inputtext != None:
        proc.tochild.write(inputtext)
    stdout = proc.fromchild
    stderr = proc.childerr
    rc = proc.wait()
    return (rc,stdout,stderr)
# blktap1 device controller
class BlktapController(BlkifController):
    # Device controller for blktap1 ('tap:') virtual block devices.
    def __init__(self, vm):
        BlkifController.__init__(self, vm)
    def frontendRoot(self):
        """@see DevController#frontendRoot"""
        return "%s/device/vbd" % self.vm.getDomainPath()
    def getDeviceDetails(self, config):
        # Extends the blkif device details: for HVM guests, allocate a free
        # loopback ('phantom') vbd in dom0 backed by the same image and
        # record a xenstore link to it in the frontend entry.
        (devid, back, front) = BlkifController.getDeviceDetails(self, config)
        phantomDevid = 0
        wrapped = False
        try:
            imagetype = self.vm.info['image']['type']
        except:
            # No image type information available; treat as non-HVM.
            imagetype = ""
        if imagetype == 'hvm':
            # NOTE(review): tdevname is assigned but never used below.
            tdevname = back['dev']
            index = ['c', 'd', 'e', 'f', 'g', 'h', 'i', \
                     'j', 'l', 'm', 'n', 'o', 'p']
            # Scan xvd<letter><1..15> device names, advancing through the
            # letter index and wrapping once, until a name that does not yet
            # exist in /dev is found (os.stat raising means it is free).
            while True:
                global phantomDev
                global phantomId
                import os, stat
                phantomId = phantomId + 1
                if phantomId == 16:
                    if index[phantomDev] == index[-1]:
                        if wrapped:
                            # NOTE(review): VmError is not imported in this
                            # module's visible imports -- raising here would
                            # itself fail with a NameError; confirm against
                            # the full file.
                            raise VmError(" No loopback block \
                                devices are available. ")
                        wrapped = True
                        phantomDev = 0
                    else:
                        phantomDev = phantomDev + 1
                    phantomId = 1
                devname = 'xvd%s%d' % (index[phantomDev], phantomId)
                try:
                    info = os.stat('/dev/%s' % devname)
                except:
                    break
            vbd = { 'mode': 'w', 'device': devname }
            fn = 'tap:%s' % back['params']
            # recurse ... by creating the vbd, then fallthrough
            # and finish creating the original device
            from xen.xend import XendDomain
            dom0 = XendDomain.instance().privilegedDomain()
            phantomDevid = dom0.create_phantom_vbd_with_vdi(vbd, fn)
            # we need to wait for this device at a higher level
            # the vbd that gets created will have a link to us
            # and will let them do it there
        # add a hook to point to the phantom device,
        # root path is always the same (dom0 tap)
        if phantomDevid != 0:
            front['phantom_vbd'] = '/local/domain/0/backend/tap/0/%s' \
                                   % str(phantomDevid)
        return (devid, back, front)
class Blktap2Controller(BlktapController):
    # Device controller for blktap2 devices: tapdisk2 devices exposed to the
    # guest through blkback, falling back to blktap1 when unsupported.
    def __init__(self, vm):
        BlktapController.__init__(self, vm)
    def backendPath(self, backdom, devid):
        # blktap2 devices are served by blkback, so their backend lives
        # under 'vbd'; plain blktap1 devices live under 'tap'.
        if self.deviceClass == 'tap2':
            deviceClass = 'vbd'
        else:
            deviceClass = 'tap'
        return "%s/backend/%s/%s/%d" % (backdom.getDomainPath(),
                                        deviceClass,
                                        self.vm.getDomid(), devid)
    def getDeviceDetails(self, config):
        (devid, back, front) = BlktapController.getDeviceDetails(self, config)
        if self.deviceClass == 'tap2':
            # since blktap2 uses blkback as a backend the 'params' field contains
            # the path to the blktap2 device (/dev/xen/blktap-2/tapdev*). As well,
            # we need to store the params used to create the blktap2 device
            # (tap:tapdisk:<driver>:/<image-path>)
            tapdisk_uname = config.get('tapdisk_uname', '')
            (_, tapdisk_params) = string.split(tapdisk_uname, ':', 1)
            back['tapdisk-params'] = tapdisk_params
        return (devid, back, front)
    def getDeviceConfiguration(self, devid, transaction = None):
        # this is a blktap2 device, so we need to overwrite the 'params' field
        # with the actual blktap2 parameters. (the vbd parameters are of little
        # use to us)
        config = BlktapController.getDeviceConfiguration(self, devid, transaction)
        if transaction is None:
            tapdisk_params = self.readBackend(devid, 'tapdisk-params')
        else:
            tapdisk_params = self.readBackendTxn(transaction, devid, 'tapdisk-params')
        if tapdisk_params:
            config['uname'] = 'tap:' + tapdisk_params
        return config
    def createDevice(self, config):
        # Parse the uname; a 4-part form carries an explicit subtype,
        # otherwise 'tapdisk' is assumed.
        uname = config.get('uname', '')
        try:
            (typ, subtyp, params, file) = string.split(uname, ':', 3)
            if subtyp not in ('tapdisk', 'ioemu'):
                raise ValueError('invalid subtype')
        except:
            (typ, params, file) = string.split(uname, ':', 2)
            subtyp = 'tapdisk'
        # NOTE(review): ('tap') is a plain string, not a 1-tuple, so this
        # is a substring test; presumably ('tap',) was intended -- confirm.
        if typ in ('tap'):
            if subtyp in ('tapdisk', 'ioemu'):
                if params not in blktap2_disk_types or \
                    TapdiskController.check():
                    # pass this device off to BlktapController
                    log.warn('WARNING: using deprecated blktap module')
                    self.deviceClass = 'tap'
                    devid = BlktapController.createDevice(self, config)
                    self.deviceClass = 'tap2'
                    return devid
                # NOTE(review): if the conditions above do not match,
                # `device` is never bound and the code below would raise a
                # NameError -- confirm all callers pass a 'tap:' uname.
                device = TapdiskController.create(params, file)
        # modify the configuration to create a blkback for the underlying
        # blktap2 device. Note: we need to preserve the original tapdisk uname
        # (it is used during save/restore and for managed domains).
        config.update({'tapdisk_uname' : uname})
        config.update({'uname' : 'phy:' + device.rstrip()})
        devid = BlkifController.createDevice(self, config)
        config.update({'uname' : uname})
        config.pop('tapdisk_uname')
        return devid
    # This function is called from a thread when the
    # domain is detached from the disk.
    def finishDeviceCleanup(self, backpath, path):
        """Perform any device specific cleanup
        @backpath backend xenstore path.
        @path frontend device path
        """
        # Figure out what we're going to wait on.
        self.waitForBackend_destroy(backpath)
        TapdiskController.destroy(path)
class TapdiskException(Exception):
    """Raised when a tap-ctl invocation fails (see TapdiskController.exc)."""
    pass
class TapdiskController(object):
    '''class which encapsulates all tapdisk control operations'''

    # Control binary and the device-node prefix used by blktap2.
    TAP_CTL = 'tap-ctl'
    TAP_DEV = '/dev/xen/blktap-2/tapdev'

    class Tapdisk(object):
        # Plain value object describing one tapdisk instance as reported
        # by 'tap-ctl list'.
        def __init__(self, pid=None, minor=-1, state=None,
                     dtype='', image=None, device=None):
            self.pid = pid
            self.minor = minor
            self.state = state
            self.dtype = dtype
            self.image = image
            self.device = device

        def __str__(self):
            return 'image=%s pid=%s minor=%s state=%s type=%s device=%s' \
                % (self.image, self.pid, self.minor, self.state, self.dtype,
                   self.device)

    @staticmethod
    def exc(*args):
        """Run 'tap-ctl <args>' and return its stripped stdout.

        Raises TapdiskException on a non-zero exit status.
        """
        # doexec is provided by the surrounding xend module -- presumably it
        # returns (rc, stdout, stderr) with file-like streams; TODO confirm.
        rc, stdout, stderr = doexec([TapdiskController.TAP_CTL] + list(args))
        out, err = stdout.read().strip(), stderr.read().strip()
        stdout.close()
        stderr.close()
        if rc:
            raise TapdiskException('%s failed (%s %s %s)' % \
                                   (args, rc, out, err))
        return out

    @staticmethod
    def check():
        """Return 0 when 'tap-ctl check' succeeds, -1 otherwise."""
        try:
            TapdiskController.exc('check')
            return 0
        except Exception, e:
            log.warn("tapdisk2 check failed: %s" % e)
            return -1

    @staticmethod
    def list():
        """Parse 'tap-ctl list' output into a list of Tapdisk objects."""
        tapdisks = []
        _list = TapdiskController.exc('list')
        if not _list: return []
        for line in _list.splitlines():
            tapdisk = TapdiskController.Tapdisk()
            # Since 'tap-ctl list' does not escape blanks in the path, hard-code the current format using 4 pairs to prevent splitting the path
            for pair in line.split(None, 3):
                key, value = pair.split('=', 1)
                if key == 'pid':
                    tapdisk.pid = value
                elif key == 'minor':
                    tapdisk.minor = int(value)
                    # A non-negative minor determines the device node path.
                    if tapdisk.minor >= 0:
                        tapdisk.device = '%s%s' % \
                            (TapdiskController.TAP_DEV, tapdisk.minor)
                elif key == 'state':
                    tapdisk.state = value
                elif key == 'args' and value.find(':') != -1:
                    # 'args' looks like '<type>:<image-path>'.
                    tapdisk.dtype, tapdisk.image = value.split(':', 1)
            tapdisks.append(tapdisk)
        return tapdisks

    @staticmethod
    def fromDevice(device):
        """Return the Tapdisk owning the given device node, or None."""
        if device.startswith(TapdiskController.TAP_DEV):
            minor = os.minor(os.stat(device).st_rdev)
            tapdisks = filter(lambda x: x.minor == minor,
                              TapdiskController.list())
            # Exactly one match expected; anything else yields None.
            if len(tapdisks) == 1:
                return tapdisks[0]
        return None

    @staticmethod
    def create(dtype, image):
        """Create a tapdisk for the image and return its device path."""
        return TapdiskController.exc('create', '-a%s:%s' % (dtype, image))

    @staticmethod
    def destroy(device):
        """Destroy (or free, when no pid is attached) the tapdisk behind
        the given device node; a no-op for unknown devices."""
        tapdisk = TapdiskController.fromDevice(device)
        if tapdisk:
            if tapdisk.pid:
                TapdiskController.exc('destroy',
                                      '-p%s' % tapdisk.pid,
                                      '-m%s' % tapdisk.minor)
            else:
                TapdiskController.exc('free', '-m%s' % tapdisk.minor)

    @staticmethod
    def pause(device):
        """Pause the tapdisk behind the device node, if one is running."""
        tapdisk = TapdiskController.fromDevice(device)
        if tapdisk and tapdisk.pid:
            TapdiskController.exc('pause',
                                  '-p%s' % tapdisk.pid,
                                  '-m%s' % tapdisk.minor)

    @staticmethod
    def unpause(device):
        """Resume a previously paused tapdisk, if one is running."""
        tapdisk = TapdiskController.fromDevice(device)
        if tapdisk and tapdisk.pid:
            TapdiskController.exc('unpause',
                                  '-p%s' % tapdisk.pid,
                                  '-m%s' % tapdisk.minor)
| gpl-2.0 |
zzliujianbo/shadowsocks | shadowsocks/utils.py | 1 | 11775 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import json
import sys
import getopt
import logging
from shadowsocks.common import to_bytes, to_str
VERBOSE_LEVEL = 5
def check_python():
    """Exit with a message unless the interpreter is Python 2.6+ or 3.3+."""
    major, minor = sys.version_info[0], sys.version_info[1]
    if major == 2:
        if minor < 6:
            print('Python 2.6+ required')
            sys.exit(1)
    elif major == 3:
        if minor < 3:
            print('Python 3.3+ required')
            sys.exit(1)
    else:
        print('Python version not supported')
        sys.exit(1)
def print_shadowsocks():
    """Print the installed shadowsocks version (blank when unknown)."""
    try:
        import pkg_resources
        version = pkg_resources.get_distribution('shadowsocks').version
    except Exception:
        # Running from a source checkout without installed metadata.
        version = ''
    print('shadowsocks %s' % version)
def find_config():
    """Locate config.json in the working directory or next to the package.

    Returns the first existing candidate path, or None.
    """
    candidates = [
        'config.json',
        os.path.join(os.path.dirname(__file__), '../', 'config.json'),
    ]
    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
    return None
def check_config(config):
    """Sanity-check a parsed configuration and warn about risky settings.

    Logs warnings for unsafe listen addresses, weak ciphers ('table',
    'rc4') and unusual timeouts; aborts the process when the shipped
    default password is still in use.  Note: values parsed from
    config.json are bytes (see _decode_dict), hence the b'...' compares.
    """
    if config.get('local_address', '') in [b'0.0.0.0']:
        # logging.warn is a deprecated alias; use logging.warning.
        logging.warning(
            'warning: local set to listen on 0.0.0.0, it\'s not safe')
    if config.get('server', '') in [b'127.0.0.1', b'localhost']:
        logging.warning(
            'warning: server set to listen on %s:%s, are you sure?' %
            (to_str(config['server']), config['server_port']))
    if (config.get('method', '') or '').lower() == b'table':
        logging.warning('warning: table is not safe; please use a safer '
                        'cipher, like AES-256-CFB')
    if (config.get('method', '') or '').lower() == b'rc4':
        logging.warning('warning: RC4 is not safe; please use a safer '
                        'cipher, like AES-256-CFB')
    if config.get('timeout', 300) < 100:
        logging.warning('warning: your timeout %d seems too short' %
                        int(config.get('timeout')))
    if config.get('timeout', 300) > 600:
        logging.warning('warning: your timeout %d seems too long' %
                        int(config.get('timeout')))
    if config.get('password') in [b'mypassword']:
        logging.error('DON\'T USE DEFAULT PASSWORD! Please change it in your '
                      'config.json!')
        exit(1)
def get_config(is_local):
    """Assemble the runtime configuration from config file and CLI flags.

    A first pass over the options only looks for ``-c`` so the JSON
    config file can be loaded, then a second pass lets the remaining
    command line flags override values from the file.

    Args:
        is_local: True when parsing for the client (sslocal), False for
            the server (ssserver).

    Returns:
        dict: The validated configuration with defaults filled in.

    Exits the process on malformed options, unparsable JSON or missing
    mandatory values.
    """
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-s: %(message)s')
    if is_local:
        shortopts = 'hd:s:b:p:k:l:m:c:t:vq'
        longopts = ['help', 'fast-open', 'pid-file=', 'log-file=']
    else:
        shortopts = 'hd:s:p:k:m:c:t:vq'
        longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'workers=']
    try:
        config_path = find_config()
        # First pass: only honour -c here.
        optlist, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
        for key, value in optlist:
            if key == '-c':
                config_path = value
        if config_path:
            logging.info('loading config from %s' % config_path)
            with open(config_path, 'rb') as f:
                try:
                    config = json.loads(f.read().decode('utf8'),
                                        object_hook=_decode_dict)
                except ValueError as e:
                    # BUG FIX: ValueError has no .message attribute on
                    # Python 3; log the exception object itself.
                    logging.error('found an error in config.json: %s', e)
                    sys.exit(1)
        else:
            config = {}
        # Second pass: command line flags override file values.
        optlist, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
        v_count = 0
        for key, value in optlist:
            if key == '-p':
                config['server_port'] = int(value)
            elif key == '-k':
                config['password'] = to_bytes(value)
            elif key == '-l':
                config['local_port'] = int(value)
            elif key == '-s':
                config['server'] = to_bytes(value)
            elif key == '-m':
                config['method'] = to_bytes(value)
            elif key == '-b':
                config['local_address'] = to_bytes(value)
            elif key == '-v':
                v_count += 1
                # '-vv' turns on more verbose mode
                config['verbose'] = v_count
            elif key == '-t':
                config['timeout'] = int(value)
            elif key == '--fast-open':
                config['fast_open'] = True
            elif key == '--workers':
                config['workers'] = int(value)
            elif key in ('-h', '--help'):
                if is_local:
                    print_local_help()
                else:
                    print_server_help()
                sys.exit(0)
            elif key == '-d':
                config['daemon'] = value
            elif key == '--pid-file':
                config['pid-file'] = value
            elif key == '--log-file':
                config['log-file'] = value
            elif key == '-q':
                v_count -= 1
                config['verbose'] = v_count
    except getopt.GetoptError as e:
        print(e, file=sys.stderr)
        print_help(is_local)
        sys.exit(2)
    if not config:
        logging.error('config not specified')
        print_help(is_local)
        sys.exit(2)
    # Defaults for everything the user did not specify.
    # (The original code set the 'workers' default twice; once suffices.)
    config['password'] = config.get('password', '')
    config['method'] = config.get('method', 'aes-256-cfb')
    config['port_password'] = config.get('port_password', None)
    config['timeout'] = int(config.get('timeout', 300))
    config['fast_open'] = config.get('fast_open', False)
    config['workers'] = config.get('workers', 1)
    config['pid-file'] = config.get('pid-file', '/var/run/shadowsocks.pid')
    config['log-file'] = config.get('log-file', '/var/log/shadowsocks.log')
    config['verbose'] = config.get('verbose', False)
    config['local_address'] = config.get('local_address', '127.0.0.1')
    config['local_port'] = config.get('local_port', 1080)
    if is_local:
        if config.get('server', None) is None:
            logging.error('server addr not specified')
            print_local_help()
            sys.exit(2)
    else:
        config['server'] = config.get('server', '0.0.0.0')
    config['server_port'] = config.get('server_port', 8388)
    if is_local and not config.get('password', None):
        logging.error('password not specified')
        print_help(is_local)
        sys.exit(2)
    if not is_local and not config.get('password', None) \
            and not config.get('port_password', None):
        logging.error('password or port_password not specified')
        print_help(is_local)
        sys.exit(2)
    if 'local_port' in config:
        config['local_port'] = int(config['local_port'])
    # server_port may be a list when port_password maps several ports.
    if 'server_port' in config and type(config['server_port']) != list:
        config['server_port'] = int(config['server_port'])
    # Reset handlers so the verbosity-dependent format below takes effect.
    logging.getLogger('').handlers = []
    logging.addLevelName(VERBOSE_LEVEL, 'VERBOSE')
    if config['verbose'] >= 2:
        level = VERBOSE_LEVEL
    elif config['verbose'] == 1:
        level = logging.DEBUG
    elif config['verbose'] == -1:
        level = logging.WARN
    elif config['verbose'] <= -2:
        level = logging.ERROR
    else:
        level = logging.INFO
    logging.basicConfig(level=level,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    check_config(config)
    return config
def print_help(is_local):
    """Print the usage text for the requested side (client or server)."""
    helper = print_local_help if is_local else print_server_help
    helper()
def print_local_help():
    """Print usage information for the local proxy (sslocal)."""
    usage = '''usage: sslocal [-h] -s SERVER_ADDR [-p SERVER_PORT]
               [-b LOCAL_ADDR] [-l LOCAL_PORT] -k PASSWORD [-m METHOD]
               [-t TIMEOUT] [-c CONFIG] [--fast-open] [-v] -[d] [-q]
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
  -h, --help             show this help message and exit
  -c CONFIG              path to config file
  -s SERVER_ADDR         server address
  -p SERVER_PORT         server port, default: 8388
  -b LOCAL_ADDR          local binding address, default: 127.0.0.1
  -l LOCAL_PORT          local port, default: 1080
  -k PASSWORD            password
  -m METHOD              encryption method, default: aes-256-cfb
  -t TIMEOUT             timeout in seconds, default: 300
  --fast-open            use TCP_FASTOPEN, requires Linux 3.7+
General options:
  -d start/stop/restart  daemon mode
  --pid-file PID_FILE    pid file for daemon mode
  --log-file LOG_FILE    log file for daemon mode
  -v, -vv                verbose mode
  -q, -qq                quiet mode, only show warnings/errors
Online help: <https://github.com/shadowsocks/shadowsocks>
'''
    print(usage)
def print_server_help():
    """Print usage information for the server side (ssserver)."""
    usage = '''usage: ssserver [-h] [-s SERVER_ADDR] [-p SERVER_PORT] -k PASSWORD
                -m METHOD [-t TIMEOUT] [-c CONFIG] [--fast-open]
                [--workers WORKERS] [-v] [-d start] [-q]
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
  -h, --help             show this help message and exit
  -c CONFIG              path to config file
  -s SERVER_ADDR         server address, default: 0.0.0.0
  -p SERVER_PORT         server port, default: 8388
  -k PASSWORD            password
  -m METHOD              encryption method, default: aes-256-cfb
  -t TIMEOUT             timeout in seconds, default: 300
  --fast-open            use TCP_FASTOPEN, requires Linux 3.7+
  --workers WORKERS      number of workers, available on Unix/Linux
General options:
  -d start/stop/restart  daemon mode
  --pid-file PID_FILE    pid file for daemon mode
  --log-file LOG_FILE    log file for daemon mode
  -v, -vv                verbose mode
  -q, -qq                quiet mode, only show warnings/errors
Online help: <https://github.com/shadowsocks/shadowsocks>
'''
    print(usage)
def _decode_list(data):
rv = []
for item in data:
if hasattr(item, 'encode'):
item = item.encode('utf-8')
elif isinstance(item, list):
item = _decode_list(item)
elif isinstance(item, dict):
item = _decode_dict(item)
rv.append(item)
return rv
def _decode_dict(data):
rv = {}
for key, value in data.items():
if hasattr(value, 'encode'):
value = value.encode('utf-8')
elif isinstance(value, list):
value = _decode_list(value)
elif isinstance(value, dict):
value = _decode_dict(value)
rv[key] = value
return rv
| mit |
mitchrule/Miscellaneous | Django_Project/django/Lib/site-packages/wheel/signatures/ed25519py.py | 565 | 1695 | # -*- coding: utf-8 -*-
import warnings
import os
from collections import namedtuple
from . import djbec
# Public API of this module.
__all__ = ['crypto_sign', 'crypto_sign_open', 'crypto_sign_keypair', 'Keypair',
           'PUBLICKEYBYTES', 'SECRETKEYBYTES', 'SIGNATUREBYTES']

# Sizes in bytes of Ed25519 keys and signatures.  The secret key is the
# 32-byte seed concatenated with the 32-byte verifying key.
PUBLICKEYBYTES=32
SECRETKEYBYTES=64
SIGNATUREBYTES=64

Keypair = namedtuple('Keypair', ('vk', 'sk')) # verifying key, secret key
def crypto_sign_keypair(seed=None):
    """Derive an Ed25519 (verifying, secret) key pair.

    When *seed* is None a fresh 32-byte seed is drawn from os.urandom;
    a caller-supplied seed triggers a RuntimeWarning, since the library
    prefers to pick its own randomness.  The secret key is the seed
    followed by the verifying key.
    """
    if seed is not None:
        warnings.warn("ed25519ll should choose random seed.",
                      RuntimeWarning)
    else:
        seed = os.urandom(PUBLICKEYBYTES)
    if len(seed) != 32:
        raise ValueError("seed must be 32 random bytes or None.")
    vk = djbec.publickey(seed)
    return Keypair(vk, seed + vk)
def crypto_sign(msg, sk):
    """Return signature+message given message and secret key.

    The first SIGNATUREBYTES bytes of the result are the signature; the
    remainder is a copy of *msg*.
    """
    if len(sk) != SECRETKEYBYTES:
        raise ValueError("Bad signing key length %d" % len(sk))
    # Secret key layout: 32-byte seed followed by 32-byte verifying key.
    seed, vk = sk[:PUBLICKEYBYTES], sk[PUBLICKEYBYTES:]
    return djbec.signature(msg, seed, vk) + msg
def crypto_sign_open(signed, vk):
    """Verify *signed* against verifying key *vk* and return the message.

    Raises ValueError on a bad key length or an invalid signature.
    """
    if len(vk) != PUBLICKEYBYTES:
        raise ValueError("Bad verifying key length %d" % len(vk))
    sig, msg = signed[:SIGNATUREBYTES], signed[SIGNATUREBYTES:]
    rc = djbec.checkvalid(sig, msg, vk)
    if not rc:
        raise ValueError("rc != True", rc)
    return msg
| mit |
philipbl/home-assistant | homeassistant/components/sensor/serial_pm.py | 17 | 2799 | """
Support for particulate matter sensors connected to a serial port.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.serial_pm/
"""
import logging
import voluptuous as vol
from homeassistant.const import CONF_NAME
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
REQUIREMENTS = ['pmsensor==0.3']
_LOGGER = logging.getLogger(__name__)
# Configuration keys for this platform.
CONF_SERIAL_DEVICE = 'serial_device'
CONF_BRAND = 'brand'

# Both the sensor brand and the serial device path are mandatory; an
# entity name prefix is optional.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_BRAND): cv.string,
    vol.Required(CONF_SERIAL_DEVICE): cv.string,
    vol.Optional(CONF_NAME): cv.string,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the available PM sensors.

    Creates one ParticulateMatterSensor per value reported by the
    collector (e.g. PM2.5, PM10).  Logs and returns silently when the
    brand is unknown or the serial device cannot be opened.
    """
    from pmsensor import serial_pm as pm
    try:
        coll = pm.PMDataCollector(
            config.get(CONF_SERIAL_DEVICE),
            pm.SUPPORTED_SENSORS[config.get(CONF_BRAND)]
        )
    except KeyError:
        _LOGGER.error("Brand %s not supported\n supported brands: %s",
                      config.get(CONF_BRAND), pm.SUPPORTED_SENSORS.keys())
        return
    except OSError as err:
        _LOGGER.error("Could not open serial connection to %s (%s)",
                      config.get(CONF_SERIAL_DEVICE), err)
        return
    dev = []
    for pmname in coll.supported_values():
        # BUG FIX: the original condition was inverted -- it produced
        # "None PM<x>" when no name was configured and discarded a
        # configured name otherwise.
        if config.get(CONF_NAME) is not None:
            name = '{} PM{}'.format(config.get(CONF_NAME), pmname)
        else:
            name = 'PM{}'.format(pmname)
        dev.append(ParticulateMatterSensor(coll, name, pmname))
    add_devices(dev)
class ParticulateMatterSensor(Entity):
    """Representation of an Particulate matter sensor."""

    def __init__(self, pmDataCollector, name, pmname):
        """Initialize a new PM sensor."""
        # pmname selects which key of the collector's reading dict this
        # entity reports (e.g. '2.5' or '10').
        self._name = name
        self._pmname = pmname
        self._state = None
        self._collector = pmDataCollector

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return "µg/m³"

    def update(self):
        """Read from sensor and update the state."""
        _LOGGER.debug("Reading data from PM sensor")
        try:
            self._state = self._collector.read_data()[self._pmname]
        except KeyError:
            # Keep the previous state when the collector did not report
            # this particle size.
            _LOGGER.error("Could not read PM%s value", self._pmname)

    # NOTE(review): Home Assistant's Entity exposes should_poll as a
    # property; here it is a plain method -- confirm this is intended.
    def should_poll(self):
        """Sensor needs polling."""
        return True
| mit |
TheTimmy/spack | lib/spack/external/_pytest/tmpdir.py | 12 | 4124 | """ support for providing temporary directories to test functions. """
import re
import pytest
import py
from _pytest.monkeypatch import MonkeyPatch
class TempdirFactory:
    """Factory for temporary directories under the common base temp directory.

    The base directory can be configured using the ``--basetemp`` option.
    """

    def __init__(self, config):
        # config: the pytest Config object; a dedicated trace channel is
        # used for tmpdir-related debug output.
        self.config = config
        self.trace = config.trace.get("tmpdir")

    def ensuretemp(self, string, dir=1):
        """ (deprecated) return temporary directory path with
            the given string as the trailing part.  It is usually
            better to use the 'tmpdir' function argument which
            provides an empty unique-per-test-invocation directory
            and is guaranteed to be empty.
        """
        #py.log._apiwarn(">1.1", "use tmpdir function argument")
        return self.getbasetemp().ensure(string, dir=dir)

    def mktemp(self, basename, numbered=True):
        """Create a subdirectory of the base temporary directory and return it.

        If ``numbered``, ensure the directory is unique by adding a number
        prefix greater than any existing one.
        """
        basetemp = self.getbasetemp()
        if not numbered:
            p = basetemp.mkdir(basename)
        else:
            # keep=0: numbered siblings are not garbage-collected here;
            # lock_timeout=None disables stale-lock handling.
            p = py.path.local.make_numbered_dir(prefix=basename,
                keep=0, rootdir=basetemp, lock_timeout=None)
        self.trace("mktemp", p)
        return p

    def getbasetemp(self):
        """ return base temporary directory. """
        # The base temp dir is computed once and cached on the instance.
        try:
            return self._basetemp
        except AttributeError:
            basetemp = self.config.option.basetemp
            if basetemp:
                # Explicit --basetemp: wipe and recreate it.
                basetemp = py.path.local(basetemp)
                if basetemp.check():
                    basetemp.remove()
                basetemp.mkdir()
            else:
                temproot = py.path.local.get_temproot()
                user = get_user()
                if user:
                    # use a sub-directory in the temproot to speed-up
                    # make_numbered_dir() call
                    rootdir = temproot.join('pytest-of-%s' % user)
                else:
                    rootdir = temproot
                rootdir.ensure(dir=1)
                basetemp = py.path.local.make_numbered_dir(prefix='pytest-',
                                                           rootdir=rootdir)
            self._basetemp = t = basetemp.realpath()
            self.trace("new basetemp", t)
            return t

    def finish(self):
        # Called from config cleanup; only emits a trace message.
        self.trace("finish")
def get_user():
    """Return the current user name, or ``None`` when it cannot be
    determined in this environment (see #1010)."""
    import getpass
    try:
        user = getpass.getuser()
    except (ImportError, KeyError):
        # getuser() may fail e.g. without a pwd database entry.
        user = None
    return user
# backward compatibility
# Deprecated alias kept for plugins that still import TempdirHandler.
TempdirHandler = TempdirFactory
def pytest_configure(config):
    """Create a TempdirFactory and attach it to the config object.

    This is to comply with existing plugins which expect the handler to be
    available at pytest_configure time, but ideally should be moved entirely
    to the tmpdir_factory session fixture.
    """
    # MonkeyPatch is used so the attribute injection can be undone at
    # config cleanup time together with the factory's finish().
    mp = MonkeyPatch()
    t = TempdirFactory(config)
    config._cleanup.extend([mp.undo, t.finish])
    # raising=False: do not fail if the attributes do not pre-exist.
    mp.setattr(config, '_tmpdirhandler', t, raising=False)
    mp.setattr(pytest, 'ensuretemp', t.ensuretemp, raising=False)
@pytest.fixture(scope='session')
def tmpdir_factory(request):
    """Return a TempdirFactory instance for the test session.

    The instance is the one created in pytest_configure().
    """
    return request.config._tmpdirhandler
@pytest.fixture
def tmpdir(request, tmpdir_factory):
    """Return a temporary directory path object
    which is unique to each test function invocation,
    created as a sub directory of the base temporary
    directory.  The returned object is a `py.path.local`_
    path object.
    """
    name = request.node.name
    # Raw string: plain "[\W]" is an invalid escape sequence on modern
    # Python.  Replace every non-word character with '_' so the test name
    # is safe as a directory name.
    name = re.sub(r"[\W]", "_", name)
    MAXVAL = 30
    if len(name) > MAXVAL:
        # Keep directory names reasonably short on all platforms.
        name = name[:MAXVAL]
    return tmpdir_factory.mktemp(name, numbered=True)
| lgpl-2.1 |
cklb/PyMoskito | pymoskito/simulation_modules.py | 1 | 14724 | import logging
from copy import copy
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from PyQt5.QtCore import QObject
pyqtWrapperType = type(QObject)
__all__ = ["SimulationModule", "SimulationException",
"Trajectory", "Feedforward", "Controller", "Limiter",
"ModelMixer", "Model", "ModelException",
"Solver", "Disturbance", "Sensor", "ObserverMixer", "Observer"]
class SimulationModuleMeta(ABCMeta, pyqtWrapperType):
    # Combined metaclass so SimulationModule can derive from QObject
    # (Qt metaclass) while still using abc's @abstractmethod machinery.
    pass
class SimulationException(Exception):
    """ Base class for exceptions raised by simulation modules. """
    pass
class SimulationModule(QObject, metaclass=SimulationModuleMeta):
    """
    Smallest unit of the simulation framework.

    This class provides necessary functions like output calculation and holds
    all settings that can be accessed by the user.
    The :py:attr:`public_settings` are read by the
    :py:class:`.SimulationInterface` and then rendered by the GUI. All entries
    stated in this dictionary will be available as changeable settings for the
    module.
    On initialization, a possibly modified (in terms of its values) version of
    this dict will be passed back to this class and is thenceforward available
    via the :py:attr:`settings` property.

    The most important method is :py:func:`calc_output` which is called by the
    :py:class:`Simulator` to retrieve this modules output.

    Args:
        settings(OrderedDict): Settings for this simulation module.
            These entries will be shown in the properties view and can be
            changed by the user. The important entries for this base class are:

            `output info`:
                Dict holding an information dictionaries with keys `Name` and
                `Unit` for each element in the output data.
                If available, these information are used to display reasonable
                names in the result view and to display the corresponding units
                for the result plots.

    Warn:
        Do NOT use '.' in the `output_info` name field.

    TODO:
        Get rid of the point restriction
    """

    def __init__(self, settings):
        QObject.__init__(self, None)
        self._logger = logging.getLogger(self.__class__.__name__)
        assert isinstance(settings, dict)
        # Work on a shallow copy so the caller's dict is not mutated.
        self._settings = copy(settings)
        # Output is recalculated only every 'tick divider'-th solver step.
        self._settings["tick divider"] = settings.get("tick divider", 1)
        # Filled in later by the simulator via the step_width setter.
        self._settings["step width"] = None
        # References to sibling modules are not user-editable settings.
        self._settings.pop("modules", None)

    @property
    @abstractmethod
    def public_settings(self):
        pass

    @property
    def settings(self):
        return self._settings

    @property
    def tick_divider(self):
        return self._settings["tick divider"]

    @property
    def step_width(self):
        return self._settings["step width"]

    @step_width.setter
    def step_width(self, value):
        self._settings["step width"] = value

    @abstractmethod
    def calc_output(self, input_vector):
        pass
class ModelException(SimulationException):
    """
    Exception to be raised if the current system state violates modelling
    assumptions (see :py:meth:`Model.check_consistency`).
    """
    pass
class Model(SimulationModule):
    """
    Base class for all user defined system models in state-space form.

    Args:
        settings (dict): Dictionary holding the config options for this module.
            It must contain the following keys:

            :input_count:
                The length of the input vector for this model.
            :state_count:
                The length of the state vector for this model.
            :initial state:
                The initial state vector for this model.
    """

    def __init__(self, settings):
        SimulationModule.__init__(self, settings)
        # Mandatory model metadata; the initial state must match the
        # declared state dimension.
        assert ("state_count" in settings)
        assert ("input_count" in settings)
        assert ("initial state" in settings)
        assert len(settings["initial state"]) == settings["state_count"]

    @property
    def initial_state(self):
        """ Return the initial state of the system. """
        return self._settings["initial state"]

    @abstractmethod
    def state_function(self, t, x, args):
        """
        Calculate the state derivatives of a system with state x at time t.

        Args:
            x(Array-like): System state.
            t(float): System time.

        Returns:
            Temporal derivative of the system state at time t.
        """
        pass

    def root_function(self, x):
        """
        Check whether a reinitialisation of the integrator should be performed.

        This can be the case if there are discontinuities in the system
        dynamics such as switching.

        Args:
            x(array-like): Current system state.

        Returns:
            tuple:
                * bool: `True` if reset is advised.
                * array-like: State to continue with.
        """
        # Default: never reset, continue with the unchanged state.
        return False, x

    def check_consistency(self, x):
        """
        Check whether the assumptions, made in the modelling process are
        violated.

        Args:
            x: Current system state

        Raises:
            :py:class:`ModelException` : If a violation is detected. This will
                stop the simulation process.
        """
        # Default implementation accepts every state.
        pass
class SolverException(SimulationException):
    """ Raised when a solver step fails (e.g. the model reports an
    inconsistent state after integration). """
    pass
class Solver(SimulationModule):
    """
    Base Class for solver implementations
    """

    def __init__(self, settings):
        assert isinstance(settings["modules"]["Model"], Model)
        self._model = settings["modules"]["Model"]
        # Holds the state computed by the most recent integrate() call;
        # returned on the *next* calc_output invocation (one-step pipeline).
        self.next_output = None
        SimulationModule.__init__(self, settings)

    def calc_output(self, input_vector):
        self.set_input(input_vector["system_input"])
        # Return the previously integrated state and already compute the
        # one for the next step.
        output = self.next_output
        self.next_output = self.integrate(input_vector["time"])
        try:
            self._model.check_consistency(self.next_output)
        except ModelException as e:
            # Wrap the model error so the simulator sees a solver failure.
            raise SolverException("Timestep Integration failed! "
                                  "Model raised: {0}".format(e))
        return output

    @abstractmethod
    def set_input(self, *args):
        pass

    @abstractmethod
    def integrate(self, t):
        pass

    @property
    @abstractmethod
    def t(self):
        # Current solver time.
        pass

    @property
    @abstractmethod
    def successful(self):
        # Whether the last integration step succeeded.
        pass
class ControllerException(SimulationException):
    """ Raised by controllers, e.g. when the selected input source is
    not available in the input vector. """
    pass
class Controller(SimulationModule):
    """
    Base class for controllers.

    Args:
        settings (dict): Dictionary holding the config options for this module.
            It must contain the following keys:

            :input_order:
                The order of required derivatives from the trajectory
                generator.
            :input_type:
                Source for the feedback calculation and one of the following:
                `system_state` , `system_output` , `Observer` or `Sensor` .
    """

    # selectable input sources for controller
    input_sources = ["system_state", "system_output", "Observer", "Sensor"]

    def __init__(self, settings):
        SimulationModule.__init__(self, settings)
        assert ("input_order" in settings)
        assert ("input_type" in settings)
        assert (settings["input_type"] in self.input_sources)

    @property
    def input_order(self):
        return self._settings["input_order"]

    def calc_output(self, input_vector):
        # Pick the configured feedback source out of the input vector;
        # None when the simulator did not provide it.
        input_values = next((input_vector[src] for src in self.input_sources
                             if src == self._settings["input_type"]), None)
        if input_values is None:
            raise ControllerException("Selected Input not available")
        # Trajectory and feedforward signals are optional.
        trajectory_values = input_vector.get("Trajectory", None)
        feedforward_values = input_vector.get("Feedforward", None)
        return self._control(input_vector["time"], trajectory_values,
                             feedforward_values, input_values)

    @abstractmethod
    def _control(self, time, trajectory_values=None, feedforward_values=None,
                 input_values=None, **kwargs):
        """
        Placeholder for control law calculations.

        For more sophisticated implementations overload
        :py:func:`calc_output` .

        Args:
            time (float): Current time.
            trajectory_values (array-like): Desired values from the trajectory
                generator.
            feedforward_values (array-like): Output of feedforward block.
            input_values (array-like): The input values selected by
                ``input_type`` .
            **kwargs: Placeholder for custom parameters.

        Returns:
            Array: Control output.
        """
        pass
class Observer(SimulationModule):
    """
    Base class for observers
    """

    def __init__(self, settings):
        SimulationModule.__init__(self, settings)

    def calc_output(self, input_vector):
        system_input = input_vector.get("system_input", None)
        # Prefer a mixed measurement signal when an ObserverMixer is
        # configured, otherwise fall back to the raw system output.
        if "ObserverMixer" in input_vector:
            system_output = input_vector["ObserverMixer"]
        elif "system_output" in input_vector:
            system_output = input_vector["system_output"]
        else:
            raise SimulationException("No Observer input specified")
        return self._observe(input_vector["time"], system_input, system_output)

    @abstractmethod
    def _observe(self, time, system_input, system_output):
        """
        Placeholder for observer law.

        Args:
            time: Current time.
            system_input: Current system input.
            system_output: Current system output.

        Returns:
            Estimated system state
        """
        pass
class Feedforward(SimulationModule):
    """
    Base class for all feedforward implementations
    """

    def __init__(self, settings):
        # Keep a reference to the model for derived implementations.
        self._model = settings["modules"]["Model"]
        SimulationModule.__init__(self, settings)
        assert ("input_order" in settings)

    @property
    def input_order(self):
        # Number of trajectory derivatives this block requires.
        return self._settings["input_order"]

    def calc_output(self, input_dict):
        return self._feedforward(input_dict["time"], input_dict["Trajectory"])

    @abstractmethod
    def _feedforward(self, time, trajectory_values):
        """
        Placeholder for feedforward calculations.

        Args:
            time (float): Current time.
            trajectory_values(array-like): Desired values from the trajectory
                generator.

        Returns:
            Array: Feedforward output. This signal can be added to the
            controllers output via the :py:class:`.ModelMixer` and is also
            directly passed to the controller.
        """
        pass
class TrajectoryException(SimulationException):
    """ Raised by trajectory generators. """
    pass
class Trajectory(SimulationModule):
    """
    Base class for all trajectory generators
    """

    def __init__(self, settings):
        # The generator must provide as many derivatives as the most
        # demanding consumer (controller or feedforward) requires.
        control_order = 0
        feedforward_order = 0
        if "Controller" in settings["modules"].keys():
            control_order = settings["modules"]["Controller"].input_order
        if "Feedforward" in settings["modules"].keys():
            feedforward_order = settings["modules"]["Feedforward"].input_order
        settings.update(differential_order=max([control_order,
                                                feedforward_order]))
        SimulationModule.__init__(self, settings)

    def calc_output(self, input_vector):
        desired = self._desired_values(input_vector["time"])
        return desired

    @abstractmethod
    def _desired_values(self, t):
        """
        Placeholder for calculations of desired values.

        Args:
            t (float): Time.

        Returns:
            Array: Trajectory output. This should always be a two-dimensional
            array holding the components in to 0th and their derivatives in
            the 1th axis.
        """
        pass
class MixerException(Exception):
    """ Raised by signal mixers.  Note: derives directly from Exception,
    not from SimulationException. """
    pass
class SignalMixer(SimulationModule):
    """
    Base class for all Signal mixing modules

    Subclasses must provide a ``_mix(signals)`` method; it is not defined
    on this base class.
    """

    def __init__(self, settings):
        # 'input signals' lists the input-vector keys to be combined.
        assert "input signals" in settings
        SimulationModule.__init__(self, settings)

    def calc_output(self, input_vector):
        # Collect the configured signals (in input_vector iteration order)
        # and delegate the actual combination to the subclass.
        signals = [value for signal, value in input_vector.items()
                   if signal in self._settings["input signals"]]
        return self._mix(signals)
class ModelMixer(SignalMixer):
    """ Specialisation of :py:class:`SignalMixer`; behaviour is fully
    inherited. """
    pass
class ObserverMixer(SignalMixer):
    """ Specialisation of :py:class:`SignalMixer`; behaviour is fully
    inherited.  Its output is consumed by :py:class:`Observer`. """
    pass
class Limiter(SimulationModule):
    """
    Base class for all limiter variants
    """

    def __init__(self, settings):
        # Key of the signal to be limited (note: underscore variant,
        # unlike Sensor/Disturbance which use "input signal").
        assert "input_signal" in settings
        SimulationModule.__init__(self, settings)

    def calc_output(self, input_dict):
        return self._limit(input_dict[self._settings["input_signal"]])

    def _limit(self, values):
        """
        Placeholder for actual limit calculations.

        The default implementation passes the values through unchanged.

        Args:
            values(array-like): Values to limit.

        Returns:
            Array: Limited output.
        """
        return values
class Sensor(SimulationModule):
    """
    Base class for all sensor variants
    """

    def __init__(self, settings):
        # Key of the signal to be measured.
        assert "input signal" in settings
        SimulationModule.__init__(self, settings)

    def calc_output(self, input_dict):
        return self._measure(input_dict[self._settings["input signal"]])

    def _measure(self, value):
        """
        Placeholder for measurement calculations.

        One may reorder or remove state elements or introduce measurement
        delays here.  The default implementation is an ideal sensor and
        returns the value unchanged.

        Args:
            value (array-like float): Values from the source selected by the
                ``input_signal`` property.

        Returns:
            array-like float: 'Measured' values.
        """
        return value
class Disturbance(SimulationModule):
    """
    Base class for all disturbance variants
    """

    def __init__(self, settings):
        # Key of the signal the disturbance may depend on.
        assert "input signal" in settings
        SimulationModule.__init__(self, settings)

    def calc_output(self, input_dict):
        return self._disturb(input_dict[self._settings["input signal"]])

    @abstractmethod
    def _disturb(self, value):
        """
        Placeholder for disturbance calculations.

        If the noise is to be dependent on the measured signal use its
        `value` to create the noise.

        Args:
            value (array-like float): Values from the source selected by the
                ``input_signal`` property.

        Returns:
            array-like float: Noise that will be mixed with a signal later on.
        """
        pass
| bsd-3-clause |
mcedit/mcedit | albow/menu_bar.py | 1 | 1799 | #
# Albow - Menu bar
#
from pygame import Rect
from widget import Widget, overridable_property
class MenuBar(Widget):
    """Horizontal bar of pull-down menus drawn along the top of a widget.

    Menu titles are laid out left to right; the currently open menu is
    drawn with inverted colours.
    """

    menus = overridable_property('menus', "List of Menu instances")

    def __init__(self, menus=None, width=0, **kwds):
        # Height is one line of the (predicted) font.
        font = self.predict_font(kwds)
        height = font.get_linesize()
        Widget.__init__(self, Rect(0, 0, width, height), **kwds)
        self.menus = menus or []
        self._hilited_menu = None

    def get_menus(self):
        return self._menus

    def set_menus(self, x):
        self._menus = x

    def draw(self, surf):
        """Render the menu titles, the highlighted one in inverse video."""
        fg = self.fg_color
        bg = self.bg_color
        font = self.font
        hilited = self._hilited_menu
        x = 0
        for menu in self._menus:
            text = " %s " % menu.title
            if menu is hilited:
                buf = font.render(text, True, bg, fg)
            else:
                buf = font.render(text, True, fg, bg)
            surf.blit(buf, (x, 0))
            # BUG FIX: advance by the width of the rendered title, not the
            # width of the whole target surface, so titles sit side by side.
            x += buf.get_width()

    def mouse_down(self, e):
        """Open the menu whose title area contains the click position."""
        mx = e.local[0]
        font = self.font
        x = 0
        for menu in self._menus:
            text = " %s " % menu.title
            w = font.size(text)[0]
            if x <= mx < x + w:
                self.show_menu(menu, x)
            # BUG FIX: x was never advanced, so every title was hit-tested
            # against the first slot; keep x in sync with draw()'s layout.
            x += w

    def show_menu(self, menu, x):
        """Present *menu* at horizontal offset *x* and invoke the choice."""
        self._hilited_menu = menu
        try:
            i = menu.present(self, (x, self.height))
        finally:
            # Always clear the highlight, even if present() raises.
            self._hilited_menu = None
        menu.invoke_item(i)

    def handle_command_key(self, e):
        """Dispatch a keyboard shortcut to the right-most matching menu.

        Returns True when an item was invoked, False otherwise.
        """
        menus = self.menus
        for m in xrange(len(menus) - 1, -1, -1):
            menu = menus[m]
            i = menu.find_item_for_key(e)
            if i >= 0:
                menu.invoke_item(i)
                return True
        return False
| isc |
justinhayes/cm_api | python/src/cm_api/endpoints/roles.py | 1 | 8270 | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cm_api.endpoints.types import *
__docformat__ = "epytext"
ROLES_PATH = "/clusters/%s/services/%s/roles"
CM_ROLES_PATH = "/cm/service/roles"
def _get_roles_path(cluster_name, service_name):
  """Return the API path of a service's roles collection.

  The Cloudera Manager service itself has no cluster, so an empty/None
  cluster name selects the CM service roles path.
  """
  if not cluster_name:
    return CM_ROLES_PATH
  return ROLES_PATH % (cluster_name, service_name)
def _get_role_path(cluster_name, service_name, role_name):
  """Return the API path of a single role resource."""
  return "%s/%s" % (_get_roles_path(cluster_name, service_name), role_name)
def create_role(resource_root,
                service_name,
                role_type,
                role_name,
                host_id,
                cluster_name="default"):
  """
  Create a role
  @param resource_root: The root Resource object.
  @param service_name: Service name
  @param role_type: Role type
  @param role_name: Role name
  @param host_id: ID of the host the new role is assigned to
  @param cluster_name: Cluster name
  @return: An ApiRole object
  """
  apirole = ApiRole(resource_root, role_name, role_type,
                    ApiHostRef(resource_root, host_id))
  # POST accepts a list of roles and returns the created ones; we create
  # exactly one, so return the first element.
  return call(resource_root.post,
              _get_roles_path(cluster_name, service_name),
              ApiRole, True, data=[apirole])[0]
def get_role(resource_root, service_name, name, cluster_name="default"):
  """
  Lookup a role by name
  @param resource_root: The root Resource object.
  @param service_name: Service name
  @param name: Role name
  @param cluster_name: Cluster name
  @return: An ApiRole object
  """
  path = _get_role_path(cluster_name, service_name, name)
  return _get_role(resource_root, path)
def _get_role(resource_root, path):
  # Fetch a single role resource and deserialize it into an ApiRole.
  return call(resource_root.get, path, ApiRole)
def get_all_roles(resource_root, service_name, cluster_name="default", view=None):
  """
  Get all roles
  @param resource_root: The root Resource object.
  @param service_name: Service name
  @param cluster_name: Cluster name
  @return: A list of ApiRole objects.
  """
  # Only pass the 'view' query parameter when one was requested.
  params = dict(view=view) if view else None
  return call(resource_root.get,
              _get_roles_path(cluster_name, service_name),
              ApiRole, True, params=params)
def get_roles_by_type(resource_root, service_name, role_type,
                      cluster_name="default", view=None):
  """
  Get all roles of a certain type in a service
  @param resource_root: The root Resource object.
  @param service_name: Service name
  @param role_type: Role type
  @param cluster_name: Cluster name
  @return: A list of ApiRole objects.
  """
  # The API has no server-side type filter here, so filter client-side.
  all_roles = get_all_roles(resource_root, service_name, cluster_name, view)
  return [role for role in all_roles if role.type == role_type]
def delete_role(resource_root, service_name, name, cluster_name="default"):
  """
  Delete a role by name
  @param resource_root: The root Resource object.
  @param service_name: Service name
  @param name: Role name
  @param cluster_name: Cluster name
  @return: The deleted ApiRole object
  """
  path = _get_role_path(cluster_name, service_name, name)
  return call(resource_root.delete, path, ApiRole)
class ApiRole(BaseApiResource):
  """Model object for a Cloudera Manager role (one instance of a service
  running on a particular host)."""

  # Attribute descriptors: plain ``None`` entries are writable fields,
  # ``ROAttr`` marks server-owned read-only fields; the argument (if any)
  # names the type the JSON value is deserialized into.
  _ATTRIBUTES = {
    'name' : None,
    'type' : None,
    'hostRef' : Attr(ApiHostRef),
    'roleState' : ROAttr(),
    'healthSummary' : ROAttr(),
    'healthChecks' : ROAttr(),
    'serviceRef' : ROAttr(ApiServiceRef),
    'configStale' : ROAttr(),
    'configStalenessStatus' : ROAttr(),
    'haStatus' : ROAttr(),
    'roleUrl' : ROAttr(),
    'commissionState' : ROAttr(),
    'maintenanceMode' : ROAttr(),
    'maintenanceOwners' : ROAttr(),
    'roleConfigGroupRef' : ROAttr(ApiRoleConfigGroupRef),
    'zooKeeperServerMode' : ROAttr(),
  }

  def __init__(self, resource_root, name=None, type=None, hostRef=None):
    # BaseApiObject.init picks the constructor arguments out of locals().
    BaseApiObject.init(self, resource_root, locals())

  def __str__(self):
    return "<ApiRole>: %s (cluster: %s; service: %s)" % (
        self.name, self.serviceRef.clusterName, self.serviceRef.serviceName)

  def _path(self):
    # API path of this role, derived from its service reference.
    return _get_role_path(self.serviceRef.clusterName,
                          self.serviceRef.serviceName,
                          self.name)

  def _get_log(self, log):
    # Shared fetch for the full/stdout/stderr log endpoints.
    path = "%s/logs/%s" % (self._path(), log)
    return self._get_resource_root().get(path)

  def get_commands(self, view=None):
    """
    Retrieve a list of running commands for this role.
    @param view: View to materialize ('full' or 'summary')
    @return: A list of running commands.
    """
    return self._get("commands", ApiCommand, True,
        params = view and dict(view=view) or None)

  def get_config(self, view = None):
    """
    Retrieve the role's configuration.

    The 'summary' view contains strings as the dictionary values. The full
    view contains ApiConfig instances as the values.

    @param view: View to materialize ('full' or 'summary')
    @return: Dictionary with configuration data.
    """
    return self._get_config("config", view)

  def update_config(self, config):
    """
    Update the role's configuration.

    @param config: Dictionary with configuration to update.
    @return: Dictionary with updated configuration.
    """
    return self._update_config("config", config)

  def get_full_log(self):
    """
    Retrieve the contents of the role's log file.

    @return: Contents of log file.
    """
    return self._get_log('full')

  def get_stdout(self):
    """
    Retrieve the contents of the role's standard output.

    @return: Contents of stdout.
    """
    return self._get_log('stdout')

  def get_stderr(self):
    """
    Retrieve the contents of the role's standard error.

    @return: Contents of stderr.
    """
    return self._get_log('stderr')

  def get_metrics(self, from_time=None, to_time=None, metrics=None, view=None):
    """
    This endpoint is not supported as of v6. Use the timeseries API
    instead. To get all metrics for a role with the timeseries API use
    the query:
    'select * where roleName = $ROLE_NAME'.

    To get specific metrics for a role use a comma-separated list of
    the metric names as follows:
    'select $METRIC_NAME1, $METRIC_NAME2 where roleName = $ROLE_NAME'.

    For more information see http://tiny.cloudera.com/tsquery_doc
    @param from_time: A datetime; start of the period to query (optional).
    @param to_time: A datetime; end of the period to query (default = now).
    @param metrics: List of metrics to query (default = all).
    @param view: View to materialize ('full' or 'summary')
    @return: List of metrics and their readings.
    """
    return self._get_resource_root().get_metrics(self._path() + '/metrics',
        from_time, to_time, metrics, view)

  def enter_maintenance_mode(self):
    """
    Put the role in maintenance mode.

    @return: Reference to the completed command.
    @since: API v2
    """
    cmd = self._cmd('enterMaintenanceMode')
    if cmd.success:
      # Refresh this object so the maintenance flags reflect the new state.
      self._update(_get_role(self._get_resource_root(), self._path()))
    return cmd

  def exit_maintenance_mode(self):
    """
    Take the role out of maintenance mode.

    @return: Reference to the completed command.
    @since: API v2
    """
    cmd = self._cmd('exitMaintenanceMode')
    if cmd.success:
      # Refresh this object so the maintenance flags reflect the new state.
      self._update(_get_role(self._get_resource_root(), self._path()))
    return cmd

  def list_commands_by_name(self):
    """
    Lists all the commands that can be executed by name
    on the provided role.

    @return: A list of command metadata objects
    @since: API v6
    """
    return self._get("commandsByName", ApiCommandMetadata, True, api_version=6)
| apache-2.0 |
rememberlenny/google-course-builder | modules/oeditor/oeditor.py | 9 | 10589 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generic object editor view that uses REST services."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import os
import urllib
import appengine_config
from common import jinja_utils
from common import schema_fields
from common import tags
from controllers import utils
import jinja2
from models import custom_modules
from models import transforms
import webapp2
# a set of YUI and inputex modules required by the editor
COMMON_REQUIRED_MODULES = [
'inputex-group', 'inputex-form', 'inputex-jsonschema']
ALL_MODULES = [
'querystring-stringify-simple', 'inputex-select', 'inputex-string',
'inputex-radio', 'inputex-date', 'inputex-datepicker', 'inputex-checkbox',
'inputex-list', 'inputex-color', 'gcb-rte', 'inputex-textarea',
'inputex-url', 'inputex-uneditable', 'inputex-integer', 'inputex-hidden',
'inputex-file', 'io-upload-iframe']
class ObjectEditor(object):
    """Generic object editor powered by jsonschema."""

    @classmethod
    def get_html_for(
        cls, handler, schema_json, annotations, object_key,
        rest_url, exit_url,
        extra_args=None,
        save_method='put',
        delete_url=None, delete_message=None, delete_method='post',
        auto_return=False, read_only=False,
        required_modules=None,
        extra_js_files=None,
        delete_button_caption='Delete',
        save_button_caption='Save',
        exit_button_caption='Close'):
        """Creates an HTML code needed to embed and operate this form.

        This method creates an HTML, JS and CSS required to embed JSON
        schema-based object editor into a view.

        Args:
            handler: a BaseHandler class, which will host this HTML, JS and CSS
            schema_json: a text of JSON schema for the object being edited
            annotations: schema annotations dictionary
            object_key: a key of an object being edited
            rest_url: a REST endpoint for object GET/PUT operation
            exit_url: a URL to go to after the editor form is dismissed
            extra_args: extra request params passed back in GET and POST
            save_method: how the data should be saved to the server (put|upload)
            delete_url: optional URL for delete operation
            delete_message: string. Optional custom delete confirmation message
            delete_method: optional HTTP method for delete operation
            auto_return: whether to return to the exit_url on successful save
            read_only: optional flag; if set, removes Save and Delete operations
            required_modules: list of inputex modules required for this editor
            extra_js_files: list of extra JS files to be included
            delete_button_caption: string. A caption for the 'Delete' button
            save_button_caption: a caption for the 'Save' button
            exit_button_caption: a caption for the 'Close' button

        Returns:
            The HTML, JS and CSS text that will instantiate an object editor.
        """
        required_modules = required_modules or ALL_MODULES

        # Derive a delete confirmation message from the schema's description
        # when the caller did not supply one.
        if not delete_message:
            kind = transforms.loads(schema_json).get('description')
            if not kind:
                kind = 'Generic Object'
            delete_message = 'Are you sure you want to delete this %s?' % kind

        # construct parameters
        get_url = rest_url
        get_args = {'key': object_key}
        post_url = rest_url
        post_args = {'key': object_key}

        if extra_args:
            get_args.update(extra_args)
            post_args.update(extra_args)

        # A read-only editor gets no save endpoint at all.
        if read_only:
            post_url = ''
            post_args = ''

        # Icons for custom rich-text-editor tags registered with the system.
        custom_rte_tag_icons = []
        for tag, tag_class in tags.get_tag_bindings().items():
            custom_rte_tag_icons.append({
                'name': tag,
                'iconUrl': tag_class().get_icon_url()})

        template_values = {
            'enabled': custom_module.enabled,
            'schema': schema_json,
            'get_url': '%s?%s' % (get_url, urllib.urlencode(get_args, True)),
            'save_url': post_url,
            'save_args': transforms.dumps(post_args),
            'exit_button_caption': exit_button_caption,
            'exit_url': exit_url,
            'required_modules': COMMON_REQUIRED_MODULES + required_modules,
            'extra_js_files': extra_js_files or [],
            'schema_annotations': [
                (item[0], transforms.dumps(item[1])) for item in annotations],
            'save_method': save_method,
            'auto_return': auto_return,
            'delete_button_caption': delete_button_caption,
            'save_button_caption': save_button_caption,
            'custom_rte_tag_icons': transforms.dumps(custom_rte_tag_icons),
            'delete_message': delete_message,
        }

        if delete_url and not read_only:
            template_values['delete_url'] = delete_url
        if delete_method:
            template_values['delete_method'] = delete_method

        if appengine_config.BUNDLE_LIB_FILES:
            template_values['bundle_lib_files'] = True

        return jinja2.utils.Markup(handler.get_template(
            'oeditor.html', [os.path.dirname(__file__)]
        ).render(template_values))
class PopupHandler(webapp2.RequestHandler, utils.ReflectiveRequestHandler):
    """A handler to serve the content of the popup subeditor."""

    default_action = 'custom_tag'
    get_actions = ['edit_custom_tag', 'add_custom_tag']
    post_actions = []

    def get_template(self, template_name, dirs):
        """Sets up an environment and Gets jinja template."""
        return jinja_utils.get_template(
            template_name, dirs + [os.path.dirname(__file__)])

    def get_edit_custom_tag(self):
        """Return the page used to edit a custom HTML tag in a popup."""
        tag_name = self.request.get('tag_name')
        tag_bindings = tags.get_tag_bindings()
        tag_class = tag_bindings[tag_name]
        schema = tag_class().get_schema(self)
        # Nested registries are not representable in this popup editor.
        if schema.has_subregistries():
            raise NotImplementedError()

        template_values = {}
        template_values['form_html'] = ObjectEditor.get_html_for(
            self, schema.get_json_schema(), schema.get_schema_dict(), None,
            None, None)
        self.response.out.write(
            self.get_template('popup.html', []).render(template_values))

    def get_add_custom_tag(self):
        """Return the page for the popup used to add a custom HTML tag."""
        tag_name = self.request.get('tag_name')
        tag_bindings = tags.get_tag_bindings()

        # Build the (name, "Vendor: Name") choices, sorted by display label.
        select_data = []
        for name in tag_bindings.keys():
            clazz = tag_bindings[name]
            select_data.append((name, '%s: %s' % (
                clazz.vendor(), clazz.name())))
        select_data = sorted(select_data, key=lambda pair: pair[1])

        # Default to the first tag (alphabetically) when none was requested.
        if tag_name:
            tag_class = tag_bindings[tag_name]
        else:
            tag_class = tag_bindings[select_data[0][0]]
        tag_schema = tag_class().get_schema(self)

        schema = schema_fields.FieldRegistry('Add a Component')
        type_select = schema.add_sub_registry('type', 'Component Type')
        type_select.add_property(schema_fields.SchemaField(
            'tag', 'Name', 'string', select_data=select_data))
        schema.add_sub_registry('attributes', registry=tag_schema)

        template_values = {}
        template_values['form_html'] = ObjectEditor.get_html_for(
            self, schema.get_json_schema(), schema.get_schema_dict(), None,
            None, None, required_modules=tag_class.required_modules(),
            extra_js_files=['add_custom_tag.js'])
        self.response.out.write(
            self.get_template('popup.html', []).render(template_values))
def create_bool_select_annotation(
    keys_list, label, true_label, false_label, class_name=None,
    description=None):
    """Creates inputex annotation to display a bool type as a select.

    Args:
        keys_list: schema key path the annotation applies to.
        label: label shown next to the select control.
        true_label: display text of the True choice.
        false_label: display text of the False choice.
        class_name: optional CSS class name for the control.
        description: optional help text.

    Returns:
        A (keys_list, inputex_spec) annotation tuple.
    """
    choices = [
        {'value': True, 'label': true_label},
        {'value': False, 'label': false_label}]
    properties = {'label': label, 'choices': choices}
    if class_name:
        properties['className'] = class_name
    if description:
        properties['description'] = description
    return (keys_list, {'type': 'select', '_inputex': properties})
custom_module = None
def register_module():
    """Registers this module in the registry."""

    # Imported here to avoid a circular import at module load time.
    from controllers import sites  # pylint: disable-msg=g-import-not-at-top

    # Serve the bundled YUI/inputEx libraries straight out of their zips.
    yui_handlers = [
        ('/static/inputex-3.1.0/(.*)', sites.make_zip_handler(
            os.path.join(
                appengine_config.BUNDLE_ROOT, 'lib/inputex-3.1.0.zip'))),
        ('/static/yui_3.6.0/(.*)', sites.make_zip_handler(
            os.path.join(
                appengine_config.BUNDLE_ROOT, 'lib/yui_3.6.0.zip'))),
        ('/static/2in3/(.*)', sites.make_zip_handler(
            os.path.join(
                appengine_config.BUNDLE_ROOT, 'lib/yui_2in3-2.9.0.zip')))]

    # When serving bundled libraries, also expose CSS combo endpoints.
    if appengine_config.BUNDLE_LIB_FILES:
        yui_handlers += [
            ('/static/combo/inputex', sites.make_css_combo_zip_handler(
                os.path.join(
                    appengine_config.BUNDLE_ROOT, 'lib/inputex-3.1.0.zip'),
                '/static/inputex-3.1.0/')),
            ('/static/combo/yui', sites.make_css_combo_zip_handler(
                os.path.join(appengine_config.BUNDLE_ROOT, 'lib/yui_3.6.0.zip'),
                '/yui/')),
            ('/static/combo/2in3', sites.make_css_combo_zip_handler(
                os.path.join(
                    appengine_config.BUNDLE_ROOT, 'lib/yui_2in3-2.9.0.zip'),
                '/static/2in3/'))]

    oeditor_handlers = [('/oeditorpopup', PopupHandler)]

    global custom_module
    custom_module = custom_modules.Module(
        'Object Editor',
        'A visual editor for editing various types of objects.',
        yui_handlers, oeditor_handlers)
    return custom_module
| apache-2.0 |
WSDC-NITWarangal/django | tests/utils_tests/test_checksums.py | 205 | 1267 | import unittest
from django.test import ignore_warnings
from django.utils.deprecation import RemovedInDjango110Warning
class TestUtilsChecksums(unittest.TestCase):
    """Tests for the deprecated ``django.utils.checksums`` helpers."""

    def check_output(self, function, value, output=None):
        """
        Check that function(value) equals output. If output is None,
        check that function(value) equals value.
        """
        if output is None:
            output = value
        self.assertEqual(function(value), output)

    # The checksums module is deprecated; silence the removal warning its
    # use would raise.
    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_luhn(self):
        from django.utils import checksums
        f = checksums.luhn
        # Pairs of (input, expected Luhn validity), covering valid numbers,
        # invalid numbers and non-numeric inputs.
        items = (
            (4111111111111111, True), ('4111111111111111', True),
            (4222222222222, True), (378734493671000, True),
            (5424000000000015, True), (5555555555554444, True),
            (1008, True), ('0000001008', True), ('000000001008', True),
            (4012888888881881, True), (1234567890123456789012345678909, True),
            (4111111111211111, False), (42222222222224, False),
            (100, False), ('100', False), ('0000100', False),
            ('abc', False), (None, False), (object(), False),
        )
        for value, output in items:
            self.check_output(f, value, output)
| bsd-3-clause |
nerdvegas/rez | src/rez/vendor/amqp/utils.py | 36 | 2685 | from __future__ import absolute_import
import sys
try:
import fcntl
except ImportError:
fcntl = None # noqa
class promise(object):
    """A deferred call that records its result and completion state.

    Wraps ``fun`` together with partially-applied ``args``/``kwargs``;
    invoking the promise calls ``fun`` and then fires ``on_success`` or
    ``on_error``.
    """

    # NOTE(review): __slots__ is skipped on PyPy — presumably to work around
    # a PyPy-specific limitation; confirm before changing.
    if not hasattr(sys, 'pypy_version_info'):
        __slots__ = tuple(
            'fun args kwargs value ready failed '
            ' on_success on_error calls'.split()
        )

    def __init__(self, fun, args=(), kwargs=(),
                 on_success=None, on_error=None):
        self.fun = fun
        self.args = args
        self.kwargs = kwargs
        self.ready = False          # set once the promise has been invoked
        self.failed = False         # set if the invocation raised
        self.on_success = on_success
        self.on_error = on_error
        self.value = None           # result of the last successful call
        self.calls = 0              # number of invocations so far

    def __repr__(self):
        return '<$: {0.fun.__name__}(*{0.args!r}, **{0.kwargs!r})'.format(
            self,
        )

    def __call__(self, *args, **kwargs):
        # Call-time args are appended to the stored ones; call-time kwargs
        # override the stored ones.
        try:
            self.value = self.fun(
                *self.args + args if self.args else args,
                **dict(self.kwargs, **kwargs) if self.kwargs else kwargs
            )
        except Exception as exc:
            self.set_error_state(exc)
        else:
            if self.on_success:
                self.on_success(self.value)
        finally:
            # Bookkeeping happens whether the call succeeded or not.
            self.ready = True
            self.calls += 1

    def then(self, callback=None, on_error=None):
        # Returns the callback so this can be used as a decorator.
        self.on_success = callback
        self.on_error = on_error
        return callback

    def set_error_state(self, exc):
        self.failed = True
        if self.on_error is None:
            # Bare ``raise`` re-raises the exception currently being handled,
            # so this must run inside an active except block (see __call__
            # and throw).
            raise
        self.on_error(exc)

    def throw(self, exc):
        # Raise and immediately catch exc so set_error_state executes with
        # an active exception context (required by the bare raise above).
        try:
            raise exc
        except exc.__class__ as with_cause:
            self.set_error_state(with_cause)
def noop():
    """Return a promise wrapping a do-nothing callable that accepts any
    arguments."""
    return promise(lambda *args, **kwargs: None)
try:
    from os import set_cloexec  # Python 3.4?
except ImportError:
    def set_cloexec(fd, cloexec):  # noqa
        """Set or clear the close-on-exec flag on file descriptor ``fd``.

        Fallback used when ``os`` does not provide ``set_cloexec``.
        Raises NotImplementedError where fcntl / FD_CLOEXEC is unavailable
        (e.g. fcntl is None on this platform — see the import at the top
        of the module).
        """
        try:
            FD_CLOEXEC = fcntl.FD_CLOEXEC
        except AttributeError:
            raise NotImplementedError(
                'close-on-exec flag not supported on this platform',
            )
        flags = fcntl.fcntl(fd, fcntl.F_GETFD)
        if cloexec:
            flags |= FD_CLOEXEC
        else:
            flags &= ~FD_CLOEXEC
        return fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def get_errno(exc):
    """:exc:`socket.error` and :exc:`IOError` first got
    the ``.errno`` attribute in Py2.7"""
    _missing = object()
    errno_ = getattr(exc, 'errno', _missing)
    if errno_ is not _missing:
        return errno_
    args = getattr(exc, 'args', _missing)
    # e.args = (errno, reason)
    if args is not _missing and isinstance(args, tuple) and len(args) == 2:
        return args[0]
    return 0
| lgpl-3.0 |
sadmansk/servo | tests/wpt/web-platform-tests/webdriver/tests/release_actions/conftest.py | 41 | 1038 | import pytest
@pytest.fixture
def key_chain(session):
    """Key action sequence bound to a fixed keyboard input source id."""
    return session.actions.sequence("key", "keyboard_id")
@pytest.fixture
def mouse_chain(session):
    """Pointer action sequence configured as a mouse input source."""
    return session.actions.sequence(
        "pointer",
        "pointer_id",
        {"pointerType": "mouse"})
@pytest.fixture
def none_chain(session):
    """Null ("none") action sequence, useful for inserting pauses."""
    return session.actions.sequence("none", "none_id")
@pytest.fixture(autouse=True)
def release_actions(session, request):
    """Release all input actions after each test in this package."""
    # release all actions after each test
    # equivalent to a teardown_function, but with access to session fixture
    request.addfinalizer(session.actions.release)
@pytest.fixture
def key_reporter(session, test_actions_page, request):
    """Represents focused input element from `test_actions_page` fixture."""
    input_el = session.find.css("#keys", all=False)
    input_el.click()
    # Discard any events the page recorded while focusing the element.
    session.execute_script("resetEvents();")
    return input_el
@pytest.fixture
def test_actions_page(session, url):
    """Navigate the session to the actions support page."""
    session.url = url("/webdriver/tests/release_actions/support/test_actions_wdspec.html")
| mpl-2.0 |
aptrishu/coala-bears | bears/c_languages/ClangBear.py | 16 | 3060 | from clang.cindex import Index, LibclangError
from coalib.bears.LocalBear import LocalBear
from coalib.results.Diff import Diff
from coalib.results.Result import Result
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.results.SourceRange import SourceRange
from coalib.settings.Setting import typed_list
def clang_available(cls):
    """
    Checks if Clang is available and ready to use.

    :return: True if Clang is available, a description of the error else.
    """
    try:
        Index.create()
    except LibclangError as error:  # pragma: no cover
        return str(error)
    return True
class ClangBear(LocalBear):
    """Checks C-family code with libclang and offers clang's fixits as
    automatic corrections."""

    LANGUAGES = {'C', 'C++', 'Objective-C', 'Objective-C++', 'OpenMP',
                 'OpenCL', 'CUDA'}
    # Depends on libclang-py3, which is a dependency of coala
    REQUIREMENTS = set()
    AUTHORS = {'The coala developers'}
    AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
    LICENSE = 'AGPL-3.0'
    CAN_FIX = {'Variable Misuse', 'Syntax'}

    check_prerequisites = classmethod(clang_available)

    def run(self, filename, file, clang_cli_options: typed_list(str) = None):
        """
        Check code for syntactical or semantical problems using Clang.

        This bear supports automatic fixes.

        :param clang_cli_options: Any options that will be passed through to
                                  Clang.
        """
        index = Index.create()
        # Parse the in-memory buffer rather than the file on disk, so
        # unsaved modifications are checked too.
        diagnostics = index.parse(
            filename,
            args=clang_cli_options,
            unsaved_files=[(filename, ''.join(file))]).diagnostics
        for diag in diagnostics:
            # Map clang's 0..4 severity levels onto coala's three levels.
            severity = {0: RESULT_SEVERITY.INFO,
                        1: RESULT_SEVERITY.INFO,
                        2: RESULT_SEVERITY.NORMAL,
                        3: RESULT_SEVERITY.MAJOR,
                        4: RESULT_SEVERITY.MAJOR}.get(diag.severity)
            affected_code = tuple(SourceRange.from_clang_range(range)
                                  for range in diag.ranges)

            diffs = None
            fixits = list(diag.fixits)
            if len(fixits) > 0:
                # FIXME: coala doesn't support choice of diffs, for now
                # append first one only, often there's only one anyway
                diffs = {filename: Diff.from_clang_fixit(fixits[0], file)}

                # No affected code yet? Let's derive it from the fix!
                if len(affected_code) == 0:
                    affected_code = diffs[filename].affected_code(filename)

            # Still no affected code? Position is the best we can get...
            if len(affected_code) == 0 and diag.location.file is not None:
                affected_code = (SourceRange.from_values(
                    diag.location.file.name,
                    diag.location.line,
                    diag.location.column),)

            yield Result(
                self,
                diag.spelling,
                severity=severity,
                affected_code=affected_code,
                diffs=diffs)
| agpl-3.0 |
joomel1/phantomjs | src/qt/qtwebkit/Tools/QueueStatusServer/config/logging.py | 122 | 1582 | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Specified in seconds
queue_log_duration = 60 * 60
| bsd-3-clause |
jablonskim/jupyweave | jupyweave/output_manager.py | 1 | 1220 | from os import makedirs
from os.path import join, dirname
import uuid
class OutputManager:
    """Builds output paths from the output settings and saves generated
    data files and the final document to disk."""

    def __init__(self, output_settings, input_filename):
        # All paths are derived from the input filename by the settings
        # object: the directory for generated data files, the URL prefix
        # those files are referenced by, and the output document filename.
        self.__data_dir = output_settings.data_directory(input_filename)
        self.__data_dir_url = output_settings.data_dir_url(input_filename)
        self.__output_filename = output_settings.output_filename(input_filename)

    def save_data(self, data, extension, filename=None):
        """Saves data to file, using output settings for path building.

        Args:
            data (bytes): Raw content to write.
            extension (str): File extension (assumed to include the leading
                dot — confirm against callers).
            filename: Optional base name; a UUID-based name is generated
                when omitted.

        Returns:
            The URL under which the saved file can be referenced.
        """
        makedirs(self.__data_dir, exist_ok=True)

        if filename is None:
            # Random name avoids collisions between generated files.
            filename = str.format('img_{0}{1}', str(uuid.uuid4()), extension)
        else:
            filename = str.format('{0}{1}', filename, extension)

        file_path = join(self.__data_dir, filename)
        file_url = join(self.__data_dir_url, filename)

        with open(file_path, 'wb') as f:
            f.write(data)

        return file_url

    def save_document(self, data):
        """Saves document to file."""
        makedirs(dirname(self.__output_filename), exist_ok=True)

        with open(self.__output_filename, 'w', encoding='utf8') as f:
            f.write(data)
| mit |
fengshao0907/cockroach-python | cockroach/proto/errors_pb2.py | 2 | 38352 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: cockroach/proto/errors.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import cockroach.proto.config_pb2
import cockroach.proto.data_pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='cockroach/proto/errors.proto',
package='cockroach.proto',
serialized_pb=_b('\n\x1c\x63ockroach/proto/errors.proto\x12\x0f\x63ockroach.proto\x1a\x1c\x63ockroach/proto/config.proto\x1a\x1a\x63ockroach/proto/data.proto\"e\n\x0eNotLeaderError\x12)\n\x07replica\x18\x01 \x01(\x0b\x32\x18.cockroach.proto.Replica\x12(\n\x06leader\x18\x02 \x01(\x0b\x32\x18.cockroach.proto.Replica\"%\n\x12RangeNotFoundError\x12\x0f\n\x07raft_id\x18\x01 \x01(\x03\"|\n\x15RangeKeyMismatchError\x12\x19\n\x11request_start_key\x18\x01 \x01(\x0c\x12\x17\n\x0frequest_end_key\x18\x02 \x01(\x0c\x12/\n\x05range\x18\x03 \x01(\x0b\x32 .cockroach.proto.RangeDescriptor\"\x8b\x01\n\"ReadWithinUncertaintyIntervalError\x12-\n\ttimestamp\x18\x01 \x01(\x0b\x32\x1a.cockroach.proto.Timestamp\x12\x36\n\x12\x65xisting_timestamp\x18\x02 \x01(\x0b\x32\x1a.cockroach.proto.Timestamp\"D\n\x17TransactionAbortedError\x12)\n\x03txn\x18\x01 \x01(\x0b\x32\x1c.cockroach.proto.Transaction\"s\n\x14TransactionPushError\x12)\n\x03txn\x18\x01 \x01(\x0b\x32\x1c.cockroach.proto.Transaction\x12\x30\n\npushee_txn\x18\x02 \x01(\x0b\x32\x1c.cockroach.proto.Transaction\"B\n\x15TransactionRetryError\x12)\n\x03txn\x18\x01 \x01(\x0b\x32\x1c.cockroach.proto.Transaction\"P\n\x16TransactionStatusError\x12)\n\x03txn\x18\x01 \x01(\x0b\x32\x1c.cockroach.proto.Transaction\x12\x0b\n\x03msg\x18\x02 \x01(\t\"\xa1\x01\n\x10WriteIntentError\x12\x39\n\x07intents\x18\x01 \x03(\x0b\x32(.cockroach.proto.WriteIntentError.Intent\x12\x10\n\x08resolved\x18\x02 \x01(\x08\x1a@\n\x06Intent\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12)\n\x03txn\x18\x02 \x01(\x0b\x32\x1c.cockroach.proto.Transaction\"y\n\x10WriteTooOldError\x12-\n\ttimestamp\x18\x01 \x01(\x0b\x32\x1a.cockroach.proto.Timestamp\x12\x36\n\x12\x65xisting_timestamp\x18\x02 \x01(\x0b\x32\x1a.cockroach.proto.Timestamp\"\x14\n\x12OpRequiresTxnError\"D\n\x14\x43onditionFailedError\x12,\n\x0c\x61\x63tual_value\x18\x01 \x01(\x0b\x32\x16.cockroach.proto.Value\"i\n\x12LeaseRejectedError\x12)\n\tRequested\x18\x01 
\x01(\x0b\x32\x16.cockroach.proto.Lease\x12(\n\x08\x45xisting\x18\x02 \x01(\x0b\x32\x16.cockroach.proto.Lease\"\x85\x07\n\x0b\x45rrorDetail\x12\x35\n\nnot_leader\x18\x01 \x01(\x0b\x32\x1f.cockroach.proto.NotLeaderErrorH\x00\x12>\n\x0frange_not_found\x18\x02 \x01(\x0b\x32#.cockroach.proto.RangeNotFoundErrorH\x00\x12\x44\n\x12range_key_mismatch\x18\x03 \x01(\x0b\x32&.cockroach.proto.RangeKeyMismatchErrorH\x00\x12_\n read_within_uncertainty_interval\x18\x04 \x01(\x0b\x32\x33.cockroach.proto.ReadWithinUncertaintyIntervalErrorH\x00\x12G\n\x13transaction_aborted\x18\x05 \x01(\x0b\x32(.cockroach.proto.TransactionAbortedErrorH\x00\x12\x41\n\x10transaction_push\x18\x06 \x01(\x0b\x32%.cockroach.proto.TransactionPushErrorH\x00\x12\x43\n\x11transaction_retry\x18\x07 \x01(\x0b\x32&.cockroach.proto.TransactionRetryErrorH\x00\x12\x45\n\x12transaction_status\x18\x08 \x01(\x0b\x32\'.cockroach.proto.TransactionStatusErrorH\x00\x12\x39\n\x0cwrite_intent\x18\t \x01(\x0b\x32!.cockroach.proto.WriteIntentErrorH\x00\x12:\n\rwrite_too_old\x18\n \x01(\x0b\x32!.cockroach.proto.WriteTooOldErrorH\x00\x12>\n\x0fop_requires_txn\x18\x0b \x01(\x0b\x32#.cockroach.proto.OpRequiresTxnErrorH\x00\x12\x41\n\x10\x63ondition_failed\x18\x0c \x01(\x0b\x32%.cockroach.proto.ConditionFailedErrorH\x00\x12=\n\x0elease_rejected\x18\r \x01(\x0b\x32#.cockroach.proto.LeaseRejectedErrorH\x00\x42\x07\n\x05value\"\x9b\x01\n\x05\x45rror\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\x11\n\tretryable\x18\x02 \x01(\x08\x12@\n\x13transaction_restart\x18\x04 \x01(\x0e\x32#.cockroach.proto.TransactionRestart\x12,\n\x06\x64\x65tail\x18\x03 \x01(\x0b\x32\x1c.cockroach.proto.ErrorDetail*;\n\x12TransactionRestart\x12\t\n\x05\x41\x42ORT\x10\x00\x12\x0b\n\x07\x42\x41\x43KOFF\x10\x01\x12\r\n\tIMMEDIATE\x10\x02\x42\x07Z\x05proto')
,
dependencies=[cockroach.proto.config_pb2.DESCRIPTOR,cockroach.proto.data_pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_TRANSACTIONRESTART = _descriptor.EnumDescriptor(
name='TransactionRestart',
full_name='cockroach.proto.TransactionRestart',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='ABORT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BACKOFF', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IMMEDIATE', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2402,
serialized_end=2461,
)
_sym_db.RegisterEnumDescriptor(_TRANSACTIONRESTART)
TransactionRestart = enum_type_wrapper.EnumTypeWrapper(_TRANSACTIONRESTART)
ABORT = 0
BACKOFF = 1
IMMEDIATE = 2
_NOTLEADERERROR = _descriptor.Descriptor(
name='NotLeaderError',
full_name='cockroach.proto.NotLeaderError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='replica', full_name='cockroach.proto.NotLeaderError.replica', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='leader', full_name='cockroach.proto.NotLeaderError.leader', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=107,
serialized_end=208,
)
_RANGENOTFOUNDERROR = _descriptor.Descriptor(
name='RangeNotFoundError',
full_name='cockroach.proto.RangeNotFoundError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='raft_id', full_name='cockroach.proto.RangeNotFoundError.raft_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=210,
serialized_end=247,
)
_RANGEKEYMISMATCHERROR = _descriptor.Descriptor(
name='RangeKeyMismatchError',
full_name='cockroach.proto.RangeKeyMismatchError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request_start_key', full_name='cockroach.proto.RangeKeyMismatchError.request_start_key', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='request_end_key', full_name='cockroach.proto.RangeKeyMismatchError.request_end_key', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='range', full_name='cockroach.proto.RangeKeyMismatchError.range', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=249,
serialized_end=373,
)
_READWITHINUNCERTAINTYINTERVALERROR = _descriptor.Descriptor(
name='ReadWithinUncertaintyIntervalError',
full_name='cockroach.proto.ReadWithinUncertaintyIntervalError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='cockroach.proto.ReadWithinUncertaintyIntervalError.timestamp', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='existing_timestamp', full_name='cockroach.proto.ReadWithinUncertaintyIntervalError.existing_timestamp', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=376,
serialized_end=515,
)
_TRANSACTIONABORTEDERROR = _descriptor.Descriptor(
name='TransactionAbortedError',
full_name='cockroach.proto.TransactionAbortedError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='txn', full_name='cockroach.proto.TransactionAbortedError.txn', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=517,
serialized_end=585,
)
_TRANSACTIONPUSHERROR = _descriptor.Descriptor(
name='TransactionPushError',
full_name='cockroach.proto.TransactionPushError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='txn', full_name='cockroach.proto.TransactionPushError.txn', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pushee_txn', full_name='cockroach.proto.TransactionPushError.pushee_txn', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=587,
serialized_end=702,
)
_TRANSACTIONRETRYERROR = _descriptor.Descriptor(
name='TransactionRetryError',
full_name='cockroach.proto.TransactionRetryError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='txn', full_name='cockroach.proto.TransactionRetryError.txn', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=704,
serialized_end=770,
)
_TRANSACTIONSTATUSERROR = _descriptor.Descriptor(
name='TransactionStatusError',
full_name='cockroach.proto.TransactionStatusError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='txn', full_name='cockroach.proto.TransactionStatusError.txn', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='msg', full_name='cockroach.proto.TransactionStatusError.msg', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=772,
serialized_end=852,
)
_WRITEINTENTERROR_INTENT = _descriptor.Descriptor(
name='Intent',
full_name='cockroach.proto.WriteIntentError.Intent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='cockroach.proto.WriteIntentError.Intent.key', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='txn', full_name='cockroach.proto.WriteIntentError.Intent.txn', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=952,
serialized_end=1016,
)
_WRITEINTENTERROR = _descriptor.Descriptor(
name='WriteIntentError',
full_name='cockroach.proto.WriteIntentError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='intents', full_name='cockroach.proto.WriteIntentError.intents', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='resolved', full_name='cockroach.proto.WriteIntentError.resolved', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_WRITEINTENTERROR_INTENT, ],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=855,
serialized_end=1016,
)
_WRITETOOOLDERROR = _descriptor.Descriptor(
name='WriteTooOldError',
full_name='cockroach.proto.WriteTooOldError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='cockroach.proto.WriteTooOldError.timestamp', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='existing_timestamp', full_name='cockroach.proto.WriteTooOldError.existing_timestamp', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1018,
serialized_end=1139,
)
_OPREQUIRESTXNERROR = _descriptor.Descriptor(
name='OpRequiresTxnError',
full_name='cockroach.proto.OpRequiresTxnError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1141,
serialized_end=1161,
)
_CONDITIONFAILEDERROR = _descriptor.Descriptor(
name='ConditionFailedError',
full_name='cockroach.proto.ConditionFailedError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='actual_value', full_name='cockroach.proto.ConditionFailedError.actual_value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1163,
serialized_end=1231,
)
_LEASEREJECTEDERROR = _descriptor.Descriptor(
name='LeaseRejectedError',
full_name='cockroach.proto.LeaseRejectedError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='Requested', full_name='cockroach.proto.LeaseRejectedError.Requested', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='Existing', full_name='cockroach.proto.LeaseRejectedError.Existing', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1233,
serialized_end=1338,
)
_ERRORDETAIL = _descriptor.Descriptor(
name='ErrorDetail',
full_name='cockroach.proto.ErrorDetail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='not_leader', full_name='cockroach.proto.ErrorDetail.not_leader', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='range_not_found', full_name='cockroach.proto.ErrorDetail.range_not_found', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='range_key_mismatch', full_name='cockroach.proto.ErrorDetail.range_key_mismatch', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='read_within_uncertainty_interval', full_name='cockroach.proto.ErrorDetail.read_within_uncertainty_interval', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transaction_aborted', full_name='cockroach.proto.ErrorDetail.transaction_aborted', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transaction_push', full_name='cockroach.proto.ErrorDetail.transaction_push', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transaction_retry', full_name='cockroach.proto.ErrorDetail.transaction_retry', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transaction_status', full_name='cockroach.proto.ErrorDetail.transaction_status', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='write_intent', full_name='cockroach.proto.ErrorDetail.write_intent', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='write_too_old', full_name='cockroach.proto.ErrorDetail.write_too_old', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='op_requires_txn', full_name='cockroach.proto.ErrorDetail.op_requires_txn', index=10,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='condition_failed', full_name='cockroach.proto.ErrorDetail.condition_failed', index=11,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lease_rejected', full_name='cockroach.proto.ErrorDetail.lease_rejected', index=12,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='value', full_name='cockroach.proto.ErrorDetail.value',
index=0, containing_type=None, fields=[]),
],
serialized_start=1341,
serialized_end=2242,
)
_ERROR = _descriptor.Descriptor(
name='Error',
full_name='cockroach.proto.Error',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='message', full_name='cockroach.proto.Error.message', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='retryable', full_name='cockroach.proto.Error.retryable', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transaction_restart', full_name='cockroach.proto.Error.transaction_restart', index=2,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='detail', full_name='cockroach.proto.Error.detail', index=3,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=2245,
serialized_end=2400,
)
_NOTLEADERERROR.fields_by_name['replica'].message_type = cockroach.proto.config_pb2._REPLICA
_NOTLEADERERROR.fields_by_name['leader'].message_type = cockroach.proto.config_pb2._REPLICA
_RANGEKEYMISMATCHERROR.fields_by_name['range'].message_type = cockroach.proto.config_pb2._RANGEDESCRIPTOR
_READWITHINUNCERTAINTYINTERVALERROR.fields_by_name['timestamp'].message_type = cockroach.proto.data_pb2._TIMESTAMP
_READWITHINUNCERTAINTYINTERVALERROR.fields_by_name['existing_timestamp'].message_type = cockroach.proto.data_pb2._TIMESTAMP
_TRANSACTIONABORTEDERROR.fields_by_name['txn'].message_type = cockroach.proto.data_pb2._TRANSACTION
_TRANSACTIONPUSHERROR.fields_by_name['txn'].message_type = cockroach.proto.data_pb2._TRANSACTION
_TRANSACTIONPUSHERROR.fields_by_name['pushee_txn'].message_type = cockroach.proto.data_pb2._TRANSACTION
_TRANSACTIONRETRYERROR.fields_by_name['txn'].message_type = cockroach.proto.data_pb2._TRANSACTION
_TRANSACTIONSTATUSERROR.fields_by_name['txn'].message_type = cockroach.proto.data_pb2._TRANSACTION
_WRITEINTENTERROR_INTENT.fields_by_name['txn'].message_type = cockroach.proto.data_pb2._TRANSACTION
_WRITEINTENTERROR_INTENT.containing_type = _WRITEINTENTERROR
_WRITEINTENTERROR.fields_by_name['intents'].message_type = _WRITEINTENTERROR_INTENT
_WRITETOOOLDERROR.fields_by_name['timestamp'].message_type = cockroach.proto.data_pb2._TIMESTAMP
_WRITETOOOLDERROR.fields_by_name['existing_timestamp'].message_type = cockroach.proto.data_pb2._TIMESTAMP
_CONDITIONFAILEDERROR.fields_by_name['actual_value'].message_type = cockroach.proto.data_pb2._VALUE
_LEASEREJECTEDERROR.fields_by_name['Requested'].message_type = cockroach.proto.data_pb2._LEASE
_LEASEREJECTEDERROR.fields_by_name['Existing'].message_type = cockroach.proto.data_pb2._LEASE
_ERRORDETAIL.fields_by_name['not_leader'].message_type = _NOTLEADERERROR
_ERRORDETAIL.fields_by_name['range_not_found'].message_type = _RANGENOTFOUNDERROR
_ERRORDETAIL.fields_by_name['range_key_mismatch'].message_type = _RANGEKEYMISMATCHERROR
_ERRORDETAIL.fields_by_name['read_within_uncertainty_interval'].message_type = _READWITHINUNCERTAINTYINTERVALERROR
_ERRORDETAIL.fields_by_name['transaction_aborted'].message_type = _TRANSACTIONABORTEDERROR
_ERRORDETAIL.fields_by_name['transaction_push'].message_type = _TRANSACTIONPUSHERROR
_ERRORDETAIL.fields_by_name['transaction_retry'].message_type = _TRANSACTIONRETRYERROR
_ERRORDETAIL.fields_by_name['transaction_status'].message_type = _TRANSACTIONSTATUSERROR
_ERRORDETAIL.fields_by_name['write_intent'].message_type = _WRITEINTENTERROR
_ERRORDETAIL.fields_by_name['write_too_old'].message_type = _WRITETOOOLDERROR
_ERRORDETAIL.fields_by_name['op_requires_txn'].message_type = _OPREQUIRESTXNERROR
_ERRORDETAIL.fields_by_name['condition_failed'].message_type = _CONDITIONFAILEDERROR
_ERRORDETAIL.fields_by_name['lease_rejected'].message_type = _LEASEREJECTEDERROR
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['not_leader'])
_ERRORDETAIL.fields_by_name['not_leader'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['range_not_found'])
_ERRORDETAIL.fields_by_name['range_not_found'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['range_key_mismatch'])
_ERRORDETAIL.fields_by_name['range_key_mismatch'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['read_within_uncertainty_interval'])
_ERRORDETAIL.fields_by_name['read_within_uncertainty_interval'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['transaction_aborted'])
_ERRORDETAIL.fields_by_name['transaction_aborted'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['transaction_push'])
_ERRORDETAIL.fields_by_name['transaction_push'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['transaction_retry'])
_ERRORDETAIL.fields_by_name['transaction_retry'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['transaction_status'])
_ERRORDETAIL.fields_by_name['transaction_status'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['write_intent'])
_ERRORDETAIL.fields_by_name['write_intent'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['write_too_old'])
_ERRORDETAIL.fields_by_name['write_too_old'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['op_requires_txn'])
_ERRORDETAIL.fields_by_name['op_requires_txn'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['condition_failed'])
_ERRORDETAIL.fields_by_name['condition_failed'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERRORDETAIL.oneofs_by_name['value'].fields.append(
_ERRORDETAIL.fields_by_name['lease_rejected'])
_ERRORDETAIL.fields_by_name['lease_rejected'].containing_oneof = _ERRORDETAIL.oneofs_by_name['value']
_ERROR.fields_by_name['transaction_restart'].enum_type = _TRANSACTIONRESTART
_ERROR.fields_by_name['detail'].message_type = _ERRORDETAIL
DESCRIPTOR.message_types_by_name['NotLeaderError'] = _NOTLEADERERROR
DESCRIPTOR.message_types_by_name['RangeNotFoundError'] = _RANGENOTFOUNDERROR
DESCRIPTOR.message_types_by_name['RangeKeyMismatchError'] = _RANGEKEYMISMATCHERROR
DESCRIPTOR.message_types_by_name['ReadWithinUncertaintyIntervalError'] = _READWITHINUNCERTAINTYINTERVALERROR
DESCRIPTOR.message_types_by_name['TransactionAbortedError'] = _TRANSACTIONABORTEDERROR
DESCRIPTOR.message_types_by_name['TransactionPushError'] = _TRANSACTIONPUSHERROR
DESCRIPTOR.message_types_by_name['TransactionRetryError'] = _TRANSACTIONRETRYERROR
DESCRIPTOR.message_types_by_name['TransactionStatusError'] = _TRANSACTIONSTATUSERROR
DESCRIPTOR.message_types_by_name['WriteIntentError'] = _WRITEINTENTERROR
DESCRIPTOR.message_types_by_name['WriteTooOldError'] = _WRITETOOOLDERROR
DESCRIPTOR.message_types_by_name['OpRequiresTxnError'] = _OPREQUIRESTXNERROR
DESCRIPTOR.message_types_by_name['ConditionFailedError'] = _CONDITIONFAILEDERROR
DESCRIPTOR.message_types_by_name['LeaseRejectedError'] = _LEASEREJECTEDERROR
DESCRIPTOR.message_types_by_name['ErrorDetail'] = _ERRORDETAIL
DESCRIPTOR.message_types_by_name['Error'] = _ERROR
# NOTE: protoc-generated registration code for cockroach/proto/errors.proto.
# Each GeneratedProtocolMessageType call below synthesizes a concrete Message
# subclass from its descriptor and registers it with the default symbol
# database so it can be looked up by full name at runtime.
# DO NOT EDIT by hand; regenerate from the .proto source instead.
DESCRIPTOR.enum_types_by_name['TransactionRestart'] = _TRANSACTIONRESTART
NotLeaderError = _reflection.GeneratedProtocolMessageType('NotLeaderError', (_message.Message,), dict(
  DESCRIPTOR = _NOTLEADERERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.NotLeaderError)
  ))
_sym_db.RegisterMessage(NotLeaderError)
RangeNotFoundError = _reflection.GeneratedProtocolMessageType('RangeNotFoundError', (_message.Message,), dict(
  DESCRIPTOR = _RANGENOTFOUNDERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.RangeNotFoundError)
  ))
_sym_db.RegisterMessage(RangeNotFoundError)
RangeKeyMismatchError = _reflection.GeneratedProtocolMessageType('RangeKeyMismatchError', (_message.Message,), dict(
  DESCRIPTOR = _RANGEKEYMISMATCHERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.RangeKeyMismatchError)
  ))
_sym_db.RegisterMessage(RangeKeyMismatchError)
ReadWithinUncertaintyIntervalError = _reflection.GeneratedProtocolMessageType('ReadWithinUncertaintyIntervalError', (_message.Message,), dict(
  DESCRIPTOR = _READWITHINUNCERTAINTYINTERVALERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.ReadWithinUncertaintyIntervalError)
  ))
_sym_db.RegisterMessage(ReadWithinUncertaintyIntervalError)
TransactionAbortedError = _reflection.GeneratedProtocolMessageType('TransactionAbortedError', (_message.Message,), dict(
  DESCRIPTOR = _TRANSACTIONABORTEDERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.TransactionAbortedError)
  ))
_sym_db.RegisterMessage(TransactionAbortedError)
TransactionPushError = _reflection.GeneratedProtocolMessageType('TransactionPushError', (_message.Message,), dict(
  DESCRIPTOR = _TRANSACTIONPUSHERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.TransactionPushError)
  ))
_sym_db.RegisterMessage(TransactionPushError)
TransactionRetryError = _reflection.GeneratedProtocolMessageType('TransactionRetryError', (_message.Message,), dict(
  DESCRIPTOR = _TRANSACTIONRETRYERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.TransactionRetryError)
  ))
_sym_db.RegisterMessage(TransactionRetryError)
TransactionStatusError = _reflection.GeneratedProtocolMessageType('TransactionStatusError', (_message.Message,), dict(
  DESCRIPTOR = _TRANSACTIONSTATUSERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.TransactionStatusError)
  ))
_sym_db.RegisterMessage(TransactionStatusError)
# WriteIntentError carries a nested message type (Intent), registered below
# alongside its parent.
WriteIntentError = _reflection.GeneratedProtocolMessageType('WriteIntentError', (_message.Message,), dict(
  Intent = _reflection.GeneratedProtocolMessageType('Intent', (_message.Message,), dict(
    DESCRIPTOR = _WRITEINTENTERROR_INTENT,
    __module__ = 'cockroach.proto.errors_pb2'
    # @@protoc_insertion_point(class_scope:cockroach.proto.WriteIntentError.Intent)
    ))
  ,
  DESCRIPTOR = _WRITEINTENTERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.WriteIntentError)
  ))
_sym_db.RegisterMessage(WriteIntentError)
_sym_db.RegisterMessage(WriteIntentError.Intent)
WriteTooOldError = _reflection.GeneratedProtocolMessageType('WriteTooOldError', (_message.Message,), dict(
  DESCRIPTOR = _WRITETOOOLDERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.WriteTooOldError)
  ))
_sym_db.RegisterMessage(WriteTooOldError)
OpRequiresTxnError = _reflection.GeneratedProtocolMessageType('OpRequiresTxnError', (_message.Message,), dict(
  DESCRIPTOR = _OPREQUIRESTXNERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.OpRequiresTxnError)
  ))
_sym_db.RegisterMessage(OpRequiresTxnError)
ConditionFailedError = _reflection.GeneratedProtocolMessageType('ConditionFailedError', (_message.Message,), dict(
  DESCRIPTOR = _CONDITIONFAILEDERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.ConditionFailedError)
  ))
_sym_db.RegisterMessage(ConditionFailedError)
LeaseRejectedError = _reflection.GeneratedProtocolMessageType('LeaseRejectedError', (_message.Message,), dict(
  DESCRIPTOR = _LEASEREJECTEDERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.LeaseRejectedError)
  ))
_sym_db.RegisterMessage(LeaseRejectedError)
ErrorDetail = _reflection.GeneratedProtocolMessageType('ErrorDetail', (_message.Message,), dict(
  DESCRIPTOR = _ERRORDETAIL,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.ErrorDetail)
  ))
_sym_db.RegisterMessage(ErrorDetail)
Error = _reflection.GeneratedProtocolMessageType('Error', (_message.Message,), dict(
  DESCRIPTOR = _ERROR,
  __module__ = 'cockroach.proto.errors_pb2'
  # @@protoc_insertion_point(class_scope:cockroach.proto.Error)
  ))
_sym_db.RegisterMessage(Error)
# File-level options: the generated Go package name is "proto".
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('Z\005proto'))
# @@protoc_insertion_point(module_scope)
| apache-2.0 |
jeffery-do/Vizdoombot | doom/lib/python3.5/site-packages/numpy/polynomial/hermite_e.py | 23 | 58014 | """
Objects for dealing with Hermite_e series.
This module provides a number of objects (mostly functions) useful for
dealing with Hermite_e series, including a `HermiteE` class that
encapsulates the usual arithmetic operations. (General information
on how this module represents and works with such polynomials is in the
docstring for its "parent" sub-package, `numpy.polynomial`).
Constants
---------
- `hermedomain` -- Hermite_e series default domain, [-1,1].
- `hermezero` -- Hermite_e series that evaluates identically to 0.
- `hermeone` -- Hermite_e series that evaluates identically to 1.
- `hermex` -- Hermite_e series for the identity map, ``f(x) = x``.
Arithmetic
----------
- `hermemulx` -- multiply a Hermite_e series in ``P_i(x)`` by ``x``.
- `hermeadd` -- add two Hermite_e series.
- `hermesub` -- subtract one Hermite_e series from another.
- `hermemul` -- multiply two Hermite_e series.
- `hermediv` -- divide one Hermite_e series by another.
- `hermeval` -- evaluate a Hermite_e series at given points.
- `hermeval2d` -- evaluate a 2D Hermite_e series at given points.
- `hermeval3d` -- evaluate a 3D Hermite_e series at given points.
- `hermegrid2d` -- evaluate a 2D Hermite_e series on a Cartesian product.
- `hermegrid3d` -- evaluate a 3D Hermite_e series on a Cartesian product.
Calculus
--------
- `hermeder` -- differentiate a Hermite_e series.
- `hermeint` -- integrate a Hermite_e series.
Misc Functions
--------------
- `hermefromroots` -- create a Hermite_e series with specified roots.
- `hermeroots` -- find the roots of a Hermite_e series.
- `hermevander` -- Vandermonde-like matrix for Hermite_e polynomials.
- `hermevander2d` -- Vandermonde-like matrix for 2D power series.
- `hermevander3d` -- Vandermonde-like matrix for 3D power series.
- `hermegauss` -- Gauss-Hermite_e quadrature, points and weights.
- `hermeweight` -- Hermite_e weight function.
- `hermecompanion` -- symmetrized companion matrix in Hermite_e form.
- `hermefit` -- least-squares fit returning a Hermite_e series.
- `hermetrim` -- trim leading coefficients from a Hermite_e series.
- `hermeline` -- Hermite_e series of given straight line.
- `herme2poly` -- convert a Hermite_e series to a polynomial.
- `poly2herme` -- convert a polynomial to a Hermite_e series.
Classes
-------
- `HermiteE` -- A Hermite_e series class.
See also
--------
`numpy.polynomial`
"""
from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
import numpy.linalg as la
from . import polyutils as pu
from ._polybase import ABCPolyBase
# Public API of this module.
__all__ = [
    'hermezero', 'hermeone', 'hermex', 'hermedomain', 'hermeline',
    'hermeadd', 'hermesub', 'hermemulx', 'hermemul', 'hermediv',
    'hermepow', 'hermeval', 'hermeder', 'hermeint', 'herme2poly',
    'poly2herme', 'hermefromroots', 'hermevander', 'hermefit', 'hermetrim',
    'hermeroots', 'HermiteE', 'hermeval2d', 'hermeval3d', 'hermegrid2d',
    'hermegrid3d', 'hermevander2d', 'hermevander3d', 'hermecompanion',
    'hermegauss', 'hermeweight']
# Trim trailing "small" coefficients from a series; shared helper
# re-exported from polyutils.
hermetrim = pu.trimcoef
def poly2herme(pol):
    """
    poly2herme(pol)

    Convert a polynomial to a Hermite series.

    Given polynomial coefficients relative to the "standard" basis,
    ordered from lowest degree to highest, return the coefficients of the
    equivalent Hermite series, also ordered from lowest to highest degree.

    Parameters
    ----------
    pol : array_like
        1-D array containing the polynomial coefficients

    Returns
    -------
    c : ndarray
        1-D array containing the coefficients of the equivalent Hermite
        series.

    See Also
    --------
    herme2poly

    Notes
    -----
    Conversions between basis sets are most easily done through the
    convert method of a series class instance.

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import poly2herme
    >>> poly2herme(np.arange(4))
    array([  2.,  10.,   2.,   3.])
    """
    [pol] = pu.as_series([pol])
    result = 0
    # Horner-style synthesis in the Hermite basis: fold coefficients in
    # from the highest degree down.
    for coef in pol[::-1]:
        result = hermeadd(hermemulx(result), coef)
    return result
def herme2poly(c):
    """
    Convert a Hermite series to a polynomial.

    Given HermiteE series coefficients ordered from lowest degree to
    highest, return the coefficients of the equivalent polynomial relative
    to the "standard" basis, also ordered from lowest to highest degree.

    Parameters
    ----------
    c : array_like
        1-D array containing the Hermite series coefficients, ordered
        from lowest order term to highest.

    Returns
    -------
    pol : ndarray
        1-D array containing the coefficients of the equivalent polynomial
        (relative to the "standard" basis) ordered from lowest order term
        to highest.

    See Also
    --------
    poly2herme

    Notes
    -----
    Conversions between basis sets are most easily done through the
    convert method of a series class instance.

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import herme2poly
    >>> herme2poly([ 2., 10., 2., 3.])
    array([ 0.,  1.,  2.,  3.])
    """
    from .polynomial import polyadd, polysub, polymulx

    [c] = pu.as_series([c])
    n = len(c)
    if n < 3:
        # Degree 0 or 1: the two bases coincide.
        return c
    c0, c1 = c[-2], c[-1]
    # Clenshaw-style back substitution; ``deg`` is the current degree of c1.
    for deg in range(n - 1, 1, -1):
        c0, c1 = polysub(c[deg - 2], c1*(deg - 1)), polyadd(c0, polymulx(c1))
    return polyadd(c0, polymulx(c1))
#
# These are constant arrays of integer type so as to be compatible
# with the widest range of other types, such as Decimal.
#
# HermiteE series default domain.
hermedomain = np.array([-1, 1])
# HermiteE coefficients representing zero.
hermezero = np.array([0])
# HermiteE coefficients representing one.
hermeone = np.array([1])
# HermiteE coefficients representing the identity x.
hermex = np.array([0, 1])
def hermeline(off, scl):
    """
    Hermite series whose graph is a straight line.

    Parameters
    ----------
    off, scl : scalars
        The specified line is given by ``off + scl*x``.

    Returns
    -------
    y : ndarray
        This module's representation of the Hermite series for
        ``off + scl*x``.

    See Also
    --------
    polyline, chebline

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermeline, hermeval
    >>> hermeval(0,hermeline(3, 2))
    3.0
    >>> hermeval(1,hermeline(3, 2))
    5.0
    """
    # A zero slope collapses the series to the constant term alone.
    coeffs = [off] if scl == 0 else [off, scl]
    return np.array(coeffs)
def hermefromroots(roots):
    """
    Generate a HermiteE series with given roots.

    The function returns the coefficients of the polynomial

    .. math:: p(x) = (x - r_0) * (x - r_1) * ... * (x - r_n),

    in HermiteE form, where the `r_n` are the roots specified in `roots`.
    A root of multiplicity n must appear in `roots` n times; roots may be
    given in any order.

    If the returned coefficients are `c`, then

    .. math:: p(x) = c_0 + c_1 * He_1(x) + ... + c_n * He_n(x)

    The coefficient of the last term is not generally 1 for monic
    polynomials in HermiteE form.

    Parameters
    ----------
    roots : array_like
        Sequence containing the roots.

    Returns
    -------
    out : ndarray
        1-D array of coefficients.  If all roots are real then `out` is a
        real array; if some of the roots are complex, then `out` is complex
        even if all the coefficients in the result are real (see Examples
        below).

    See Also
    --------
    polyfromroots, legfromroots, lagfromroots, hermfromroots,
    chebfromroots.

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermefromroots, hermeval
    >>> coef = hermefromroots((-1, 0, 1))
    >>> hermeval((-1, 0, 1), coef)
    array([ 0.,  0.,  0.])
    >>> coef = hermefromroots((-1j, 1j))
    >>> hermeval((-1j, 1j), coef)
    array([ 0.+0.j,  0.+0.j])
    """
    if len(roots) == 0:
        # Empty product: the constant polynomial 1.
        return np.ones(1)
    [roots] = pu.as_series([roots], trim=False)
    roots.sort()
    # Build the linear factors (x - r), then multiply them together
    # pairwise; the balanced reduction keeps intermediate degrees small.
    factors = [hermeline(-r, 1) for r in roots]
    while len(factors) > 1:
        half, odd = divmod(len(factors), 2)
        paired = [hermemul(factors[k], factors[k + half]) for k in range(half)]
        if odd:
            paired[0] = hermemul(paired[0], factors[-1])
        factors = paired
    return factors[0]
def hermeadd(c1, c2):
    """
    Add one Hermite series to another.

    Returns the sum of two Hermite series `c1` + `c2`.  The arguments
    are sequences of coefficients ordered from lowest order term to
    highest, i.e., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Array representing the Hermite series of their sum.

    See Also
    --------
    hermesub, hermemul, hermediv, hermepow

    Notes
    -----
    Unlike multiplication, division, etc., the sum of two Hermite series
    is a Hermite series (without having to "reproject" the result onto
    the basis set), so addition, just like that of "standard" polynomials,
    is simply "component-wise."

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermeadd
    >>> hermeadd([1, 2, 3], [1, 2, 3, 4])
    array([ 2.,  4.,  6.,  4.])
    """
    # Both inputs become trimmed copies here.
    [c1, c2] = pu.as_series([c1, c2])
    # Accumulate the shorter array into the longer one, component-wise.
    if len(c2) > len(c1):
        c1, c2 = c2, c1
    c1[:c2.size] += c2
    return pu.trimseq(c1)
def hermesub(c1, c2):
    """
    Subtract one Hermite series from another.

    Returns the difference of two Hermite series `c1` - `c2`.  The
    sequences of coefficients are from lowest order term to highest, i.e.,
    [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Of Hermite series coefficients representing their difference.

    See Also
    --------
    hermeadd, hermemul, hermediv, hermepow

    Notes
    -----
    Unlike multiplication, division, etc., the difference of two Hermite
    series is a Hermite series (without having to "reproject" the result
    onto the basis set), so subtraction, just like that of "standard"
    polynomials, is simply "component-wise."

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermesub
    >>> hermesub([1, 2, 3, 4], [1, 2, 3])
    array([ 0.,  0.,  0.,  4.])
    """
    # Both inputs become trimmed copies here.
    [c1, c2] = pu.as_series([c1, c2])
    if len(c1) > len(c2):
        res = c1
        res[:c2.size] -= c2
    else:
        # Negate the longer (or equal-length) array, then add c1 into it.
        res = -c2
        res[:c1.size] += c1
    return pu.trimseq(res)
def hermemulx(c):
    """Multiply a Hermite series by x.

    Multiply the Hermite series `c` by x, where x is the independent
    variable.

    Parameters
    ----------
    c : array_like
        1-D array of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Array representing the result of the multiplication.

    Notes
    -----
    The multiplication uses the recursion relationship for Hermite
    polynomials in the form

    .. math::

        xP_i(x) = (P_{i + 1}(x) + iP_{i - 1}(x))

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermemulx
    >>> hermemulx([1, 2, 3])
    array([ 2.,  7.,  2.,  3.])
    """
    # c becomes a trimmed copy here.
    [c] = pu.as_series([c])
    # The zero series maps to itself; handle it before allocating output.
    if len(c) == 1 and c[0] == 0:
        return c
    out = np.empty(len(c) + 1, dtype=c.dtype)
    out[0] = c[0]*0
    out[1] = c[0]
    # Apply x*He_i = He_{i+1} + i*He_{i-1} term by term.
    for deg in range(1, len(c)):
        out[deg + 1] = c[deg]
        out[deg - 1] += c[deg]*deg
    return out
def hermemul(c1, c2):
    """
    Multiply one Hermite series by another.

    Returns the product of two Hermite series `c1` * `c2`.  The arguments
    are sequences of coefficients, from lowest order "term" to highest,
    e.g., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Of Hermite series coefficients representing their product.

    See Also
    --------
    hermeadd, hermesub, hermediv, hermepow

    Notes
    -----
    In general, the (polynomial) product of two C-series results in terms
    that are not in the Hermite polynomial basis set.  Thus, to express
    the product as a Hermite series, it is necessary to "reproject" the
    product onto said basis set, which may produce "unintuitive" (but
    correct) results; see Examples section below.

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermemul
    >>> hermemul([1, 2, 3], [0, 1, 2])
    array([ 14.,  15.,  28.,   7.,   6.])
    """
    # c1, c2 are trimmed copies
    [c1, c2] = pu.as_series([c1, c2])
    # Drive the recursion over the shorter array (`c`), treating the longer
    # one (`xs`) as the series operand being accumulated.
    if len(c1) > len(c2):
        c = c2
        xs = c1
    else:
        c = c1
        xs = c2
    if len(c) == 1:
        # Product with a constant: plain scaling, no reprojection needed.
        c0 = c[0]*xs
        c1 = 0
    elif len(c) == 2:
        c0 = c[0]*xs
        c1 = c[1]*xs
    else:
        nd = len(c)  # current degree tracked by the recursion
        c0 = c[-2]*xs
        c1 = c[-1]*xs
        for i in range(3, len(c) + 1):
            tmp = c0
            nd = nd - 1
            # Clenshaw-style step: hermemulx applies the basis recurrence
            # x*He_n = He_{n+1} + n*He_{n-1}, keeping the partial product
            # expressed in the HermiteE basis.
            c0 = hermesub(c[-i]*xs, c1*(nd - 1))
            c1 = hermeadd(tmp, hermemulx(c1))
    return hermeadd(c0, hermemulx(c1))
def hermediv(c1, c2):
    """
    Divide one Hermite series by another.

    Returns the quotient-with-remainder of two Hermite series
    `c1` / `c2`.  The arguments are sequences of coefficients from lowest
    order "term" to highest, e.g., [1,2,3] represents the series
    ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    [quo, rem] : ndarrays
        Of Hermite series coefficients representing the quotient and
        remainder.

    Raises
    ------
    ZeroDivisionError
        If the leading coefficient of `c2` is zero.

    See Also
    --------
    hermeadd, hermesub, hermemul, hermepow

    Notes
    -----
    In general, the (polynomial) division of one Hermite series by another
    results in quotient and remainder terms that are not in the Hermite
    polynomial basis set.  Thus, to express these results as a Hermite
    series, it is necessary to "reproject" the results onto the Hermite
    basis set, which may produce "unintuitive" (but correct) results; see
    Examples section below.

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermediv
    >>> hermediv([ 14.,  15.,  28.,   7.,   6.], [0, 1, 2])
    (array([ 1.,  2.,  3.]), array([ 0.]))
    >>> hermediv([ 15.,  17.,  28.,   7.,   6.], [0, 1, 2])
    (array([ 1.,  2.,  3.]), array([ 1.,  2.]))
    """
    # c1, c2 are trimmed copies
    [c1, c2] = pu.as_series([c1, c2])
    if c2[-1] == 0:
        raise ZeroDivisionError()
    lc1 = len(c1)
    lc2 = len(c2)
    if lc1 < lc2:
        # Divisor degree exceeds dividend degree: quotient 0, remainder c1.
        return c1[:1]*0, c1
    elif lc2 == 1:
        # Division by a constant series.
        return c1/c2[-1], c1[:1]*0
    else:
        quo = np.empty(lc1 - lc2 + 1, dtype=c1.dtype)
        rem = c1
        # Long division, highest degree first: each pass lifts c2 to the
        # remainder's degree (multiply by He_i) and cancels the leading term.
        for i in range(lc1 - lc2, - 1, -1):
            p = hermemul([0]*i + [1], c2)
            q = rem[-1]/p[-1]
            rem = rem[:-1] - q*p[:-1]
            quo[i] = q
        return quo, pu.trimseq(rem)
def hermepow(c, pow, maxpower=16):
    """Raise a Hermite series to a power.

    Returns the Hermite series `c` raised to the power `pow`.  The
    argument `c` is a sequence of coefficients ordered from low to high,
    i.e., [1,2,3] is the series ``P_0 + 2*P_1 + 3*P_2.``

    Parameters
    ----------
    c : array_like
        1-D array of Hermite series coefficients ordered from low to
        high.
    pow : integer
        Power to which the series will be raised
    maxpower : integer, optional
        Maximum power allowed.  This is mainly to limit growth of the
        series to unmanageable size.  Default is 16

    Returns
    -------
    coef : ndarray
        Hermite series of power.

    Raises
    ------
    ValueError
        If `pow` is not a non-negative integer, or exceeds `maxpower`.

    See Also
    --------
    hermeadd, hermesub, hermemul, hermediv

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermepow
    >>> hermepow([1, 2, 3], 2)
    array([ 23.,  28.,  46.,  12.,   9.])
    """
    # c becomes a trimmed copy here.
    [c] = pu.as_series([c])
    power = int(pow)
    if power != pow or power < 0:
        raise ValueError("Power must be a non-negative integer.")
    elif maxpower is not None and power > maxpower:
        raise ValueError("Power is too large")
    elif power == 0:
        return np.array([1], dtype=c.dtype)
    elif power == 1:
        return c
    # Repeated multiplication; binary exponentiation would be faster but
    # the series sizes here make the simple form adequate.
    prd = c
    for _ in range(power - 1):
        prd = hermemul(prd, c)
    return prd
def hermeder(c, m=1, scl=1, axis=0):
    """
    Differentiate a Hermite_e series.

    Returns the series coefficients `c` differentiated `m` times along
    `axis`.  At each iteration the result is multiplied by `scl` (the
    scaling factor is for use in a linear change of variable).  The
    argument `c` is an array of coefficients from low to high degree along
    each axis, e.g., [1,2,3] represents the series
    ``1*He_0 + 2*He_1 + 3*He_2``.

    Parameters
    ----------
    c : array_like
        Array of Hermite_e series coefficients.  If `c` is
        multidimensional the different axes correspond to different
        variables with the degree in each axis given by the corresponding
        index.
    m : int, optional
        Number of derivatives taken, must be non-negative. (Default: 1)
    scl : scalar, optional
        Each differentiation is multiplied by `scl`.  The end result is
        multiplication by ``scl**m``.  This is for use in a linear change
        of variable. (Default: 1)
    axis : int, optional
        Axis over which the derivative is taken. (Default: 0).

        .. versionadded:: 1.7.0

    Returns
    -------
    der : ndarray
        Hermite series of the derivative.

    Raises
    ------
    ValueError
        If `m` or `axis` is not an integer, `m` is negative, or `axis` is
        out of range.

    See Also
    --------
    hermeint

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermeder
    >>> hermeder([ 1.,  1.,  1.,  1.])
    array([ 1.,  2.,  3.])
    >>> hermeder([-0.25,  1.,  1./2.,  1./3.,  1./4 ], m=2)
    array([ 1.,  2.,  3.])
    """
    c = np.array(c, ndmin=1, copy=1)
    if c.dtype.char in '?bBhHiIlLqQpP':
        # Boolean/integer input is promoted so division-free scaling and
        # in-place multiplication behave sensibly.
        c = c.astype(np.double)
    cnt, iaxis = [int(t) for t in [m, axis]]
    if cnt != m:
        raise ValueError("The order of derivation must be integer")
    if cnt < 0:
        raise ValueError("The order of derivation must be non-negative")
    if iaxis != axis:
        raise ValueError("The axis must be integer")
    if not -c.ndim <= iaxis < c.ndim:
        raise ValueError("The axis is out of range")
    if iaxis < 0:
        iaxis += c.ndim
    if cnt == 0:
        return c
    # Bring the differentiation axis to the front for the recurrence.
    c = np.rollaxis(c, iaxis)
    n = len(c)
    if cnt >= n:
        # Derivative order >= number of coefficients: the zero series.
        # Bug fix: this previously returned before rolling the axis back
        # into place, producing a wrongly-shaped result for
        # multidimensional input with axis != 0.
        c = c[:1]*0
    else:
        for i in range(cnt):
            n = n - 1
            c *= scl
            der = np.empty((n,) + c.shape[1:], dtype=c.dtype)
            # d/dx He_j = j*He_{j-1}, so each coefficient shifts down one
            # degree scaled by its index.
            for j in range(n, 0, -1):
                der[j - 1] = j*c[j]
            c = der
    # Restore the original axis order.
    c = np.rollaxis(c, 0, iaxis + 1)
    return c
def hermeint(c, m=1, k=[], lbnd=0, scl=1, axis=0):
    """
    Integrate a Hermite_e series.

    Returns the Hermite_e series coefficients `c` integrated `m` times
    from `lbnd` along `axis`.  At each iteration the resulting series is
    **multiplied** by `scl` and an integration constant, `k`, is added.
    The scaling factor is for use in a linear change of variable. ("Buyer
    beware": note that, depending on what one is doing, one may want `scl`
    to be the reciprocal of what one might expect; for more information,
    see the Notes section below.)  The argument `c` is an array of
    coefficients from low to high degree along each axis, e.g., [1,2,3]
    represents the series ``H_0 + 2*H_1 + 3*H_2``.

    Parameters
    ----------
    c : array_like
        Array of Hermite_e series coefficients.  If c is multidimensional
        the different axes correspond to different variables with the
        degree in each axis given by the corresponding index.
    m : int, optional
        Order of integration, must be positive. (Default: 1)
    k : {[], list, scalar}, optional
        Integration constant(s).  The value of the first integral at
        ``lbnd`` is the first value in the list, the value of the second
        integral at ``lbnd`` is the second value, etc.  If ``k == []`` (the
        default), all constants are set to zero.  If ``m == 1``, a single
        scalar can be given instead of a list.
    lbnd : scalar, optional
        The lower bound of the integral. (Default: 0)
    scl : scalar, optional
        Following each integration the result is *multiplied* by `scl`
        before the integration constant is added. (Default: 1)
    axis : int, optional
        Axis over which the integral is taken. (Default: 0).

        .. versionadded:: 1.7.0

    Returns
    -------
    S : ndarray
        Hermite_e series coefficients of the integral.

    Raises
    ------
    ValueError
        If ``m < 0``, ``len(k) > m``, ``np.isscalar(lbnd) == False``, or
        ``np.isscalar(scl) == False``.

    See Also
    --------
    hermeder

    Notes
    -----
    Note that the result of each integration is *multiplied* by `scl`.
    Why is this important to note?  Say one is making a linear change of
    variable :math:`u = ax + b` in an integral relative to `x`.  Then
    :math:`dx = du/a`, so one will need to set `scl` equal to
    :math:`1/a` - perhaps not what one would have first thought.

    Also note that, in general, the result of integrating a C-series needs
    to be "reprojected" onto the C-series basis set.  Thus, typically,
    the result of this function is "unintuitive," albeit correct; see
    Examples section below.

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermeint
    >>> hermeint([1, 2, 3]) # integrate once, value 0 at 0.
    array([ 1.,  1.,  1.,  1.])
    >>> hermeint([1, 2, 3], m=2) # integrate twice, value & deriv 0 at 0
    array([-0.25      ,  1.        ,  0.5       ,  0.33333333,  0.25      ])
    >>> hermeint([1, 2, 3], k=1) # integrate once, value 1 at 0.
    array([ 2.,  1.,  1.,  1.])
    >>> hermeint([1, 2, 3], lbnd=-1) # integrate once, value 0 at -1
    array([-1.,  1.,  1.,  1.])
    >>> hermeint([1, 2, 3], m=2, k=[1, 2], lbnd=-1)
    array([ 1.83333333,  0.        ,  0.5       ,  0.33333333,  0.25      ])
    """
    c = np.array(c, ndmin=1, copy=1)
    if c.dtype.char in '?bBhHiIlLqQpP':
        # Promote boolean/integer input so in-place scaling and the
        # divisions below keep full precision.
        c = c.astype(np.double)
    if not np.iterable(k):
        # A scalar constant is allowed for single integration.
        k = [k]
    cnt, iaxis = [int(t) for t in [m, axis]]
    if cnt != m:
        raise ValueError("The order of integration must be integer")
    if cnt < 0:
        raise ValueError("The order of integration must be non-negative")
    if len(k) > cnt:
        raise ValueError("Too many integration constants")
    if iaxis != axis:
        raise ValueError("The axis must be integer")
    if not -c.ndim <= iaxis < c.ndim:
        raise ValueError("The axis is out of range")
    if iaxis < 0:
        iaxis += c.ndim
    if cnt == 0:
        return c
    # Bring the integration axis to the front for the recurrence.
    c = np.rollaxis(c, iaxis)
    # Pad missing integration constants with zeros.
    k = list(k) + [0]*(cnt - len(k))
    for i in range(cnt):
        n = len(c)
        c *= scl
        if n == 1 and np.all(c[0] == 0):
            # Integrating the zero series just adds the constant.
            c[0] += k[i]
        else:
            tmp = np.empty((n + 1,) + c.shape[1:], dtype=c.dtype)
            tmp[0] = c[0]*0
            tmp[1] = c[0]
            # Antiderivative of He_j contributes He_{j+1}/(j+1).
            for j in range(1, n):
                tmp[j + 1] = c[j]/(j + 1)
            # Choose the constant term so the integral is k[i] at lbnd.
            tmp[0] += k[i] - hermeval(lbnd, tmp)
            c = tmp
    # Restore the original axis order.
    c = np.rollaxis(c, 0, iaxis + 1)
    return c
def hermeval(x, c, tensor=True):
    """
    Evaluate an HermiteE series at points x.

    If `c` is of length `n + 1`, this function returns the value:

    .. math:: p(x) = c_0 * He_0(x) + c_1 * He_1(x) + ... + c_n * He_n(x)

    The parameter `x` is converted to an array only if it is a tuple or a
    list, otherwise it is treated as a scalar.  In either case, either `x`
    or its elements must support multiplication and addition both with
    themselves and with the elements of `c`.

    If `c` is a 1-D array, then ``p(x)`` will have the same shape as `x`.
    If `c` is multidimensional, then the shape of the result depends on
    the value of `tensor`: if `tensor` is true the shape will be
    ``c.shape[1:] + x.shape``; if false it will be ``c.shape[1:]``.

    Trailing zeros in the coefficients will be used in the evaluation, so
    they should be avoided if efficiency is a concern.

    Parameters
    ----------
    x : array_like, compatible object
        If `x` is a list or tuple, it is converted to an ndarray, otherwise
        it is left unchanged and treated as a scalar.  In either case, `x`
        or its elements must support addition and multiplication with
        themselves and with the elements of `c`.
    c : array_like
        Array of coefficients ordered so that the coefficients for terms of
        degree n are contained in ``c[n]``.  If `c` is multidimensional the
        remaining indices enumerate multiple polynomials.  In the two
        dimensional case the coefficients may be thought of as stored in
        the columns of `c`.
    tensor : boolean, optional
        If True, the shape of the coefficient array is extended with ones
        on the right, one for each dimension of `x`, so that every column
        of coefficients in `c` is evaluated for every element of `x`.  If
        False, `x` is broadcast over the columns of `c` for the
        evaluation.  The default value is True.

        .. versionadded:: 1.7.0

    Returns
    -------
    values : ndarray, algebra_like
        The shape of the return value is described above.

    See Also
    --------
    hermeval2d, hermegrid2d, hermeval3d, hermegrid3d

    Notes
    -----
    The evaluation uses Clenshaw recursion, aka synthetic division.

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermeval
    >>> coef = [1,2,3]
    >>> hermeval(1, coef)
    3.0
    >>> hermeval([[1,2],[3,4]], coef)
    array([[  3.,  14.],
           [ 31.,  54.]])
    """
    c = np.array(c, ndmin=1, copy=0)
    if c.dtype.char in '?bBhHiIlLqQpP':
        c = c.astype(np.double)
    if isinstance(x, (tuple, list)):
        x = np.asarray(x)
    if isinstance(x, np.ndarray) and tensor:
        # Append one broadcast axis per dimension of x so each column of
        # coefficients is evaluated at every point.
        c = c.reshape(c.shape + (1,)*x.ndim)

    # Clenshaw recurrence, run from the highest-order coefficients down.
    if len(c) == 1:
        c0, c1 = c[0], 0
    elif len(c) == 2:
        c0, c1 = c[0], c[1]
    else:
        nd = len(c)
        c0 = c[-2]
        c1 = c[-1]
        for i in range(3, len(c) + 1):
            nd = nd - 1
            c0, c1 = c[-i] - c1*(nd - 1), c0 + c1*x
    return c0 + c1*x
def hermeval2d(x, y, c):
    """
    Evaluate a 2-D HermiteE series at points (x, y).

    This function returns the values:

    .. math:: p(x,y) = \\sum_{i,j} c_{i,j} * He_i(x) * He_j(y)

    The parameters `x` and `y` are converted to arrays only if they are
    tuples or a lists, otherwise they are treated as a scalars and they
    must have the same shape after conversion.  In either case, either `x`
    and `y` or their elements must support multiplication and addition
    both with themselves and with the elements of `c`.

    If `c` is a 1-D array a one is implicitly appended to its shape to
    make it 2-D.  The shape of the result will be ``c.shape[2:] +
    x.shape``.

    Parameters
    ----------
    x, y : array_like, compatible objects
        The two dimensional series is evaluated at the points ``(x, y)``,
        where `x` and `y` must have the same shape.  If `x` or `y` is a
        list or tuple, it is first converted to an ndarray, otherwise it
        is left unchanged and if it isn't an ndarray it is treated as a
        scalar.
    c : array_like
        Array of coefficients ordered so that the coefficient of the term
        of multi-degree i,j is contained in ``c[i,j]``.  If `c` has
        dimension greater than two the remaining indices enumerate
        multiple sets of coefficients.

    Returns
    -------
    values : ndarray, compatible object
        The values of the two dimensional polynomial at points formed with
        pairs of corresponding values from `x` and `y`.

    Raises
    ------
    ValueError
        If `x` and `y` cannot be broadcast to a common shape.

    See Also
    --------
    hermeval, hermegrid2d, hermeval3d, hermegrid3d

    Notes
    -----
    .. versionadded::1.7.0
    """
    try:
        x, y = np.array((x, y), copy=0)
    except Exception:
        # Bug fix: a bare ``except:`` here would also swallow
        # KeyboardInterrupt and SystemExit; only genuine conversion
        # failures should be reported as incompatible points.
        raise ValueError('x, y are incompatible')

    c = hermeval(x, c)
    c = hermeval(y, c, tensor=False)
    return c
def hermegrid2d(x, y, c):
    """
    Evaluate a 2-D HermiteE series on the Cartesian product of x and y.

    This function returns the values:

    .. math:: p(a,b) = \sum_{i,j} c_{i,j} * H_i(a) * H_j(b)

    where the points ``(a, b)`` consist of all pairs formed by taking
    `a` from `x` and `b` from `y`.  The resulting points form a grid with
    `x` in the first dimension and `y` in the second.

    The parameters `x` and `y` are converted to arrays only if they are
    tuples or a lists, otherwise they are treated as a scalars.  In either
    case, either `x` and `y` or their elements must support
    multiplication and addition both with themselves and with the
    elements of `c`.

    If `c` has fewer than two dimensions, ones are implicitly appended to
    its shape to make it 2-D.  The shape of the result will be
    ``c.shape[2:] + x.shape``.

    Parameters
    ----------
    x, y : array_like, compatible objects
        The two dimensional series is evaluated at the points in the
        Cartesian product of `x` and `y`.  If `x` or `y` is a list or
        tuple, it is first converted to an ndarray, otherwise it is left
        unchanged and, if it isn't an ndarray, it is treated as a scalar.
    c : array_like
        Array of coefficients ordered so that the coefficients for terms
        of degree i,j are contained in ``c[i,j]``.  If `c` has dimension
        greater than two the remaining indices enumerate multiple sets of
        coefficients.

    Returns
    -------
    values : ndarray, compatible object
        The values of the two dimensional polynomial at points in the
        Cartesian product of `x` and `y`.

    See Also
    --------
    hermeval, hermeval2d, hermeval3d, hermegrid3d

    Notes
    -----
    .. versionadded::1.7.0
    """
    # Evaluating along each axis in turn with the default tensor=True
    # builds the full Cartesian-product grid.
    return hermeval(y, hermeval(x, c))
def hermeval3d(x, y, z, c):
    """
    Evaluate a 3-D Hermite_e series at points (x, y, z).

    This function returns the values:

    .. math:: p(x,y,z) = \\sum_{i,j,k} c_{i,j,k} * He_i(x) * He_j(y) * He_k(z)

    The parameters `x`, `y`, and `z` are converted to arrays only if they
    are tuples or a lists, otherwise they are treated as a scalars and
    they must have the same shape after conversion.  In either case,
    either `x`, `y`, and `z` or their elements must support
    multiplication and addition both with themselves and with the
    elements of `c`.

    If `c` has fewer than 3 dimensions, ones are implicitly appended to
    its shape to make it 3-D.  The shape of the result will be
    ``c.shape[3:] + x.shape``.

    Parameters
    ----------
    x, y, z : array_like, compatible object
        The three dimensional series is evaluated at the points
        ``(x, y, z)``, where `x`, `y`, and `z` must have the same shape.
        If any of `x`, `y`, or `z` is a list or tuple, it is first
        converted to an ndarray, otherwise it is left unchanged and if it
        isn't an ndarray it is treated as a scalar.
    c : array_like
        Array of coefficients ordered so that the coefficient of the term
        of multi-degree i,j,k is contained in ``c[i,j,k]``.  If `c` has
        dimension greater than 3 the remaining indices enumerate multiple
        sets of coefficients.

    Returns
    -------
    values : ndarray, compatible object
        The values of the multidimensional polynomial on points formed
        with triples of corresponding values from `x`, `y`, and `z`.

    Raises
    ------
    ValueError
        If `x`, `y`, and `z` cannot be broadcast to a common shape.

    See Also
    --------
    hermeval, hermeval2d, hermegrid2d, hermegrid3d

    Notes
    -----
    .. versionadded::1.7.0
    """
    try:
        x, y, z = np.array((x, y, z), copy=0)
    except Exception:
        # Bug fix: a bare ``except:`` here would also swallow
        # KeyboardInterrupt and SystemExit; only genuine conversion
        # failures should be reported as incompatible points.
        raise ValueError('x, y, z are incompatible')

    c = hermeval(x, c)
    c = hermeval(y, c, tensor=False)
    c = hermeval(z, c, tensor=False)
    return c
def hermegrid3d(x, y, z, c):
    """
    Evaluate a 3-D HermiteE series on the Cartesian product of x, y, and z.

    This function returns the values:

    .. math:: p(a,b,c) = \\sum_{i,j,k} c_{i,j,k} * He_i(a) * He_j(b) * He_k(c)

    where the points `(a, b, c)` consist of all triples formed by taking
    `a` from `x`, `b` from `y`, and `c` from `z`. The resulting points form
    a grid with `x` in the first dimension, `y` in the second, and `z` in
    the third.

    The parameters `x`, `y`, and `z` are converted to arrays only if they
    are tuples or a lists, otherwise they are treated as a scalars. In
    either case, either `x`, `y`, and `z` or their elements must support
    multiplication and addition both with themselves and with the elements
    of `c`.

    If `c` has fewer than three dimensions, ones are implicitly appended to
    its shape to make it 3-D. The shape of the result will be c.shape[3:] +
    x.shape + y.shape + z.shape.

    Parameters
    ----------
    x, y, z : array_like, compatible objects
        The three dimensional series is evaluated at the points in the
        Cartesian product of `x`, `y`, and `z`. If `x`, `y`, or `z` is a
        list or tuple, it is first converted to an ndarray, otherwise it is
        left unchanged and, if it isn't an ndarray, it is treated as a
        scalar.
    c : array_like
        Array of coefficients ordered so that the coefficients for terms of
        degree i,j are contained in ``c[i,j]``. If `c` has dimension
        greater than two the remaining indices enumerate multiple sets of
        coefficients.

    Returns
    -------
    values : ndarray, compatible object
        The values of the three dimensional polynomial at points in the
        Cartesian product of `x`, `y`, and `z`.

    See Also
    --------
    hermeval, hermeval2d, hermegrid2d, hermeval3d

    Notes
    -----
    .. versionadded:: 1.7.0

    """
    # Successive 1-D evaluations; each call adds one grid dimension because
    # the default ``tensor=True`` is used.
    for coords in (x, y, z):
        c = hermeval(coords, c)
    return c
def hermevander(x, deg):
    """Pseudo-Vandermonde matrix of given degree.

    Returns the pseudo-Vandermonde matrix of degree `deg` and sample points
    `x`. The pseudo-Vandermonde matrix is defined by

    .. math:: V[..., i] = He_i(x),

    where `0 <= i <= deg`. The leading indices of `V` index the elements of
    `x` and the last index is the degree of the HermiteE polynomial.

    If `c` is a 1-D array of coefficients of length `n + 1` and `V` is the
    array ``V = hermevander(x, n)``, then ``np.dot(V, c)`` and
    ``hermeval(x, c)`` are the same up to roundoff. This equivalence is
    useful both for least squares fitting and for the evaluation of a large
    number of HermiteE series of the same degree and sample points.

    Parameters
    ----------
    x : array_like
        Array of points. The dtype is converted to float64 or complex128
        depending on whether any of the elements are complex. If `x` is
        scalar it is converted to a 1-D array.
    deg : int
        Degree of the resulting matrix.

    Returns
    -------
    vander : ndarray
        The pseudo-Vandermonde matrix. The shape of the returned matrix is
        ``x.shape + (deg + 1,)``, where the last index is the degree of the
        corresponding HermiteE polynomial. The dtype will be the same as
        the converted `x`.

    Raises
    ------
    ValueError
        If `deg` is not a non-negative integer.

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermevander
    >>> x = np.array([-1, 0, 1])
    >>> hermevander(x, 3)
    array([[ 1., -1.,  0.,  2.],
           [ 1.,  0., -1., -0.],
           [ 1.,  1.,  0., -2.]])

    """
    ideg = int(deg)
    if ideg != deg:
        raise ValueError("deg must be integer")
    if ideg < 0:
        raise ValueError("deg must be non-negative")

    # Convert to at least 1-D float/complex array.  This replaces the old
    # ``np.array(x, copy=0, ndmin=1)``, whose ``copy=0`` means "never copy"
    # under NumPy >= 2.0 and would raise for list/tuple input.
    x = np.atleast_1d(np.asarray(x)) + 0.0
    dims = (ideg + 1,) + x.shape
    dtyp = x.dtype
    v = np.empty(dims, dtype=dtyp)

    # He_0 = 1, He_1 = x, then the three-term recurrence
    # He_i(x) = x*He_{i-1}(x) - (i-1)*He_{i-2}(x).
    v[0] = x*0 + 1
    if ideg > 0:
        v[1] = x
        for i in range(2, ideg + 1):
            v[i] = v[i-1]*x - v[i-2]*(i - 1)

    # Move the degree axis to the end (np.rollaxis is deprecated).
    return np.moveaxis(v, 0, -1)
def hermevander2d(x, y, deg):
    """Pseudo-Vandermonde matrix of given degrees.

    Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
    points `(x, y)`. The pseudo-Vandermonde matrix is defined by

    .. math:: V[..., deg[1]*i + j] = He_i(x) * He_j(y),

    where `0 <= i <= deg[0]` and `0 <= j <= deg[1]`. The leading indices of
    `V` index the points `(x, y)` and the last index encodes the degrees of
    the HermiteE polynomials.

    If ``V = hermevander2d(x, y, [xdeg, ydeg])``, then the columns of `V`
    correspond to the elements of a 2-D coefficient array `c` of shape
    (xdeg + 1, ydeg + 1) in the order

    .. math:: c_{00}, c_{01}, c_{02} ... , c_{10}, c_{11}, c_{12} ...

    and ``np.dot(V, c.flat)`` and ``hermeval2d(x, y, c)`` will be the same
    up to roundoff. This equivalence is useful both for least squares
    fitting and for the evaluation of a large number of 2-D HermiteE
    series of the same degrees and sample points.

    Parameters
    ----------
    x, y : array_like
        Arrays of point coordinates, all of the same shape. The dtypes
        will be converted to either float64 or complex128 depending on
        whether any of the elements are complex. Scalars are converted to
        1-D arrays.
    deg : list of ints
        List of maximum degrees of the form [x_deg, y_deg].

    Returns
    -------
    vander2d : ndarray
        The shape of the returned matrix is ``x.shape + (order,)``, where
        :math:`order = (deg[0]+1)*(deg[1]+1)`. The dtype will be the same
        as the converted `x` and `y`.

    Raises
    ------
    ValueError
        If `deg` does not consist of exactly two non-negative integers.

    See Also
    --------
    hermevander, hermevander3d, hermeval2d, hermeval3d

    Notes
    -----
    .. versionadded:: 1.7.0

    """
    ideg = [int(d) for d in deg]
    # The original element-wise check also enforced len(deg) == 2 by
    # comparing against a fixed-length list; preserve that behavior.
    if len(ideg) != 2 or any(id_ != d or id_ < 0 for id_, d in zip(ideg, deg)):
        raise ValueError("degrees must be non-negative integers")
    degx, degy = ideg

    # Stack to enforce a common shape and convert to float/complex.  The
    # old ``copy=0`` argument was dropped: under NumPy >= 2.0 it means
    # "never copy" and would always raise here.
    x, y = np.asarray((x, y)) + 0.0

    vx = hermevander(x, degx)
    vy = hermevander(y, degy)
    # Outer product over the two degree axes, flattened into one.
    v = vx[..., None]*vy[..., None,:]
    return v.reshape(v.shape[:-2] + (-1,))
def hermevander3d(x, y, z, deg):
    """Pseudo-Vandermonde matrix of given degrees.

    Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
    points `(x, y, z)`. If `l, m, n` are the given degrees in `x, y, z`,
    then the pseudo-Vandermonde matrix is defined by

    .. math:: V[..., (m+1)(n+1)i + (n+1)j + k] = He_i(x)*He_j(y)*He_k(z),

    where `0 <= i <= l`, `0 <= j <= m`, and `0 <= k <= n`. The leading
    indices of `V` index the points `(x, y, z)` and the last index encodes
    the degrees of the HermiteE polynomials.

    If ``V = hermevander3d(x, y, z, [xdeg, ydeg, zdeg])``, then the columns
    of `V` correspond to the elements of a 3-D coefficient array `c` of
    shape (xdeg + 1, ydeg + 1, zdeg + 1) in the order

    .. math:: c_{000}, c_{001}, c_{002},... , c_{010}, c_{011}, c_{012},...

    and ``np.dot(V, c.flat)`` and ``hermeval3d(x, y, z, c)`` will be the
    same up to roundoff. This equivalence is useful both for least squares
    fitting and for the evaluation of a large number of 3-D HermiteE
    series of the same degrees and sample points.

    Parameters
    ----------
    x, y, z : array_like
        Arrays of point coordinates, all of the same shape. The dtypes will
        be converted to either float64 or complex128 depending on whether
        any of the elements are complex. Scalars are converted to 1-D
        arrays.
    deg : list of ints
        List of maximum degrees of the form [x_deg, y_deg, z_deg].

    Returns
    -------
    vander3d : ndarray
        The shape of the returned matrix is ``x.shape + (order,)``, where
        :math:`order = (deg[0]+1)*(deg[1]+1)*(deg[2]+1)`. The dtype will
        be the same as the converted `x`, `y`, and `z`.

    Raises
    ------
    ValueError
        If `deg` does not consist of exactly three non-negative integers.

    See Also
    --------
    hermevander, hermevander2d, hermeval2d, hermeval3d

    Notes
    -----
    .. versionadded:: 1.7.0

    """
    ideg = [int(d) for d in deg]
    # The original element-wise check also enforced len(deg) == 3 by
    # comparing against a fixed-length list; preserve that behavior.
    if len(ideg) != 3 or any(id_ != d or id_ < 0 for id_, d in zip(ideg, deg)):
        raise ValueError("degrees must be non-negative integers")
    degx, degy, degz = ideg

    # Stack to enforce a common shape and convert to float/complex.  The
    # old ``copy=0`` argument was dropped: under NumPy >= 2.0 it means
    # "never copy" and would always raise here.
    x, y, z = np.asarray((x, y, z)) + 0.0

    vx = hermevander(x, degx)
    vy = hermevander(y, degy)
    vz = hermevander(z, degz)
    # Outer product over the three degree axes, flattened into one.
    v = vx[..., None, None]*vy[..., None,:, None]*vz[..., None, None,:]
    return v.reshape(v.shape[:-3] + (-1,))
def hermefit(x, y, deg, rcond=None, full=False, w=None):
    """
    Least squares fit of Hermite series to data.

    Return the coefficients of a HermiteE series of degree `deg` that is
    the least squares fit to the data values `y` given at points `x`. If
    `y` is 1-D the returned coefficients will also be 1-D. If `y` is 2-D
    multiple fits are done, one for each column of `y`, and the resulting
    coefficients are stored in the corresponding columns of a 2-D return.
    The fitted polynomial(s) are in the form

    .. math:: p(x) = c_0 + c_1 * He_1(x) + ... + c_n * He_n(x),

    where `n` is `deg`.

    Parameters
    ----------
    x : array_like, shape (M,)
        x-coordinates of the M sample points ``(x[i], y[i])``.
    y : array_like, shape (M,) or (M, K)
        y-coordinates of the sample points. Several data sets of sample
        points sharing the same x-coordinates can be fitted at once by
        passing in a 2D-array that contains one dataset per column.
    deg : int or 1-D array_like
        Degree(s) of the fitting polynomials. If `deg` is a single integer
        all terms up to and including the `deg`'th term are included in the
        fit. For Numpy versions >= 1.11 a list of integers specifying the
        degrees of the terms to include may be used instead.
    rcond : float, optional
        Relative condition number of the fit. Singular values smaller than
        this relative to the largest singular value will be ignored. The
        default value is len(x)*eps, where eps is the relative precision of
        the float type, about 2e-16 in most cases.
    full : bool, optional
        Switch determining nature of return value. When it is False (the
        default) just the coefficients are returned, when True diagnostic
        information from the singular value decomposition is also returned.
    w : array_like, shape (`M`,), optional
        Weights. If not None, the contribution of each point
        ``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the
        weights are chosen so that the errors of the products ``w[i]*y[i]``
        all have the same variance. The default value is None.

    Returns
    -------
    coef : ndarray, shape (M,) or (M, K)
        Hermite coefficients ordered from low to high. If `y` was 2-D,
        the coefficients for the data in column k of `y` are in column
        `k`.
    [residuals, rank, singular_values, rcond] : list
        These values are only returned if `full` = True

        resid -- sum of squared residuals of the least squares fit
        rank -- the numerical rank of the scaled Vandermonde matrix
        sv -- singular values of the scaled Vandermonde matrix
        rcond -- value of `rcond`.

        For more details, see `linalg.lstsq`.

    Warns
    -----
    RankWarning
        The rank of the coefficient matrix in the least-squares fit is
        deficient. The warning is only raised if `full` = False. The
        warnings can be turned off by

        >>> import warnings
        >>> warnings.simplefilter('ignore', RankWarning)

    See Also
    --------
    chebfit, legfit, polyfit, hermfit, polyfit
    hermeval : Evaluates a Hermite series.
    hermevander : pseudo Vandermonde matrix of Hermite series.
    hermeweight : HermiteE weight function.
    linalg.lstsq : Computes a least-squares fit from the matrix.
    scipy.interpolate.UnivariateSpline : Computes spline fits.

    Notes
    -----
    The solution is the coefficients of the HermiteE series `p` that
    minimizes the sum of the weighted squared errors

    .. math:: E = \\sum_j w_j^2 * |y_j - p(x_j)|^2,

    where the :math:`w_j` are the weights. This problem is solved by
    setting up the (typically) overdetermined matrix equation

    .. math:: V(x) * c = w * y,

    where `V` is the pseudo Vandermonde matrix of `x`, the elements of `c`
    are the coefficients to be solved for, and the elements of `y` are the
    observed values. This equation is then solved using the singular value
    decomposition of `V`.

    If some of the singular values of `V` are so small that they are
    neglected, then a `RankWarning` will be issued. This means that the
    coefficient values may be poorly determined. Using a lower order fit
    will usually get rid of the warning. The `rcond` parameter can also be
    set to a value smaller than its default, but the resulting fit may be
    spurious and have large contributions from roundoff error.

    Fits using HermiteE series are probably most useful when the data can
    be approximated by ``sqrt(w(x)) * p(x)``, where `w(x)` is the HermiteE
    weight. In that case the weight ``sqrt(w(x[i])`` should be used
    together with data values ``y[i]/sqrt(w(x[i])``. The weight function is
    available as `hermeweight`.

    References
    ----------
    .. [1] Wikipedia, "Curve fitting",
           http://en.wikipedia.org/wiki/Curve_fitting

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermefit, hermeval
    >>> x = np.linspace(-10, 10)
    >>> err = np.random.randn(len(x))/10
    >>> y = hermeval(x, [1, 2, 3]) + err
    >>> hermefit(x, y, 2)
    array([ 1.01690445,  1.99951418,  2.99948696])

    """
    x = np.asarray(x) + 0.0
    y = np.asarray(y) + 0.0
    deg = np.asarray(deg)

    # check arguments.
    if deg.ndim > 1 or deg.dtype.kind not in 'iu' or deg.size == 0:
        raise TypeError("deg must be an int or non-empty 1-D array of int")
    if deg.min() < 0:
        raise ValueError("expected deg >= 0")
    if x.ndim != 1:
        raise TypeError("expected 1D vector for x")
    if x.size == 0:
        raise TypeError("expected non-empty vector for x")
    if y.ndim < 1 or y.ndim > 2:
        raise TypeError("expected 1D or 2D array for y")
    if len(x) != len(y):
        raise TypeError("expected x and y to have same length")

    # Scalar deg fits all terms 0..deg; array deg fits only the listed
    # terms (sorted so the Vandermonde columns can be sliced by degree).
    if deg.ndim == 0:
        lmax = deg
        order = lmax + 1
        van = hermevander(x, lmax)
    else:
        deg = np.sort(deg)
        lmax = deg[-1]
        order = len(deg)
        van = hermevander(x, lmax)[:, deg]

    # set up the least squares matrices in transposed form
    lhs = van.T
    rhs = y.T
    if w is not None:
        w = np.asarray(w) + 0.0
        if w.ndim != 1:
            raise TypeError("expected 1D vector for w")
        if len(x) != len(w):
            raise TypeError("expected x and w to have same length")
        # apply weights. Don't use inplace operations as they
        # can cause problems with NA.
        lhs = lhs * w
        rhs = rhs * w

    # set rcond
    if rcond is None:
        rcond = len(x)*np.finfo(x.dtype).eps

    # Determine the norms of the design matrix columns.
    if issubclass(lhs.dtype.type, np.complexfloating):
        scl = np.sqrt((np.square(lhs.real) + np.square(lhs.imag)).sum(1))
    else:
        scl = np.sqrt(np.square(lhs).sum(1))
    scl[scl == 0] = 1

    # Solve the least squares problem (columns scaled to unit norm to
    # improve conditioning, then the scaling is undone on the solution).
    c, resids, rank, s = la.lstsq(lhs.T/scl, rhs.T, rcond)
    c = (c.T/scl).T

    # Expand c to include non-fitted coefficients which are set to zero
    if deg.ndim > 0:
        if c.ndim == 2:
            cc = np.zeros((lmax+1, c.shape[1]), dtype=c.dtype)
        else:
            cc = np.zeros(lmax+1, dtype=c.dtype)
        cc[deg] = c
        c = cc

    # warn on rank reduction
    if rank != order and not full:
        msg = "The fit may be poorly conditioned"
        warnings.warn(msg, pu.RankWarning)

    if full:
        return c, [resids, rank, s, rcond]
    else:
        return c
def hermecompanion(c):
    """
    Return the scaled companion matrix of c.

    The basis polynomials are scaled so that the companion matrix is
    symmetric when `c` is an HermiteE basis polynomial. This provides
    better eigenvalue estimates than the unscaled case and for basis
    polynomials the eigenvalues are guaranteed to be real if
    `numpy.linalg.eigvalsh` is used to obtain them.

    Parameters
    ----------
    c : array_like
        1-D array of HermiteE series coefficients ordered from low to high
        degree.

    Returns
    -------
    mat : ndarray
        Scaled companion matrix of dimensions (deg, deg).

    Notes
    -----
    .. versionadded:: 1.7.0

    """
    # c is a trimmed copy
    [c] = pu.as_series([c])
    if len(c) < 2:
        raise ValueError('Series must have maximum degree of at least 1.')
    if len(c) == 2:
        # Degree-1 series: single root -c0/c1.
        return np.array([[-c[0]/c[1]]])

    deg = len(c) - 1
    comp = np.zeros((deg, deg), dtype=c.dtype)
    # Cumulative scaling factors that symmetrize the companion matrix.
    scale = np.multiply.accumulate(
        np.hstack((1., 1./np.sqrt(np.arange(deg - 1, 0, -1)))))[::-1]
    # Fill the sub- and super-diagonals with sqrt(1), sqrt(2), ...
    off = np.sqrt(np.arange(1, deg))
    rows = np.arange(deg - 1)
    comp[rows, rows + 1] = off
    comp[rows + 1, rows] = off
    # Fold the (scaled) series coefficients into the last column.
    comp[:, -1] -= scale*c[:-1]/c[-1]
    return comp
def hermeroots(c):
    """
    Compute the roots of a HermiteE series.

    Return the roots (a.k.a. "zeros") of the polynomial

    .. math:: p(x) = \\sum_i c[i] * He_i(x).

    Parameters
    ----------
    c : 1-D array_like
        1-D array of coefficients.

    Returns
    -------
    out : ndarray
        Array of the roots of the series. If all the roots are real,
        then `out` is also real, otherwise it is complex.

    See Also
    --------
    polyroots, legroots, lagroots, hermroots, chebroots

    Notes
    -----
    The root estimates are obtained as the eigenvalues of the companion
    matrix. Roots far from the origin of the complex plane may have large
    errors due to the numerical instability of the series for such
    values. Roots with multiplicity greater than 1 will also show larger
    errors as the value of the series near such points is relatively
    insensitive to errors in the roots. Isolated roots near the origin can
    be improved by a few iterations of Newton's method.

    The HermiteE series basis polynomials aren't powers of `x` so the
    results of this function may seem unintuitive.

    Examples
    --------
    >>> from numpy.polynomial.hermite_e import hermeroots, hermefromroots
    >>> coef = hermefromroots([-1, 0, 1])
    >>> coef
    array([ 0.,  2.,  0.,  1.])
    >>> hermeroots(coef)
    array([-1.,  0.,  1.])

    """
    # c is a trimmed copy
    [c] = pu.as_series([c])
    # Constant series: no roots at all.
    if len(c) <= 1:
        return np.array([], dtype=c.dtype)
    # Linear series: the single root in closed form.
    if len(c) == 2:
        return np.array([-c[0]/c[1]])
    # General case: sorted eigenvalues of the scaled companion matrix.
    return np.sort(la.eigvals(hermecompanion(c)))
def _normed_hermite_e_n(x, n):
"""
Evaluate a normalized HermiteE polynomial.
Compute the value of the normalized HermiteE polynomial of degree ``n``
at the points ``x``.
Parameters
----------
x : ndarray of double.
Points at which to evaluate the function
n : int
Degree of the normalized HermiteE function to be evaluated.
Returns
-------
values : ndarray
The shape of the return value is described above.
Notes
-----
.. versionadded:: 1.10.0
This function is needed for finding the Gauss points and integration
weights for high degrees. The values of the standard HermiteE functions
overflow when n >= 207.
"""
if n == 0:
return np.ones(x.shape)/np.sqrt(np.sqrt(2*np.pi))
c0 = 0.
c1 = 1./np.sqrt(np.sqrt(2*np.pi))
nd = float(n)
for i in range(n - 1):
tmp = c0
c0 = -c1*np.sqrt((nd - 1.)/nd)
c1 = tmp + c1*x*np.sqrt(1./nd)
nd = nd - 1.0
return c0 + c1*x
def hermegauss(deg):
    """
    Gauss-HermiteE quadrature.

    Computes the sample points and weights for Gauss-HermiteE quadrature.
    These sample points and weights will correctly integrate polynomials of
    degree :math:`2*deg - 1` or less over the interval :math:`[-\\inf, \\inf]`
    with the weight function :math:`f(x) = \\exp(-x^2/2)`.

    Parameters
    ----------
    deg : int
        Number of sample points and weights. It must be >= 1.

    Returns
    -------
    x : ndarray
        1-D ndarray containing the sample points.
    y : ndarray
        1-D ndarray containing the weights.

    Raises
    ------
    ValueError
        If `deg` is not a positive integer.

    Notes
    -----
    .. versionadded:: 1.7.0

    The results have only been tested up to degree 100, higher degrees may
    be problematic. The weights are determined by using the fact that

    .. math:: w_k = c / (He'_n(x_k) * He_{n-1}(x_k))

    where :math:`c` is a constant independent of :math:`k` and :math:`x_k`
    is the k'th root of :math:`He_n`, and then scaling the results to get
    the right value when integrating 1.

    """
    ideg = int(deg)
    # The check requires deg >= 1, so say "positive" (the old message
    # incorrectly said "non-negative").
    if ideg != deg or ideg < 1:
        raise ValueError("deg must be a positive integer")

    # first approximation of roots. We use the fact that the companion
    # matrix is symmetric in this case in order to obtain better zeros.
    # Use the validated ``ideg`` consistently.
    c = np.array([0]*ideg + [1])
    m = hermecompanion(c)
    x = la.eigvalsh(m)

    # improve roots by one application of Newton
    dy = _normed_hermite_e_n(x, ideg)
    df = _normed_hermite_e_n(x, ideg - 1) * np.sqrt(ideg)
    x -= dy/df

    # compute the weights. We scale the factor to avoid possible numerical
    # overflow.
    fm = _normed_hermite_e_n(x, ideg - 1)
    fm /= np.abs(fm).max()
    w = 1/(fm * fm)

    # for Hermite_e we can also symmetrize
    w = (w + w[::-1])/2
    x = (x - x[::-1])/2

    # scale w to get the right value
    w *= np.sqrt(2*np.pi) / w.sum()

    return x, w
def hermeweight(x):
    r"""Weight function of the Hermite_e polynomials.

    The weight function is :math:`\exp(-x^2/2)` and the interval of
    integration is :math:`[-\inf, \inf]`. The HermiteE polynomials are
    orthogonal, but not normalized, with respect to this weight function.

    The docstring is a raw string: the original non-raw version contained
    invalid escape sequences (``\e``, ``\i``) that raise warnings on
    modern Python.

    Parameters
    ----------
    x : array_like
       Values at which the weight function will be computed.

    Returns
    -------
    w : ndarray
       The weight function at `x`.

    Notes
    -----
    .. versionadded:: 1.7.0

    """
    w = np.exp(-.5*x**2)
    return w
#
# HermiteE series class
#
class HermiteE(ABCPolyBase):
    """An HermiteE series class.

    The HermiteE class provides the standard Python numerical methods
    '+', '-', '*', '//', '%', 'divmod', '**', and '()' as well as the
    attributes and methods listed in the `ABCPolyBase` documentation.

    Parameters
    ----------
    coef : array_like
        HermiteE coefficients in order of increasing degree, i.e,
        ``(1, 2, 3)`` gives ``1*He_0(x) + 2*He_1(X) + 3*He_2(x)``.
    domain : (2,) array_like, optional
        Domain to use. The interval ``[domain[0], domain[1]]`` is mapped
        to the interval ``[window[0], window[1]]`` by shifting and scaling.
        The default value is [-1, 1].
    window : (2,) array_like, optional
        Window, see `domain` for its use. The default value is [-1, 1].

        .. versionadded:: 1.6.0

    """
    # Virtual Functions: wire the generic ABCPolyBase operations to the
    # module-level herme* implementations defined above.
    _add = staticmethod(hermeadd)
    _sub = staticmethod(hermesub)
    _mul = staticmethod(hermemul)
    _div = staticmethod(hermediv)
    _pow = staticmethod(hermepow)
    _val = staticmethod(hermeval)
    _int = staticmethod(hermeint)
    _der = staticmethod(hermeder)
    _fit = staticmethod(hermefit)
    _line = staticmethod(hermeline)
    _roots = staticmethod(hermeroots)
    _fromroots = staticmethod(hermefromroots)

    # Virtual properties
    # nickname is the short prefix used in string representations.
    nickname = 'herme'
    domain = np.array(hermedomain)
    # NOTE(review): window is also initialized from hermedomain (both are
    # [-1, 1] here) — confirm a separate hermewindow constant was not
    # intended.
    window = np.array(hermedomain)
| mit |
brianhelba/pylibtiff | libtiff/tests/test_simple.py | 2 | 1148 | import os
from tempfile import mktemp
from numpy import *
from libtiff import TIFF
def test_write_read():
    """Round-trip a small 2x3 image through a TIFF file for every supported
    sample dtype, checking that both the data and the dtype survive."""
    dtypes = (uint8, uint16, uint32, uint64,
              int8, int16, int32, int64,
              float32, float64,
              complex64, complex128)
    for dtype in dtypes:
        original = array([[1, 2, 3], [4, 5, 6]], dtype)
        path = mktemp('.tif')
        writer = TIFF.open(path, 'w')
        writer.write_image(original)
        writer.close()
        reader = TIFF.open(path, 'r')
        restored = reader.read_image()
        reader.close()
        os.remove(path)
        assert original.dtype == restored.dtype
        assert (original == restored).all()
def test_slicing():
    """Write every top-left sub-image of a random 16x16 image to a TIFF
    file and verify each one reads back unchanged."""
    full = random.randint(255, size=(16, 16))
    for nrows in range(16):
        for ncols in range(16):
            sub = full[:nrows + 1, :ncols + 1]
            path = mktemp('.tif')
            out = TIFF.open(path, 'w')
            out.write_image(sub)
            out.close()
            inp = TIFF.open(path, 'r')
            loaded = inp.read_image()
            inp.close()
            assert (sub == loaded).all(), repr((nrows, ncols))
            os.remove(path)
rx2130/Leetcode | python/11 Container With Most Water.py | 1 | 3779 | class Solution(object):
def maxArea(self, height):
"""
:type height: List[int]
:rtype: int
"""
# Op1: Brute force
ans = 0
for i in range(1, len(height)):
for j in range(i):
area = min(height[i], height[j]) * (i - j)
ans = max(area, ans)
return ans
def maxArea2(self, height):
maxWater = 0
i, j = 0, len(height) - 1
while i < j:
h = min(height[j], height[i])
maxWater = max(maxWater, (j - i) * h)
while height[i] <= h and i < j:
i += 1
while height[j] <= h and i < j:
j -= 1
return maxWater
# height = [59,15,23,55,30,47,61,74,86,25,42,40,21,0,87,79,45,42,0,47,61,93,69,1,42,93,2,92,15,97,38,26,64,14,33,95,61,94,21,48,20,15,88,41,67,28,72,12,22,73,60,35,66,81,88,61,74,90,53,41,87,44,67,4,58,0,51,19,47,72,19,19,87,60,12,55,88,84,19,10,57,31,46,76,12,34,37,39,77,42,80,16,86,0,20,96,0,71,16,99,95,87,18,83,47,30,90,87,15,9,50,24,41,48,0,5,82,89,44,11,84,77,28,22,77,0,19,29,24,87,29,19,74,47,54,74,78,44,13,45,54,63,69,47,11,22,52,46,63,49,57,47,26,37,22,3,90,41,32,14,28,13,85,54,61,40,80,39,84,94,36,90,9,5,37,21,27,42,19,43,91,28,42,69,66,64,72,8,5,56,22,85,70,7,92,83,99,72,74,84,18,10,26,28,15,64,49,95,58,20,38,49,48,80,70,66,97,42,74,2,50,48,40,72,8,32,55,7,56,81,43,75,91,70,3,59,86,4,54,44,24,44,45,72,76,15,91,73,9,65,28,11,66,68,84,74,52,91,81,8,73,77,35,16,47,38,75,33,94,29,77,70,25,74,95,54,89,86,27,98,3,7,61,69,75,45,43,79,89,77,88,62,54,23,30,53,14,6,86,60,87,15,31,13,89,78,19,30,64,46,80,67,6,41,37,33,87,32,13,28,9,53,42,15,76,72,68,42,78,6,55,66,21,38,31,62,16,50,92,32,48,72,51,54,14,40,88,53,73,1,81,34,54,23,2,82,95,70,77,74,29,84,92,50,22,23,13,38,73,5,22,21,78,73,28,44,14,16,97,39,69,78,73,75,1,75,9,48,98,38,74,27,22,66,29,44,89,94,34,14,0,56,88,30,30,16,26,96,32,23,35,53,1,60,28,54,88,37,2,86,76,77,65,50,95,46,95,85,41,29,51,93,38,39,75,20,55,1,16,87,24,3,40,77,63,20,31,51,58,85,89,86,14,6,36,10,5,83,47,46,65,98,39,55,38,66,75,93,67,43,33,91,46,25,68,61,46,51,65,56,36,6,94,3,65,30,13,70,66,60,68,31,58,59,86,48,25,13,42,92,56,75,35,54,0,3,67,46,6,32,54,94,91,48,97,56,31,10,78,97,22,46,80,81,5,18,81,82,31,23,74,39,50,9,45,51,64,12,49,70,97,4,16,40,4,14,48,35,76,26,32,99,24,64,32,29,82,13,11,65,37,37,56,87,98,1,90,62,66,92,84,63,48,0,3,52,66,3,88,43,29,72,42,53,37,74,34,71,87,97,37,76,86,93,16,84,95,58,46,13,2,82,28,50,34,31,55,1,86,95,44,15,67,38,20,56,12,54,28,51,3,17,80,89,10,48,73,57,6,71,70,9,53,98,11,39,81,66,40,67,13,36,34,33,74,54,89,86,60,69,90,63,86,22,52,49,70,77,6,76,48,29,37,53,79,49,93,61,67,33,80,81,70,15,66,96,21,55,35,82,77,25,97,6
3,47,50,12,17,27,71,45,28,0,83,81,79,84,74,92,51,60,25,84,82,92,2,78,13,58,13,47,87,90,45,50,89,47,15,58,26,86,4,54,38,39,36,69,23,62,14,26,22,39,63,56,31,65,35,96,75,0,96,62,43,41,65,32,88,32,91,14,70,47,69,8,38,57,29,13,71,43,91,94,82,54,2,65,72,37,14,99,90,10,14,33,51,79,65,91,63,8,5,33,7,26,93,45,35,22,10,7,66,2,53,48,8,55,66,80,45,80,32,35,90,46,20,93,77,37,84,40,98,41,25,5,68,18,3,3,40,13,10,6,15,15,7,76,71,73,56,16,5,88,3,47,86,23,40,63,60,24,3,58,65,80,16,85,98,19,41,39,84,3,97,52,19,4,28,90,29,36,58,34,77,61,33,63,36,73,27,48,49,82,59,67,63,75,52,61,46,93,52,82,49,50,34,68,6,62,10,36,51,68,22,28,81,56,91,17,81,70,65,31,5,24,98,68,51,50,81,49,96,86,32,45,36,66,65,42,81,75,30,32,95,5,60,76,61,3,45,42,74,10,25,79,87,23,99,90,74,32,40,22,18,72,19,6,90,84,1,71,11,31,55,6,36,67,34,49,71,79,44,97,41,69,28,28,93,27,71,19,11,11,93,30,83,12,88,73,48,89,97,59,73,52,65,9,20,51,11,91,30,55,40,71,76,68,52,21,47]
# Small sample input; run both implementations and print their (equal) answers.
height = [59, 15, 83, 55, 30, 45, 40, 71, 76, 68, 52, 21, 47]
solver = Solution()
print(solver.maxArea(height))
print(solver.maxArea2(height))
jiankers/weevely3 | core/vectorlist.py | 14 | 6460 | """
The module `core.vectorlist` defines a `VectorList` object, normally used
to store the module vectors.
Module class executes `_register_vectors()` at init to initialize the `VectorList`
object as `self.vectors` module attribute.
The methods exposed by VectorList can be used to get the result of a
given vector execution with `get_result()`, get all the results of a bunch of
vectors with `get_results()`, or get the result of the first vector that
response in the way we want with `find_first_result()`.
"""
from core.vectors import Os
from mako.template import Template
from core.weexceptions import DevException
from core.loggers import log, dlog
from core import modules
import utils
from core import messages
class VectorList(list):
    """A list of module vectors with helpers to run them and collect results.

    Instantiated by Module._register_vectors() and exposed as
    ``self.vectors`` on modules.
    """

    def __init__(self, session, module_name):
        self.session = session
        self.module_name = module_name
        list.__init__(self)

    def find_first_result(self, names=None, format_args=None, condition=None,
                          store_result=False, store_name=''):
        """ Execute all the vectors and return the first result matching the given condition.

        Return the name and the result of the first vector execution response that satisfy
        the given condition.

        With unspecified names, execute all the vectors. Optionally store results.

        Exceptions triggered checking condition function are catched and logged.

        Args:
            names (list of str): The list of names of vectors to execute.

            format_args (dict): The arguments dictionary used to format the vectors with.

            condition (function): The function or lambda to check certain conditions on result.
            Must returns boolean.

            store_result (bool): Store as result.

            store_name (str): Store the found vector name in the specified argument.

        Returns:
            Tuple. Contains the vector name and execution result in the
            `( vector_name, result )` form.
        """
        if not callable(condition):
            raise DevException(messages.vectors.wrong_condition_type)
        if not isinstance(store_name, str):
            raise DevException(messages.vectors.wrong_store_name_type)

        # Use fresh objects instead of mutable default arguments: format_args
        # is mutated below, so a shared `{}` default would leak the last
        # vector name across unrelated calls.
        format_args = {} if format_args is None else format_args

        # Clean names filter from empty objects (hoisted out of the loop,
        # it is loop-invariant).
        names = [n for n in (names or []) if n]

        for vector in self:

            # Skip vectors that target a different operating system
            if not self._os_match(vector.target):
                continue

            # Skip if names filter is passed but current vector is missing
            if names and not any(n in vector.name for n in names):
                continue

            # Add current vector name
            format_args['current_vector'] = vector.name

            # Run
            result = vector.run(format_args)

            # See if condition is verified; a failing condition callback is
            # logged and treated as "not matching" rather than aborting.
            try:
                condition_result = condition(result)
            except Exception:
                import traceback; log.info(traceback.format_exc())
                log.debug(messages.vectorlist.vector_s_triggers_an_exc % vector.name)
                condition_result = False

            # Eventually store result or vector name
            if condition_result:
                if store_result:
                    self.session[self.module_name]['results'][vector.name] = result
                if store_name:
                    self.session[self.module_name]['stored_args'][store_name] = vector.name

                return vector.name, result

        return None, None

    def get_result(self, name, format_args=None, store_result=False):
        """Execute one vector and return the result.

        Run the vector with specified name. Optionally store results.

        Args:
            name (str): The name of vector to execute.

            format_args (dict): The arguments dictionary used to format the vectors with.

            store_result (bool): Store result in session.

        Returns:
            Object. Contains the vector execution result.
        """
        # Fresh dict instead of a mutable default: it is mutated below.
        format_args = {} if format_args is None else format_args

        vector = self.get_by_name(name)

        if vector and self._os_match(vector.target):

            # Add current vector name
            format_args['current_vector'] = vector.name

            result = vector.run(format_args)

            if store_result:
                self.session[self.module_name]['results'][name] = result

            return result

    def get_results(self, names=None, format_args=None, results_to_store=None):
        """Execute all the vectors and return the results.

        With unspecified names, execute all the vectors. Optionally store results.
        Returns a dictionary with results.

        Args:
            names (list of str): The list of names of vectors to execute.

            format_args (dict): The arguments dictionary used to format the vectors with.

            results_to_store (list of str): The list of names of the vectors which
            store the execution result.

        Returns:
            Dictionary. Contains all the vector results in the
            `{ vector_name : result }` form.
        """
        # Fresh objects instead of mutable defaults (format_args is mutated).
        names = [] if names is None else names
        format_args = {} if format_args is None else format_args
        results_to_store = [] if results_to_store is None else results_to_store

        response = {}
        for vector in self:

            if not self._os_match(vector.target):
                continue

            if names and not any(x in vector.name for x in names):
                continue

            # Add current vector name
            format_args['current_vector'] = vector.name

            response[vector.name] = vector.run(format_args)

            if not any(x in vector.name for x in results_to_store):
                continue

            self.session[self.module_name]['results'][vector.name] = response[vector.name]

        return response

    def _os_match(self, os):
        """Check if vector os is compatible with the remote os."""

        os_string = self.session['system_info']['results'].get('os')

        # If os_string is not set, just return True and continue
        if not os_string:
            return True

        os_current = Os.WIN if os_string.lower().startswith('win') else Os.NIX

        return os in (os_current, Os.ANY)

    def get_by_name(self, name):
        """Get the vector object by name.

        Args:
            name (str): the name of the requested vector.

        Returns:
            Vector object.
        """
        return next((v for v in self if v.name == name), None)

    def get_names(self):
        """Get the vectors names.

        Returns:
            List of strings. Contain vectors names.
        """
        return [v.name for v in self]
| gpl-3.0 |
oskar456/turrisclock | fakegpio.py | 1 | 1282 | #!/usr/bin/env python
# vim: expandtab shiftwidth=4 tabstop=8 softtabstop=4 smartindent cinwords=if,elif,else,for,while,try,except,finally,def,class,with
from __future__ import print_function
class GPIO:
    """ Class representing one fake GPIO signal for debugging purposes """

    def __init__(self, name, direction="in"):
        """
        @param name GPIO number (224, 225, ...) as a string
        @direction string in or out
        """
        self.name = str(name)
        # Note: the export message echoes the constructor argument as given.
        print("GPIO {} exported".format(name))
        self.reset()
        self.setDirection(direction)

    def __del__(self):
        """ Make sure direction is set to in to protect the SoC """
        self.setDirection("in")

    def setDirection(self, direction):
        """Sets pin direction"""
        self.direction = direction
        print("GPIO {} direction set to {}".format(self.name, direction))

    def get(self):
        """Return current GPIO value"""
        # The fake pin always reads low.
        return False

    def set(self, value=True):
        """Sets GPIO to value"""
        state = '1' if value else '0'
        print("GPIO {} set to {}".format(self.name, state))

    def reset(self):
        """Sets GPIO to value 0"""
        self.set(False)

    def __repr__(self):
        return "GPIO({}, {})".format(self.name, self.direction)
mhrivnak/pulp | bindings/pulp/bindings/consumer_groups.py | 3 | 7364 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from pulp.bindings.search import SearchAPI
from pulp.bindings.base import PulpAPI
class ConsumerGroupAPI(PulpAPI):
    """
    Connection class to access consumer group specific calls
    """
    PATH = 'v2/consumer_groups/'

    def consumer_groups(self):
        """
        Retrieve every consumer group known to the server.

        :return: all consumer groups
        :rtype:  list
        """
        return self.server.GET(self.PATH)

    def create(self, consumer_group_id, display_name, description, notes):
        """
        Create a new consumer group.

        :param consumer_group_id: unique primary key
        :type  consumer_group_id: basestring
        :param display_name: Human-readable name
        :type  display_name: basestring
        :param description: text description of the consumer group
        :type  description: basestring
        :param notes: key-value pairs to programmatically tag the consumer
        :type  notes: dict
        :return: Response object
        :rtype:  pulp.bindings.responses.Response
        """
        body = {
            'id': consumer_group_id,
            'display_name': display_name,
            'description': description,
            'notes': notes,
        }
        return self.server.POST(self.PATH, body)

    def consumer_group(self, consumer_group_id):
        """
        Retrieve a single consumer group.

        :param consumer_group_id: primary key for a consumer group
        :type  consumer_group_id: basestring
        :return: Response object
        :rtype:  pulp.bindings.responses.Response
        """
        group_path = self.PATH + '%s/' % consumer_group_id
        return self.server.GET(group_path)

    def delete(self, consumer_group_id):
        """
        Delete a single consumer group.

        :param consumer_group_id: primary key for a consumer group
        :type  consumer_group_id: basestring
        :return: Response object
        :rtype:  pulp.bindings.responses.Response
        """
        group_path = self.PATH + '%s/' % consumer_group_id
        return self.server.DELETE(group_path)

    def update(self, consumer_group_id, delta):
        """
        Update a consumer group with the given changes.

        :param consumer_group_id: primary key for a consumer group
        :type  consumer_group_id: basestring
        :param delta: map of new values with attribute names as keys.
        :type  delta: dict
        :return: Response object
        :rtype:  pulp.bindings.responses.Response
        """
        group_path = self.PATH + '%s/' % consumer_group_id
        return self.server.PUT(group_path, delta)
class ConsumerGroupSearchAPI(SearchAPI):
    """
    Consumer Group searching.
    """
    # SearchAPI (see pulp.bindings.search) provides the search methods;
    # this subclass only points them at the consumer-group endpoint.
    PATH = 'v2/consumer_groups/search/'
class ConsumerGroupActionAPI(SearchAPI):
    """
    Consumer Group Actions.
    """
    PATH = 'v2/consumer_groups/%s/actions/'

    def _post_criteria_action(self, consumer_group_id, action, kwargs):
        """
        Shared implementation for associate()/unassociate(), which were
        previously copy-pasted duplicates.

        :param consumer_group_id: primary key for a consumer group
        :type  consumer_group_id: basestring
        :param action: endpoint name, 'associate' or 'unassociate'
        :type  action: str
        :param kwargs: criteria keyword arguments (mutated in place to
                       fold compose_filters() output into 'filters')
        :type  kwargs: dict
        :return: Response body
        :rtype:  basestring
        """
        path = self.PATH % consumer_group_id + action + '/'
        filters = self.compose_filters(**kwargs)
        if filters:
            kwargs['filters'] = filters
        self._strip_criteria_kwargs(kwargs)
        response = self.server.POST(path, {'criteria': kwargs})
        return response.response_body

    def associate(self, consumer_group_id, **kwargs):
        """
        Associate a set of consumers with a consumer group.

        :param consumer_group_id: primary key for a consumer group
        :type  consumer_group_id: basestring
        :param kwargs: name based parameters that match the values accepted by
                       pulp.server.db.model.criteria.Criteria.__init__
        :type  kwargs: dict
        :return: Response body
        :rtype:  basestring
        """
        return self._post_criteria_action(consumer_group_id, 'associate', kwargs)

    def unassociate(self, consumer_group_id, **kwargs):
        """
        Unassociate a set of consumers with a consumer group.

        :param consumer_group_id: primary key for a consumer group
        :type  consumer_group_id: basestring
        :param kwargs: name based parameters that match the values accepted by
                       pulp.server.db.model.criteria.Criteria.__init__
        :type  kwargs: dict
        :return: Response body
        :rtype:  basestring
        """
        return self._post_criteria_action(consumer_group_id, 'unassociate', kwargs)
class ConsumerGroupBindAPI(PulpAPI):
    """
    Consumer Group bind operations
    """
    PATH = 'v2/consumer_groups/%s/bindings/'

    def bind(self, consumer_group_id, repo_id, distributor_id):
        """
        Bind a consumer group to a distributor associated with a repository.
        Each consumer in the consumer group will be bound.

        :param consumer_group_id: primary key for a consumer group
        :type  consumer_group_id: basestring
        :param repo_id: repository id
        :type  repo_id: basestring
        :param distributor_id: distributor id
        :type  distributor_id: basestring
        """
        bindings_path = self.PATH % (consumer_group_id)
        payload = {'repo_id' : repo_id, 'distributor_id' : distributor_id}
        return self.server.POST(bindings_path, payload)

    def unbind(self, consumer_group_id, repo_id, distributor_id):
        """
        Unbind a consumer group to a distributor associated with a repository.
        Each consumer in the consumer group will be unbound.

        :param consumer_group_id: primary key for a consumer group
        :type  consumer_group_id: basestring
        :param repo_id: repository id
        :type  repo_id: basestring
        :param distributor_id: distributor id
        :type  distributor_id: basestring
        """
        binding_path = self.PATH % (consumer_group_id) + '%s/%s/' % (repo_id, distributor_id)
        return self.server.DELETE(binding_path)
class ConsumerGroupContentAPI(PulpAPI):
    """
    Consumer Group content operations
    """
    PATH = 'v2/consumer_groups/%s/actions/content/'

    def _post_content_action(self, consumer_group_id, action, units, options):
        """
        Shared POST for install/update/uninstall, which previously had
        three identical copy-pasted bodies.

        :param consumer_group_id: primary key for a consumer group
        :type  consumer_group_id: basestring
        :param action: endpoint name: 'install', 'update' or 'uninstall'
        :type  action: str
        :param units: list of unit specifications to act on
        :param options: handler options for the action
        """
        path = self.PATH % consumer_group_id + action + '/'
        data = {"units": units,
                "options": options,}
        return self.server.POST(path, data)

    def install(self, consumer_group_id, units, options):
        """Install *units* on every consumer in the group."""
        return self._post_content_action(consumer_group_id, 'install', units, options)

    def update(self, consumer_group_id, units, options):
        """Update *units* on every consumer in the group."""
        return self._post_content_action(consumer_group_id, 'update', units, options)

    def uninstall(self, consumer_group_id, units, options):
        """Uninstall *units* from every consumer in the group."""
        return self._post_content_action(consumer_group_id, 'uninstall', units, options)
| gpl-2.0 |
hehongliang/tensorflow | tensorflow/tools/dist_test/scripts_allreduce/k8s_generate_yaml.py | 11 | 2997 | #!/usr/bin/python
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generates YAML configuration file for allreduce-based distributed TensorFlow.
The workers will be run in a Kubernetes (k8s) container cluster.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import k8s_generate_yaml_lib
# Note: It is intentional that we do not import tensorflow in this script. The
# machine that launches a TensorFlow k8s cluster does not have to have the
# Python package of TensorFlow installed on it.
# Defaults consumed by main()'s argparse flag definitions below.
DEFAULT_DOCKER_IMAGE = 'tensorflow/tensorflow:latest-devel'
DEFAULT_PORT = 22  # ssh port exposed by each container
DEFAULT_CONFIG_MAP = 'k8s-config-map'
DEFAULT_DEPLOYMENT = 'k8s-ml-deployment'
def main():
  """Parse flags, then print the generated YAML config to stdout."""
  parser = argparse.ArgumentParser()

  # (flag, type, default, help) — identical flags/help text to the original
  # one-add_argument-per-flag form, just table-driven.
  flag_specs = [
      ('--docker_image', str, DEFAULT_DOCKER_IMAGE,
       'Override default docker image for the TensorFlow'),
      ('--num_containers', int, 0, 'How many docker containers to launch'),
      ('--config_map', str, DEFAULT_CONFIG_MAP, 'Override default config map'),
      ('--deployment', str, DEFAULT_DEPLOYMENT, 'Override default deployment'),
      ('--ssh_port', int, DEFAULT_PORT,
       'Override default ssh port (Default: %d)' % DEFAULT_PORT),
      ('--use_hostnet', int, 0, 'Used to enable host network mode (Default: 0)'),
      ('--use_shared_volume', int, 0, 'Used to mount shared volume (Default: 0)'),
  ]
  for flag, flag_type, default, help_text in flag_specs:
    parser.add_argument(flag, type=flag_type, default=default, help=help_text)

  args = parser.parse_args()

  # A positive container count is required to emit anything meaningful.
  if args.num_containers <= 0:
    sys.stderr.write('--num_containers must be greater than 0; received %d\n' %
                     args.num_containers)
    sys.exit(1)

  # Generate contents of yaml config
  yaml_config = k8s_generate_yaml_lib.GenerateConfig(
      args.docker_image, args.num_containers, args.config_map, args.deployment,
      args.ssh_port, args.use_hostnet, args.use_shared_volume)
  print(yaml_config)  # pylint: disable=superfluous-parens


if __name__ == '__main__':
  main()
| apache-2.0 |
abbeymiles/aima-python | submissions/Blue/myNN.py | 10 | 3071 | from sklearn import datasets
from sklearn.neural_network import MLPClassifier
import traceback
from submissions.Blue import music
class DataFrame:
    """Lightweight container mimicking sklearn's dataset "Bunch" objects
    (data, feature_names, target, target_names).

    Fix: the attributes were mutable *class* attributes, so every instance
    aliased the same lists and appends leaked between datasets. They are
    now per-instance attributes created in __init__.
    """
    def __init__(self):
        self.data = []           # rows of feature values
        self.feature_names = []  # one name per column of data
        self.target = []         # one label per row of data
        self.target_names = []   # human-readable label names
# Build the dataset by walking the CORGIS Music Library records.
musicATRB = DataFrame()
musicATRB.data = []
targetData = []

'''
Extract data from the CORGIS Music Library.

Most 'hit' songs average 48-52 bars and no more than ~3 minutes (180 seconds)...
'''

allSongs = music.get_songs()
for song in allSongs:
    try:
        # duration (seconds) becomes the raw target signal
        length = float(song['song']["duration"])
        targetData.append(length)

        genre = song['artist']['terms']  # String
        title = song['song']['title']  # String
        # release = float(song['song']['Release'])

        # NOTE(review): feature rows are [genre, title] strings only, yet
        # feature_names below lists four columns — confirm intended schema.
        musicATRB.data.append([genre, title])

    except:
        # Bare except: any malformed record is logged and skipped.
        traceback.print_exc()

musicATRB.feature_names = [
    'Genre',
    'Title',
    'Release',
    'Length',
]
musicATRB.target = []
def musicTarget(release):
    """Label a song as a potential hit.

    Returns 1 when *release* (a duration in seconds) is at most 210
    (3.5 minutes), else 0.

    Bug fix: the original ignored its parameter and read the leftover
    loop variable ``song`` instead, so every row received the same label.
    """
    if release <= 210:
        return 1
    return 0
# Label every song duration collected above.
for i in targetData:
    tt = musicTarget(i)
    musicATRB.target.append(tt)

musicATRB.target_names = [
    'Not a hit song',
    'Could be a hit song',
]

# NOTE(review): this Examples dict is rebound (discarded) by the later
# Examples assignment at the bottom of the file.
Examples = {
    'Music': musicATRB,
}

'''
Make a customn classifier,
'''
# Commented-out keyword arguments document the sklearn defaults left as-is.
mlpc = MLPClassifier(
    hidden_layer_sizes = (100,),
    activation = 'relu',
    solver='sgd', # 'adam',
    alpha = 0.0001,
    # batch_size='auto',
    learning_rate = 'adaptive', # 'constant',
    # power_t = 0.5,
    max_iter = 1000, # 200,
    shuffle = True,
    # random_state = None,
    # tol = 1e-4,
    # verbose = False,
    # warm_start = False,
    # momentum = 0.9,
    # nesterovs_momentum = True,
    # early_stopping = False,
    # validation_fraction = 0.1,
    # beta_1 = 0.9,
    # beta_2 = 0.999,
    # epsilon = 1e-8,
)

'''
Try scaling the data.
'''
musicScaled = DataFrame()
# Per-column scaling bounds shared by setupScales()/scaleGrid().
# (The original stored these in globals named ``min``/``max``, shadowing
# the builtins; renamed — both functions are updated in this block.)
_col_min = None
_col_max = None


def setupScales(grid):
    """Record the per-column minima and maxima of *grid* in module state
    for later use by scaleGrid()."""
    global _col_min, _col_max
    _col_min = list(grid[0])
    _col_max = list(grid[0])
    for row in grid[1:]:
        for col, cell in enumerate(row):
            if cell < _col_min[col]:
                _col_min[col] = cell
            if cell > _col_max[col]:
                _col_max[col] = cell


def scaleGrid(grid):
    """Return *grid* min-max scaled column-wise to [0, 1] using the bounds
    captured by setupScales().

    Cells that cannot be scaled (constant column -> division by zero, or
    non-numeric values) are silently dropped from their row, matching the
    original best-effort behavior.
    """
    newGrid = []
    for row in grid:
        newRow = []
        for col, cell in enumerate(row):
            try:
                scaled = (cell - _col_min[col]) \
                         / (_col_max[col] - _col_min[col])
                newRow.append(scaled)
            except Exception:
                pass
        newGrid.append(newRow)
    return newGrid
# NOTE(review): musicATRB.data rows hold strings (genre, title); the
# min/max comparison works lexicographically, but subtraction in
# scaleGrid raises and is swallowed, so scaled rows are likely empty —
# confirm whether numeric features were intended here.
setupScales(musicATRB.data)
musicScaled.data = scaleGrid(musicATRB.data)
musicScaled.feature_names = musicATRB.feature_names
musicScaled.target = musicATRB.target
musicScaled.target_names = musicATRB.target_names

# Final export consumed by the grading harness; rebinds the earlier
# Examples dict. ('MusisScaled' typo kept: it is a runtime key.)
Examples = {
    'musicDefault': {
        'frame': musicATRB,
    },
    'MusicSGD': {
        'frame': musicATRB,
        'mlpc': mlpc
    },
    'MusisScaled': {
        'frame': musicScaled,
    },
}
chrisdunelm/grpc | test/http2_test/test_rst_after_header.py | 30 | 1214 | # Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import http2_base_server
class TestcaseRstStreamAfterHeader(object):
  """
  In response to an incoming request, this test sends headers, followed by
  a reset stream frame. Client asserts that the RPC failed.
  """

  def __init__(self):
    server = http2_base_server.H2ProtocolBaseServer()
    # Route incoming requests to our handler below.
    server._handlers['RequestReceived'] = self.on_request_received
    self._base_server = server

  def get_base_server(self):
    return self._base_server

  def on_request_received(self, event):
    # Reply with the normal initial headers, then immediately reset the
    # stream so the client observes a failed RPC.
    self._base_server.on_request_received_default(event)
    self._base_server.send_reset_stream()
| apache-2.0 |
wgwoods/anaconda | tests/dracut_tests/test_driver_updates.py | 3 | 27177 | # test_driver_updates.py - unittests for driver_updates.py
import errno
import os
import shutil
import sys
import tempfile
import unittest

try:
    import unittest.mock as mock
except ImportError:
    import mock
sys.path.append(os.path.normpath(os.path.dirname(__file__)+'/../../dracut'))
from driver_updates import copy_files, move_files, iter_files, ensure_dir
from driver_updates import append_line, mkdir_seq
def touch(path):
    """Create *path* if missing (like touch(1)); existing files are fine.

    Fix: close the file handle (the original leaked it) and compare
    against errno.EEXIST instead of the magic number 17.
    """
    try:
        with open(path, 'a'):
            pass
    except IOError as e:
        if e.errno != errno.EEXIST:
            raise

def makedir(path):
    """Ensure directory *path* exists; return it for chaining."""
    ensure_dir(path)
    return path

def makefile(path):
    """Create an empty file (and its parent dirs) at *path*; return it."""
    makedir(os.path.dirname(path))
    touch(path)
    return path

def makefiles(*paths):
    """Create every path given; return the list of created files."""
    return [makefile(p) for p in paths]
class FileTestCaseBase(unittest.TestCase):
    """Base class giving each test a throwaway tmpdir plus src/dest paths."""

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp(prefix="test_driver_updates.")
        self.srcdir = self.tmpdir + '/src/'
        self.destdir = self.tmpdir + '/dest/'

    def tearDown(self):
        # Best-effort cleanup; never fail a test on teardown.
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def makefiles(self, *paths):
        created = []
        for p in paths:
            created.append(makefile(os.path.normpath(self.tmpdir + '/' + p)))
        return created
class SelfTestCase(FileTestCaseBase):
    """Sanity checks for this module's own file-creation helpers."""
    def test_makefiles(self):
        """check test helpers"""
        filepaths = ["sub/dir/test.file", "testfile"]
        self.makefiles(*filepaths)
        for f in filepaths:
            self.assertTrue(os.path.exists(self.tmpdir+'/'+f))

class TestCopyFiles(FileTestCaseBase):
    """Tests for driver_updates.copy_files()."""
    def test_basic(self):
        """copy_file: copy files into destdir, leaving existing contents"""
        files = self.makefiles("src/file1", "src/subdir/file2")
        self.makefiles("dest/file3")
        copy_files(files, self.destdir)
        result = set(os.listdir(self.destdir))
        self.assertEqual(result, set(["file1", "file2", "file3"]))
    def test_overwrite(self):
        """copy_file: overwrite files in destdir if they have the same name"""
        src, dest = self.makefiles("src/file1", "dest/file1")
        with open(src, 'w') as outf:
            outf.write("srcfile")
        with open(dest, 'w') as outf:
            outf.write("destfile")
        copy_files([src], self.destdir)
        self.assertEqual(os.listdir(self.destdir), ["file1"])
        self.assertEqual(open(dest).read(), "srcfile")
    def test_samefile(self):
        """copy_file: skip files already in destdir"""
        (dest,) = self.makefiles("dest/file1")
        with open(dest, 'w') as outf:
            outf.write("destfile")
        copy_files([dest], self.destdir)
        self.assertEqual(os.listdir(self.destdir), ["file1"])
        self.assertEqual(open(dest).read(), "destfile")
    def test_copy_to_parent(self):
        """copy_file: skip files in subdirs of destdir"""
        files = self.makefiles("dest/subdir/file1")
        copy_files(files, self.destdir)
        self.assertEqual(list(iter_files(self.destdir)), files)
class TestIterFiles(FileTestCaseBase):
    """Tests for driver_updates.iter_files()."""
    def test_basic(self):
        """iter_files: iterates over full paths to files under topdir"""
        files = set(self.makefiles("src/file1", "dest/file2", "src/sub/file3"))
        makedir(self.tmpdir+'/empty/dir')  # empty dirs must not be yielded
        result = set(iter_files(self.tmpdir))
        self.assertEqual(files, result)
    def test_pattern(self):
        """iter_files: match filename against glob pattern"""
        self.makefiles("src/file1.so", "src/sub.ko/file2")
        goodfiles = set(self.makefiles("src/sub/file1.ko", "src/file2.ko.xz"))
        result = set(iter_files(self.tmpdir, pattern="*.ko*"))
        self.assertEqual(result, goodfiles)

class TestMoveFiles(FileTestCaseBase):
    """Tests for driver_updates.move_files()."""
    def test_basic(self):
        """move_files: move files to destdir"""
        files = self.makefiles("src/file1", "src/subdir/file2")
        move_files(files, self.destdir)
        self.assertEqual(set(os.listdir(self.destdir)), set(["file1", "file2"]))
        self.assertEqual(list(iter_files(self.srcdir)), [])
    def test_overwrite(self):
        """move_files: overwrite files with the same name"""
        src, dest = self.makefiles("src/file1", "dest/file1")
        with open(src, 'w') as outf:
            outf.write("srcfile")
        with open(dest, 'w') as outf:
            outf.write("destfile")
        move_files([src], self.destdir)
        self.assertEqual(os.listdir(self.destdir), ["file1"])
        self.assertEqual(open(dest).read(), "srcfile")
        self.assertEqual(list(iter_files(self.srcdir)), [])
    def test_samefile(self):
        """move_files: leave files alone if they're already in destdir"""
        (dest,) = self.makefiles("dest/file1")
        with open(dest, 'w') as outf:
            outf.write("destfile")
        move_files([dest], self.destdir)
        self.assertEqual(os.listdir(self.destdir), ["file1"])
        self.assertEqual(open(dest).read(), "destfile")
    def test_move_to_parent(self):
        """move_files: leave files alone if they're in a subdir of destdir"""
        files = set(self.makefiles("dest/subdir/file1", "dest/file2"))
        move_files(files, self.destdir)
        self.assertEqual(set(iter_files(self.destdir)), files)
class TestAppendLine(FileTestCaseBase):
    """Tests for driver_updates.append_line()."""
    def test_empty(self):
        """append_line: create file + append \\n when needed"""
        line = "this is a line of text with no newline"
        outfile = self.tmpdir+'/outfile'
        append_line(outfile, line)
        self.assertEqual(open(outfile).read(), line+'\n')
    def test_append(self):
        """append_line: adds a line to the end of an existing file"""
        oldlines = ["line one", "line two", "and I'm line three"]
        outfile = self.tmpdir+'/outfile'
        with open(outfile, 'w') as outf:
            for line in oldlines:
                outf.write(line+'\n')
        # this line already ends in '\n'; append_line must not double it
        line = "this line contains a newline already\n"
        append_line(outfile, line)
        self.assertEqual(open(outfile).read(), '\n'.join(oldlines+[line]))

from driver_updates import read_lines
class TestReadLine(FileTestCaseBase):
    """Tests for driver_updates.read_lines()."""
    def test_empty(self):
        """read_lines: return [] for empty file"""
        [empty] = self.makefiles("emptyfile")
        self.assertEqual(read_lines(empty), [])
    def test_missing(self):
        """read_lines: return [] for missing file"""
        self.assertEqual(read_lines(self.tmpdir+'/no-such-file'),[])
    def test_readlines(self):
        """read_lines: returns a list of lines without trailing newlines"""
        filedata = 'line one\nline two\n\nline four\n'
        outfile = self.tmpdir+'/outfile'
        with open(outfile, 'w') as outf:
            outf.write(filedata)
        lines = read_lines(outfile)
        self.assertEqual(lines, ['line one', 'line two','','line four'])
    def test_readline_and_append_line(self):
        """read_lines: returns items as passed to append_line"""
        filename = self.tmpdir+'/outfile'
        items = ["one", "two", "five"]
        for i in items:
            append_line(filename, i)
        self.assertEqual(items, read_lines(filename))
class TestMkdirSeq(FileTestCaseBase):
    """Tests for driver_updates.mkdir_seq()."""
    def test_basic(self):
        """mkdir_seq: first dir ends with 1"""
        newdir = mkdir_seq(self.srcdir+'/DD-')
        self.assertEqual(newdir, self.srcdir+'/DD-1')
        self.assertTrue(os.path.isdir(newdir))
    def test_one_exists(self):
        """mkdir_seq: increment number if file exists"""
        firstdir = mkdir_seq(self.srcdir+'/DD-')
        newdir = mkdir_seq(self.srcdir+'/DD-')
        self.assertEqual(newdir, self.srcdir+'/DD-2')
        self.assertTrue(os.path.isdir(newdir))
        self.assertTrue(os.path.isdir(firstdir))

from driver_updates import find_repos, save_repo, ARCH

# As far as we know, this is what makes a valid repo: rhdd3 + rpms/`uname -m`/
def makerepo(topdir, desc=None):
    # Fixture helper: lay out the minimal driver-disk repo structure.
    descfile = makefile(topdir+'/rhdd3')
    if not desc:
        desc = os.path.basename(topdir)
    with open(descfile, "w") as outf:
        outf.write(desc+"\n")
    makedir(topdir+'/rpms/'+ARCH)

class TestFindRepos(FileTestCaseBase):
    """Tests for driver_updates.find_repos()."""
    def test_basic(self):
        """find_repos: return RPM dir if a valid repo is found"""
        makerepo(self.tmpdir)
        repos = find_repos(self.tmpdir)
        self.assertEqual(repos, [self.tmpdir+'/rpms/'+ARCH])
        self.assertTrue(os.path.isdir(repos[0]))
    def test_multiple_subdirs(self):
        """find_repos: descend multiple subdirs if needed"""
        makerepo(self.tmpdir+'/driver1')
        makerepo(self.tmpdir+'/sub/driver1')
        makerepo(self.tmpdir+'/sub/driver2')
        repos = find_repos(self.tmpdir)
        self.assertEqual(len(repos),3)

class TestSaveRepo(FileTestCaseBase):
    """Tests for driver_updates.save_repo()."""
    def test_basic(self):
        """save_repo: copies a directory to /run/install/DD-X"""
        makerepo(self.srcdir)
        repo = find_repos(self.srcdir)[0]
        makefile(repo+'/fake-something.rpm')
        saved = save_repo(repo, target=self.destdir)
        self.assertEqual(set(os.listdir(saved)), set(["fake-something.rpm"]))
        self.assertEqual(saved, os.path.join(self.destdir, "DD-1"))
from driver_updates import mount, umount, mounted
class MountTestCase(unittest.TestCase):
    """Tests for mount()/umount()/mounted(); subprocess is mocked, so no
    real mounting happens."""
    @mock.patch('driver_updates.mkdir_seq')
    @mock.patch('driver_updates.subprocess.check_call')
    def test_mkdir(self, check_call, mkdir):
        """mount: makes mountpoint if needed"""
        dev, mnt = '/dev/fake', '/media/DD-1'
        mkdir.return_value = mnt
        mountpoint = mount(dev)
        mkdir.assert_called_once_with('/media/DD-')
        check_call.assert_called_once_with(["mount", dev, mnt])
        self.assertEqual(mnt, mountpoint)
    @mock.patch('driver_updates.mkdir_seq')
    @mock.patch('driver_updates.subprocess.check_call')
    def test_basic(self, check_call, mkdir):
        """mount: calls mount(8) to mount a device/image"""
        dev, mnt = '/dev/fake', '/media/fake'
        mount(dev, mnt)
        check_call.assert_called_once_with(["mount", dev, mnt])
        self.assertFalse(mkdir.called)
    @mock.patch('driver_updates.subprocess.call')
    def test_umount(self, call):
        """umount: calls umount(8)"""
        mnt = '/mnt/fake'
        umount(mnt)
        call.assert_called_once_with(["umount", mnt])
    @mock.patch('driver_updates.mount')
    @mock.patch('driver_updates.umount')
    def test_mount_manager(self, mock_umount, mock_mount):
        """mounted: context manager mounts/umounts as expected"""
        dev, mnt = '/dev/fake', '/media/fake'
        mock_mount.return_value = mnt
        with mounted(dev, mnt) as mountpoint:
            mock_mount.assert_called_once_with(dev, mnt)
            self.assertFalse(mock_umount.called)
            self.assertEqual(mountpoint, mnt)
        mock_umount.assert_called_once_with(mnt)
# NOTE: dd_list and dd_extract get tested pretty thoroughly in tests/dd_tests,
# so this is a slightly higher-level test case
from driver_updates import dd_list, dd_extract, Driver

# Shared fixtures: a fake kernel-module driver and a fake installer
# enhancement, both "hosted" in the same repo path.
fake_module = Driver(
    source='/repo/path/to/fake-driver-1.0-1.rpm',
    name='fake-driver',
    flags='modules firmwares',
    description='Wow this is totally a fake driver.\nHooray for this',
    repo='/repo/path/to'
)
fake_enhancement = Driver(
    source='/repo/path/to/fake-enhancement-1.0-1.rpm',
    name='fake-enhancement',
    flags='binaries libraries',
    description='This is enhancing the crap out of the installer.\n\nYeah.',
    repo=fake_module.repo
)

def dd_list_output(driver):
    # Render a Driver the way the dd_list binary prints it (bytes).
    out='{0.source}\n{0.name}\n{0.flags}\n{0.description}\n---\n'.format(driver)
    return out.encode('utf-8')

class DDUtilsTestCase(unittest.TestCase):
    """Higher-level checks on the dd_list/dd_extract wrappers."""
    @mock.patch("driver_updates.subprocess.check_output")
    def test_dd_list(self, check_output):
        """dd_list: returns a list of Driver objects parsed from output"""
        output = dd_list_output(fake_module)+dd_list_output(fake_enhancement)
        check_output.return_value = output
        anaconda, kernel = '19.0', os.uname()[2]
        result = dd_list(fake_module.repo)
        cmd = check_output.call_args[0][0]
        self.assertIn(kernel, cmd)
        self.assertIn(anaconda, cmd)
        self.assertIn(fake_module.repo, cmd)
        self.assertTrue(cmd[0].endswith("dd_list"))
        self.assertEqual(len(result), 2)
        mod, enh = sorted(result, key=lambda d: d.name)
        self.assertEqual(mod.__dict__, fake_module.__dict__)
        self.assertEqual(enh.__dict__, fake_enhancement.__dict__)
    @mock.patch("driver_updates.subprocess.check_output")
    def test_dd_extract(self, check_output):
        """dd_extract: call binary with expected arguments"""
        rpm = "/some/kind/of/path.rpm"
        outdir = "/output/dir"
        dd_extract(rpm, outdir)
        cmd = check_output.call_args[0][0]
        self.assertIn(os.uname()[2], cmd)
        self.assertIn(rpm, cmd)
        self.assertIn(outdir, cmd)
        self.assertIn("-blmf", cmd)
        self.assertTrue(cmd[0].endswith("dd_extract"))
from driver_updates import extract_drivers, grab_driver_files, load_drivers

# Class-level patches apply to every test; mocks are passed in
# bottom-decorator-first, so dd_extract arrives first and the remaining
# patches land in *args.
@mock.patch("driver_updates.ensure_dir")
@mock.patch("driver_updates.save_repo")
@mock.patch("driver_updates.append_line")
@mock.patch("driver_updates.dd_extract")
class ExtractDriversTestCase(unittest.TestCase):
    """Tests for extract_drivers() with all filesystem side effects mocked."""
    def test_drivers(self, mock_extract, mock_append, mock_save, *args):
        """extract_drivers: save repo, write pkglist"""
        extract_drivers(drivers=[fake_enhancement, fake_module])
        # extracts all listed modules
        mock_extract.assert_has_calls([
            mock.call(fake_enhancement.source, "/updates"),
            mock.call(fake_module.source, "/updates")
        ], any_order=True)
        pkglist = "/run/install/dd_packages"
        mock_append.assert_called_once_with(pkglist, fake_module.name)
        mock_save.assert_called_once_with(fake_module.repo)
    def test_enhancements(self, mock_extract, mock_append, mock_save, *args):
        """extract_drivers: extract selected drivers, don't save enhancements"""
        extract_drivers(drivers=[fake_enhancement])
        mock_extract.assert_called_once_with(
            fake_enhancement.source, "/updates"
        )
        self.assertFalse(mock_append.called)
        self.assertFalse(mock_save.called)
    def test_repo(self, mock_extract, mock_append, mock_save, *args):
        """extract_drivers(repos=[...]) extracts all drivers from named repos"""
        with mock.patch("driver_updates.dd_list", side_effect=[
            [fake_enhancement],
            [fake_enhancement, fake_module]]):
            extract_drivers(repos=['enh_repo', 'mod_repo'])
        mock_extract.assert_has_calls([
            mock.call(fake_enhancement.source, "/updates"),
            mock.call(fake_enhancement.source, "/updates"),
            mock.call(fake_module.source, "/updates")
        ])
        pkglist = "/run/install/dd_packages"
        mock_append.assert_called_once_with(pkglist, fake_module.name)
        mock_save.assert_called_once_with(fake_module.repo)
class GrabDriverFilesTestCase(FileTestCaseBase):
    """Tests for grab_driver_files(), using real temp directories."""
    def test_basic(self):
        """grab_driver_files: copy drivers into place, return module list"""
        # create a bunch of fake extracted files
        outdir = self.tmpdir + '/extract-outdir'
        moddir = outdir + "/lib/modules/%s/kernel/" % os.uname()[2]
        fwdir = outdir + "/lib/firmware/"
        modules = makefiles(moddir+"net/funk.ko", moddir+"fs/lolfs.ko.xz")
        firmware = makefiles(fwdir+"funk.fw")
        # decoys outside the kernel/firmware trees must be ignored
        makefiles(outdir+"/usr/bin/monkey", outdir+"/other/dir/blah.ko")
        mod_upd_dir = self.tmpdir+'/module-updates'
        fw_upd_dir = self.tmpdir+'/fw-updates'
        # use our updates dirs instead of the default updates dirs
        with mock.patch.multiple("driver_updates",
                                 MODULE_UPDATES_DIR=mod_upd_dir,
                                 FIRMWARE_UPDATES_DIR=fw_upd_dir):
            modnames = grab_driver_files(outdir)
        self.assertEqual(set(modnames), set(["funk", "lolfs"]))
        modfiles = set(['funk.ko', 'lolfs.ko.xz'])
        fwfiles = set(['funk.fw'])
        # modules/firmware are *not* in their old locations
        self.assertEqual([f for f in modules+firmware if os.path.exists(f)], [])
        # modules are in the system's updates dir
        self.assertEqual(set(os.listdir(mod_upd_dir)), modfiles)
        # modules are also in outdir's updates dir
        self.assertEqual(set(os.listdir(outdir+'/'+mod_upd_dir)), modfiles)
        # repeat for firmware
        self.assertEqual(set(os.listdir(fw_upd_dir)), fwfiles)
        self.assertEqual(set(os.listdir(outdir+'/'+fw_upd_dir)), fwfiles)

class LoadDriversTestCase(unittest.TestCase):
    """Tests for load_drivers() with subprocess mocked."""
    @mock.patch("driver_updates.subprocess.call")
    def test_basic(self, call):
        """load_drivers: runs depmod and modprobes all named modules"""
        modnames = ['mod1', 'mod2']
        load_drivers(modnames)
        call.assert_has_calls([
            mock.call(["depmod", "-a"]),
            mock.call(["modprobe", "-a"] + modnames)
        ])
from driver_updates import process_driver_disk
class ProcessDriverDiskTestCase(unittest.TestCase):
    """End-to-end tests for process_driver_disk() with every collaborator
    in driver_updates mocked out."""
    def setUp(self):
        # an iterable that returns fake mountpoints, for mocking mount()
        self.fakemount = ["/mnt/DD-%i" % n for n in range(1,10)]
        # an iterable that returns fake repos, for mocking find_repos()
        self.frepo = {
            '/mnt/DD-1': ['/mnt/DD-1/repo1'],
            '/mnt/DD-2': ['/mnt/DD-2/repo1', '/mnt/DD-2/repo2'],
        }
        # fake iso listings for iso_dir
        self.fiso = {
            '/mnt/DD-1': [],
            '/mnt/DD-2': [],
            '/mnt/DD-3': [],
        }
        # a context-manager object to be returned by the mock mounted()
        mounted_ctx = mock.MagicMock(
            __enter__=mock.MagicMock(side_effect=self.fakemount), # mount
            __exit__=mock.MagicMock(return_value=None),           # umount
        )
        self.modlist = []
        # set up our patches
        patches = (
            mock.patch("driver_updates.mounted", return_value=mounted_ctx),
            mock.patch("driver_updates.find_repos", side_effect=self.frepo.get),
            mock.patch("driver_updates.find_isos", side_effect=self.fiso.get),
            mock.patch("driver_updates.extract_drivers", return_value=True),
            mock.patch("driver_updates.load_drivers"),
            mock.patch('driver_updates.grab_driver_files',
                       side_effect=lambda: self.modlist),
        )
        self.mocks = {p.attribute:p.start() for p in patches}
        for p in patches: self.addCleanup(p.stop)
    def test_basic(self):
        """process_driver_disk: mount disk, extract RPMs, grab + load drivers"""
        dev = '/dev/fake'
        process_driver_disk(dev)
        # did we mount the initial device, and then the .iso we find therein?
        self.mocks['mounted'].assert_called_once_with(dev)
        self.mocks['extract_drivers'].assert_called_once_with(repos=self.frepo['/mnt/DD-1'])
        self.mocks['grab_driver_files'].assert_called_once_with()
        self.mocks['load_drivers'].assert_called_once_with(self.modlist)
    def test_recursive(self):
        """process_driver_disk: recursively process .isos at toplevel"""
        dev = '/dev/fake'
        # first mount has no repos, but an iso
        self.frepo['/mnt/DD-1'] = []
        self.fiso['/mnt/DD-1'].append('magic.iso')
        self.fiso['/mnt/DD-2'].append('ignored.iso')
        process_driver_disk(dev)
        # did we mount the initial device, and the iso therein?
        # also: we ignore ignored.iso because magic.iso is a proper DD
        self.mocks['mounted'].assert_has_calls([
            mock.call(dev), mock.call('magic.iso')
        ])
        # we extracted drivers from the repo(s) in magic.iso
        self.mocks['extract_drivers'].assert_called_once_with(repos=self.frepo['/mnt/DD-2'])
        self.mocks['grab_driver_files'].assert_called_once_with()
        self.mocks['load_drivers'].assert_called_once_with(self.modlist)
    def test_no_drivers(self):
        """process_driver_disk: don't run depmod etc. if no new drivers"""
        dev = '/dev/fake'
        self.mocks['extract_drivers'].return_value = False
        process_driver_disk(dev)
        self.assertFalse(self.mocks['grab_driver_files'].called)
        self.assertFalse(self.mocks['load_drivers'].called)
from driver_updates import process_driver_rpm
class ProcessDriverRPMTestCase(unittest.TestCase):
    """Tests for process_driver_rpm() with collaborators mocked."""
    def setUp(self):
        self.frepo = {
            '/tmp/fake': ['/mnt/DD-1'],
        }
        self.modlist = []
        # set up our patches
        patches = (
            mock.patch("driver_updates.find_repos", side_effect=self.frepo.get),
            mock.patch("driver_updates.extract_drivers", return_value=True),
            mock.patch("driver_updates.load_drivers"),
            mock.patch('driver_updates.grab_driver_files',
                       side_effect=lambda: self.modlist),
        )
        self.mocks = {p.attribute:p.start() for p in patches}
        for p in patches: self.addCleanup(p.stop)
    def test_basic(self):
        """process_driver_rpm: extract RPM, grab + load driver"""
        rpm = '/tmp/fake/driver.rpm'
        process_driver_rpm(rpm)
        self.mocks['extract_drivers'].assert_called_once_with(repos=["/tmp/fake"])
        self.mocks['grab_driver_files'].assert_called_once_with()
        self.mocks['load_drivers'].assert_called_once_with(self.modlist)

from driver_updates import finish, mark_finished, all_finished
class FinishedTestCase(FileTestCaseBase):
    """Tests for the dd_todo/dd_finished bookkeeping helpers."""
    def test_mark_finished(self):
        """mark_finished: appends a line to /tmp/dd_finished"""
        requeststr = "WOW SOMETHING OR OTHER"
        mark_finished(requeststr, topdir=self.tmpdir)
        finished = self.tmpdir+'/dd_finished'
        self.assertTrue(os.path.exists(finished))
        self.assertEqual(read_lines(finished), [requeststr])
    def test_all_finished(self):
        """all_finished: True if all lines from dd_todo are in dd_finished"""
        todo = self.tmpdir+'/dd_todo'
        requests = ['one', 'two', 'final thingy']
        with open(todo, 'w') as outf:
            outf.write(''.join(r+'\n' for r in requests))
        self.assertEqual(set(read_lines(todo)), set(requests))
        for r in reversed(requests):
            self.assertFalse(all_finished(topdir=self.tmpdir))
            mark_finished(r, topdir=self.tmpdir)
        self.assertTrue(all_finished(topdir=self.tmpdir))
    def test_extra_finished(self):
        """all_finished: True if dd_finished has more items than dd_todo"""
        self.test_all_finished()
        mark_finished("BONUS", topdir=self.tmpdir)
        self.assertTrue(all_finished(topdir=self.tmpdir))
    def test_finish(self):
        """finish: mark request finished, and write dd.done if all complete"""
        todo = self.tmpdir+'/dd_todo'
        done = self.tmpdir+'/dd.done'
        requests = ['one', 'two', 'final thingy']
        with open(todo, 'w') as outf:
            outf.write(''.join(r+'\n' for r in requests))
        for r in reversed(requests):
            print("marking %s" % r)
            self.assertFalse(os.path.exists(done))
            finish(r, topdir=self.tmpdir)
        self.assertTrue(os.path.exists(done))
from driver_updates import get_deviceinfo, DeviceInfo
# Canned `blkid -o export`-style output, returned by the mocked
# subprocess.check_output call in DeviceInfoTestCase below.
blkid_out = b'''\
DEVNAME=/dev/sda2
UUID=0f21a3d1-dcd3-4ab4-a292-c5556850d561
TYPE=ext4
DEVNAME=/dev/sda1
UUID=C53C-EE46
TYPE=vfat
DEVNAME=/dev/sda3
UUID=4126dbb6-c7d3-47b4-b1fc-9bb461df0067
TYPE=btrfs
DEVNAME=/dev/loop0
UUID=6f16967e-0388-4276-bd8d-b88e5b217a55
TYPE=ext4
'''
# Fake device -> filesystem-label mapping; includes a label with escaped
# and non-ASCII characters to exercise label decoding.
disk_labels = {
    '/dev/sdb1': 'metroid_srv',
    '/dev/loop0': 'I\\x20\u262d\\x20COMMUNISM',
    '/dev/sda3': 'metroid_root'
}
# Expected DeviceInfo objects produced from blkid_out + disk_labels.
devicelist = [
    DeviceInfo(DEVNAME='/dev/sda2', TYPE='ext4',
               UUID='0f21a3d1-dcd3-4ab4-a292-c5556850d561'),
    DeviceInfo(DEVNAME='/dev/sda1', TYPE='vfat',
               UUID='C53C-EE46'),
    DeviceInfo(DEVNAME='/dev/sda3', TYPE='btrfs', LABEL='metroid_root',
               UUID='4126dbb6-c7d3-47b4-b1fc-9bb461df0067'),
    DeviceInfo(DEVNAME='/dev/loop0', TYPE='ext4',
               LABEL='I\\x20\u262d\\x20COMMUNISM',
               UUID='6f16967e-0388-4276-bd8d-b88e5b217a55'),
]
# also covers blkid, get_disk_labels, DeviceInfo
class DeviceInfoTestCase(unittest.TestCase):
    """Tests for get_deviceinfo() parsing of blkid output and disk labels."""
    @mock.patch('driver_updates.subprocess.check_output',return_value=blkid_out)
    @mock.patch('driver_updates.get_disk_labels',return_value=disk_labels)
    def test_basic(self, get_disk_labels, check_output):
        """get_deviceinfo: parses DeviceInfo from blkid etc."""
        disks = get_deviceinfo()
        self.assertEqual(len(disks), 4)
        # Sort by device path so the expected unpacking order is deterministic.
        disks.sort(key=lambda d: d.device)
        loop, efi, boot, root = disks
        self.assertEqual(vars(boot), vars(devicelist[0]))
        self.assertEqual(vars(efi), vars(devicelist[1]))
        self.assertEqual(vars(root), vars(devicelist[2]))
        self.assertEqual(vars(loop), vars(devicelist[3]))
    def test_shortdev(self):
        # shortdev should resolve symlinks and strip the leading "/dev/".
        d = DeviceInfo(DEVNAME="/dev/disk/by-label/OEMDRV")
        with mock.patch("os.path.realpath", return_value="/dev/i2o/hdb"):
            self.assertEqual(d.shortdev, "i2o/hdb")
# TODO: test TextMenu itself
# py2/3 compat
if sys.version_info.major == 3:
    from io import StringIO
else:
    # Under python 2, captured stdout is byte-oriented, so alias BytesIO.
    from io import BytesIO as StringIO
from driver_updates import device_menu
class DeviceMenuTestCase(unittest.TestCase):
    """Tests for the interactive device_menu() prompt."""
    def setUp(self):
        patches = (
            mock.patch('driver_updates.get_deviceinfo',return_value=devicelist),
        )
        # Mocks keyed by patched attribute name.
        self.mocks = {p.attribute:p.start() for p in patches}
        for p in patches: self.addCleanup(p.stop)
    def test_device_menu_exit(self):
        """device_menu: 'c' exits the menu"""
        with mock.patch('driver_updates._input', side_effect=['c']):
            dev = device_menu()
        self.assertEqual(dev, [])
        self.assertEqual(self.mocks['get_deviceinfo'].call_count, 1)
    def test_device_menu_refresh(self):
        """device_menu: 'r' makes the menu refresh"""
        with mock.patch('driver_updates._input', side_effect=['r','c']):
            device_menu()
        # A refresh re-queries the device list, so two lookups total.
        self.assertEqual(self.mocks['get_deviceinfo'].call_count, 2)
    @mock.patch("sys.stdout", new_callable=StringIO)
    def test_device_menu(self, stdout):
        """device_menu: choosing a number returns that Device"""
        choose_num='2'
        with mock.patch('driver_updates._input', return_value=choose_num):
            result = device_menu()
        # if you hit '2' you should get the corresponding device from the list
        self.assertEqual(len(result), 1)
        dev = result[0]
        self.assertEqual(vars(dev), vars(devicelist[int(choose_num)-1]))
        # find the corresponding line on-screen
        screen = [l.strip() for l in stdout.getvalue().splitlines()]
        match = [l for l in screen if l.startswith(choose_num+')')]
        self.assertEqual(len(match), 1)
        line = match.pop(0)
        # the device name (at least) should be on this line
        self.assertIn(os.path.basename(dev.device), line)
| gpl-2.0 |
airbnb/streamalert | tests/unit/streamalert/shared/test_utils.py | 1 | 4360 | """Tests for streamalert/shared/utils.py"""
import json
from nose.tools import assert_equal, assert_false
from streamalert.shared import utils
from streamalert.shared.normalize import Normalizer
MOCK_RECORD_ID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
def test_valid_ip():
    """Utils - Valid IP"""
    # A well-formed dotted quad is accepted; arbitrary text is rejected.
    assert_equal(utils.valid_ip('127.0.0.1'), True)
    assert_equal(utils.valid_ip('test [1234]'), False)
def test_in_network_invalid_ip():
    """Utils - In Network - Invalid IP"""
    # A non-IP string can never be inside any CIDR range.
    result = utils.in_network('a string that is not an ip', {'10.0.100.0/24'})
    assert_false(result)
def test_in_network_invalid_cidr():
    """Utils - In Network - Invalid CIDR"""
    # A malformed CIDR entry should not produce a match.
    result = utils.in_network('127.0.0.1', {'not a cidr'})
    assert_false(result)
def test_in_network():
    """Utils - In Network"""
    cidrs = {'10.0.16.0/24', '10.0.17.0/24'}
    # An address inside one of the ranges matches ...
    assert_equal(utils.in_network('10.0.16.24', cidrs), True)
    # ... and an address outside all of them does not.
    assert_equal(utils.in_network('10.0.15.24', cidrs), False)
def test_get_first_key():
    """Utils - Get First Key"""
    data = {
        'path': 'ABC',
        'details': {'parent': {'path': 'DEF'}},
        'empty_dict': {},
        'empty_list': [],
        'events': [{'path': 'GHI'}],
    }
    # 'path' is a top-level key and so should always be returned first
    assert_equal('ABC', utils.get_first_key(data, 'path'))
    # dicts and lists can be returned as well
    assert_equal(data['details'], utils.get_first_key(data, 'details'))
    # None is returned by default if no value is found
    assert_equal(None, utils.get_first_key(data, 'no-key-found'))
    # Custom default value is returned if specified
    assert_equal({}, utils.get_first_key(data, 'no-key-found', {}))
def test_get_keys():
    """Utils - Get Keys"""
    data = {
        'path': 'ABC',
        'details': {'parent': {'path': 'DEF'}},
        'empty_dict': {},
        'empty_list': [],
        'events': [{'path': 'GHI'}],
    }
    # All occurrences of 'path' at any depth are collected.
    assert_equal({'ABC', 'DEF', 'GHI'}, set(utils.get_keys(data, 'path')))
    # max_matches caps the number of results returned.
    assert_equal(2, len(utils.get_keys(data, 'path', max_matches=2)))
    # An empty container yields no matches.
    assert_equal([], utils.get_keys({}, 'path'))
def generate_categorized_records(normalized=False, count=2):
    """Generate categorized records by source types"""
    records = []
    for index in range(count):
        records.append({'key_{}'.format(index): 'value_{}'.format(index)})
    if normalized:
        # Attach identical normalization metadata to every record.
        for record in records:
            record[Normalizer.NORMALIZATION_KEY] = {
                'normalized_type1': [
                    {
                        'values': ['value1'],
                        'function': None
                    }
                ],
                'normalized_type2': [
                    {
                        'values': ['value2', 'value3'],
                        'function': None,
                        'send_to_artifacts': True
                    }
                ],
                'normalized_type3': [
                    {
                        'values': ['value2', 'value3'],
                        'function': None,
                        'send_to_artifacts': False
                    }
                ]
            }
    return {
        'log_type_01_sub_type_01': records
    }
def generate_artifacts(firehose_records=False):
    """Generate sample artifacts for unit tests

    Args:
        firehose_records (bool): when True, return the artifacts serialized
            as newline-terminated compact JSON strings (the format written
            to Firehose); otherwise return the artifact dicts themselves.
    """
    # Each of the two records produced by generate_categorized_records()
    # contributes the same three (type, value) pairs, hence the repetition.
    normalized_values = [
        ('normalized_type1', 'value1'),
        ('normalized_type2', 'value2'),
        ('normalized_type2', 'value3'),
    ] * 2
    artifacts = [
        {
            'function': 'None',
            'streamalert_record_id': MOCK_RECORD_ID,
            'source_type': 'log_type_01_sub_type_01',
            # Loop variable renamed from `type` to avoid shadowing the builtin.
            'type': artifact_type,
            'value': value
        } for artifact_type, value in normalized_values
    ]
    if firehose_records:
        # Firehose payloads are newline-delimited compact JSON.
        return [
            json.dumps(artifact, separators=(',', ':')) + '\n'
            for artifact in artifacts
        ]
    return artifacts
| apache-2.0 |
vertcoin/vertcoin | test/functional/rpc_users.py | 6 | 4247 | #!/usr/bin/env python3
# Copyright (c) 2015-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multiple RPC users."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
get_datadir_path,
str_to_b64str,
)
import os
import http.client
import urllib.parse
import subprocess
from random import SystemRandom
import string
import configparser
import sys
def call_with_auth(node, user, password):
    """POST a getbestblockhash RPC request to *node* using HTTP Basic auth.

    Returns the http.client response object (callers inspect .status).
    The connection is always closed, even if the request raises.
    """
    url = urllib.parse.urlparse(node.url)
    headers = {"Authorization": "Basic " + str_to_b64str('{}:{}'.format(user, password))}
    conn = http.client.HTTPConnection(url.hostname, url.port)
    conn.connect()
    try:
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        resp = conn.getresponse()
    finally:
        # Previously the connection leaked if request()/getresponse() raised.
        conn.close()
    return resp
class HTTPBasicsTest(BitcoinTestFramework):
    """Check RPC authentication via rpcauth entries (node 0) and the legacy
    rpcuser/rpcpassword options (node 1)."""
    def set_test_params(self):
        self.num_nodes = 2
        self.supports_cli = False
    def setup_chain(self):
        """Write the auth credentials into each node's bitcoin.conf before start."""
        super().setup_chain()
        #Append rpcauth to bitcoin.conf before initialization
        self.rtpassword = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
        rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
        # Deliberately non-ASCII credentials to cover encoding handling.
        self.rpcuser = "rpcuser💻"
        self.rpcpassword = "rpcpassword🔑"
        config = configparser.ConfigParser()
        config.read_file(open(self.options.configfile))
        # Path to the rpcauth generator helper script shipped with the repo.
        gen_rpcauth = config['environment']['RPCAUTH']
        # Generate RPCAUTH with specified password
        self.rt2password = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
        p = subprocess.Popen([sys.executable, gen_rpcauth, 'rt2', self.rt2password], stdout=subprocess.PIPE, universal_newlines=True)
        lines = p.stdout.read().splitlines()
        # Second output line is the "rpcauth=..." config entry.
        rpcauth2 = lines[1]
        # Generate RPCAUTH without specifying password
        self.user = ''.join(SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(10))
        p = subprocess.Popen([sys.executable, gen_rpcauth, self.user], stdout=subprocess.PIPE, universal_newlines=True)
        lines = p.stdout.read().splitlines()
        rpcauth3 = lines[1]
        # Fourth output line is the password the helper generated for us.
        self.password = lines[3]
        with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f:
            f.write(rpcauth+"\n")
            f.write(rpcauth2+"\n")
            f.write(rpcauth3+"\n")
        with open(os.path.join(get_datadir_path(self.options.tmpdir, 1), "bitcoin.conf"), 'a', encoding='utf8') as f:
            f.write("rpcuser={}\n".format(self.rpcuser))
            f.write("rpcpassword={}\n".format(self.rpcpassword))
    def test_auth(self, node, user, password):
        """Assert that only the exact user/password pair authenticates (200);
        any wrong combination must be rejected with 401."""
        self.log.info('Correct...')
        assert_equal(200, call_with_auth(node, user, password).status)
        self.log.info('Wrong...')
        assert_equal(401, call_with_auth(node, user, password+'wrong').status)
        self.log.info('Wrong...')
        assert_equal(401, call_with_auth(node, user+'wrong', password).status)
        self.log.info('Wrong...')
        assert_equal(401, call_with_auth(node, user+'wrong', password+'wrong').status)
    def run_test(self):
        ##################################################
        # Check correctness of the rpcauth config option #
        ##################################################
        url = urllib.parse.urlparse(self.nodes[0].url)
        self.test_auth(self.nodes[0], url.username, url.password)
        self.test_auth(self.nodes[0], 'rt', self.rtpassword)
        self.test_auth(self.nodes[0], 'rt2', self.rt2password)
        self.test_auth(self.nodes[0], self.user, self.password)
        ###############################################################
        # Check correctness of the rpcuser/rpcpassword config options #
        ###############################################################
        url = urllib.parse.urlparse(self.nodes[1].url)
        self.test_auth(self.nodes[1], self.rpcuser, self.rpcpassword)
# Script entry point: run the test via the shared framework harness.
if __name__ == '__main__':
    HTTPBasicsTest ().main ()
| mit |
Paczesiowa/youtube-dl | youtube_dl/extractor/rtve.py | 58 | 7807 | # encoding: utf-8
from __future__ import unicode_literals
import base64
import re
import time
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
ExtractorError,
float_or_none,
remove_end,
std_headers,
struct_unpack,
)
def _decrypt_url(png):
    """Recover the media URL hidden inside a base64-encoded PNG.

    RTVE serves the real media URL obfuscated in the tEXt chunk of a PNG
    "thumbnail".  The chunk payload has the form "<alphabet>#<digits>": the
    part before '#' encodes a substitution alphabet (real characters
    interleaved with junk) and the part after it is a stream of decimal
    digits indexing into that alphabet.
    """
    encrypted_data = base64.b64decode(png.encode('utf-8'))
    # Locate the PNG tEXt chunk; its 4-byte big-endian length field sits
    # immediately before the chunk type tag.
    text_index = encrypted_data.find(b'tEXt')
    text_chunk = encrypted_data[text_index - 4:]
    length = struct_unpack('!I', text_chunk[:4])[0]
    # Use bytearray to get integers when iterating in both python 2.x and 3.x
    data = bytearray(text_chunk[8:8 + length])
    # NUL bytes are padding and carry no information.
    data = [chr(b) for b in data if b != 0]
    hash_index = data.index('#')
    alphabet_data = data[:hash_index]
    url_data = data[hash_index + 1:]
    alphabet = []
    e = 0
    d = 0
    # Keep one character, then skip a cycling number (1, 2, 3, 0, ...) of
    # junk characters inserted by the obfuscation scheme.
    for l in alphabet_data:
        if d == 0:
            alphabet.append(l)
            d = e = (e + 1) % 4
        else:
            d -= 1
    url = ''
    f = 0
    e = 3
    b = 1
    # Digits come in pairs (tens digit first, then units); the combined value
    # indexes into `alphabet`.  Between pairs, `e` junk digits are skipped,
    # with the skip count derived from the running character count `b`.
    for letter in url_data:
        if f == 0:
            l = int(letter) * 10
            f = 1
        else:
            if e == 0:
                l += int(letter)
                url += alphabet[l]
                e = (b + 3) % 4
                f = 0
                b += 1
            else:
                e -= 1
    return url
class RTVEALaCartaIE(InfoExtractor):
    """Extractor for on-demand ("a la carta") videos on rtve.es."""
    IE_NAME = 'rtve.es:alacarta'
    IE_DESC = 'RTVE a la carta'
    _VALID_URL = r'http://www\.rtve\.es/(m/)?alacarta/videos/[^/]+/[^/]+/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://www.rtve.es/alacarta/videos/balonmano/o-swiss-cup-masculina-final-espana-suecia/2491869/',
        'md5': '1d49b7e1ca7a7502c56a4bf1b60f1b43',
        'info_dict': {
            'id': '2491869',
            'ext': 'mp4',
            'title': 'Balonmano - Swiss Cup masculina. Final: España-Suecia',
            'duration': 5024.566,
        },
    }, {
        'note': 'Live stream',
        'url': 'http://www.rtve.es/alacarta/videos/television/24h-live/1694255/',
        'info_dict': {
            'id': '1694255',
            'ext': 'flv',
            'title': 'TODO',
        },
        'skip': 'The f4m manifest can\'t be used yet',
    }, {
        'url': 'http://www.rtve.es/m/alacarta/videos/cuentame-como-paso/cuentame-como-paso-t16-ultimo-minuto-nuestra-vida-capitulo-276/2969138/?media=tve',
        'only_matching': True,
    }]
    def _real_initialize(self):
        # The "manager" token (needed to build the PNG URL in _real_extract)
        # is issued per User-Agent by the odin/loki endpoint.
        user_agent_b64 = base64.b64encode(std_headers['User-Agent'].encode('utf-8')).decode('utf-8')
        manager_info = self._download_json(
            'http://www.rtve.es/odin/loki/' + user_agent_b64,
            None, 'Fetching manager info')
        self._manager = manager_info['manager']
    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        info = self._download_json(
            'http://www.rtve.es/api/videos/%s/config/alacarta_videos.json' % video_id,
            video_id)['page']['items'][0]
        if info['state'] == 'DESPU':
            # 'DESPU' marks videos that have been taken down.
            raise ExtractorError('The video is no longer available', expected=True)
        # The real media URL is obfuscated inside this PNG; see _decrypt_url().
        png_url = 'http://www.rtve.es/ztnr/movil/thumbnail/%s/videos/%s.png' % (self._manager, video_id)
        png = self._download_webpage(png_url, video_id, 'Downloading url information')
        video_url = _decrypt_url(png)
        if not video_url.endswith('.f4m'):
            # Non-f4m URLs need an extra auth round-trip against the CDN.
            auth_url = video_url.replace(
                'resources/', 'auth/resources/'
            ).replace('.net.rtve', '.multimedia.cdn.rtve')
            video_path = self._download_webpage(
                auth_url, video_id, 'Getting video url')
            # Use mvod1.akcdn instead of flash.akamaihd.multimedia.cdn to get
            # the right Content-Length header and the mp4 format
            video_url = compat_urlparse.urljoin(
                'http://mvod1.akcdn.rtve.es/', video_path)
        subtitles = None
        if info.get('sbtFile') is not None:
            subtitles = self.extract_subtitles(video_id, info['sbtFile'])
        return {
            'id': video_id,
            'title': info['title'],
            'url': video_url,
            'thumbnail': info.get('image'),
            'page_url': url,
            'subtitles': subtitles,
            'duration': float_or_none(info.get('duration'), scale=1000),
        }
    def _get_subtitles(self, video_id, sub_file):
        # Subtitle metadata lives in a sibling .json file listing per-language
        # WebVTT sources.
        subs = self._download_json(
            sub_file + '.json', video_id,
            'Downloading subtitles info')['page']['items']
        return dict(
            (s['lang'], [{'ext': 'vtt', 'url': s['src']}])
            for s in subs)
class RTVEInfantilIE(InfoExtractor):
    """Extractor for children's programming ("infantil") on rtve.es."""
    IE_NAME = 'rtve.es:infantil'
    IE_DESC = 'RTVE infantil'
    _VALID_URL = r'https?://(?:www\.)?rtve\.es/infantil/serie/(?P<show>[^/]*)/video/(?P<short_title>[^/]*)/(?P<id>[0-9]+)/'
    _TESTS = [{
        'url': 'http://www.rtve.es/infantil/serie/cleo/video/maneras-vivir/3040283/',
        'md5': '915319587b33720b8e0357caaa6617e6',
        'info_dict': {
            'id': '3040283',
            'ext': 'mp4',
            'title': 'Maneras de vivir',
            'thumbnail': 'http://www.rtve.es/resources/jpg/6/5/1426182947956.JPG',
            'duration': 357.958,
        },
    }]
    def _real_extract(self, url):
        video_id = self._match_id(url)
        info = self._download_json(
            'http://www.rtve.es/api/videos/%s/config/alacarta_videos.json' % video_id,
            video_id)['page']['items'][0]
        webpage = self._download_webpage(url, video_id)
        # The page embeds the internal player id needed to build the PNG URL.
        vidplayer_id = self._search_regex(
            r' id="vidplayer([0-9]+)"', webpage, 'internal video ID')
        # The media URL is obfuscated inside this PNG; see _decrypt_url().
        png_url = 'http://www.rtve.es/ztnr/movil/thumbnail/default/videos/%s.png' % vidplayer_id
        png = self._download_webpage(png_url, video_id, 'Downloading url information')
        video_url = _decrypt_url(png)
        return {
            'id': video_id,
            'ext': 'mp4',
            'title': info['title'],
            'url': video_url,
            'thumbnail': info.get('image'),
            'duration': float_or_none(info.get('duration'), scale=1000),
        }
class RTVELiveIE(InfoExtractor):
    """Extractor for rtve.es live streams; titles get a UTC timestamp suffix."""
    IE_NAME = 'rtve.es:live'
    IE_DESC = 'RTVE.es live streams'
    _VALID_URL = r'http://www\.rtve\.es/(?:deportes/directo|noticias|television)/(?P<id>[a-zA-Z0-9-]+)'
    _TESTS = [{
        'url': 'http://www.rtve.es/noticias/directo-la-1/',
        'info_dict': {
            'id': 'directo-la-1',
            'ext': 'flv',
            'title': 're:^La 1 de TVE [0-9]{4}-[0-9]{2}-[0-9]{2}Z[0-9]{6}$',
        },
        'params': {
            'skip_download': 'live stream',
        }
    }]
    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        # Capture the extraction time to stamp the (otherwise generic) title.
        start_time = time.gmtime()
        video_id = mobj.group('id')
        webpage = self._download_webpage(url, video_id)
        player_url = self._search_regex(
            r'<param name="movie" value="([^"]+)"/>', webpage, 'player URL')
        title = remove_end(self._og_search_title(webpage), ' en directo')
        title += ' ' + time.strftime('%Y-%m-%dZ%H%M%S', start_time)
        # The page embeds the internal player id needed to build the PNG URL.
        vidplayer_id = self._search_regex(
            r' id="vidplayer([0-9]+)"', webpage, 'internal video ID')
        # The RTMP URL is obfuscated inside this PNG; see _decrypt_url().
        png_url = 'http://www.rtve.es/ztnr/movil/thumbnail/default/videos/%s.png' % vidplayer_id
        png = self._download_webpage(png_url, video_id, 'Downloading url information')
        video_url = _decrypt_url(png)
        return {
            'id': video_id,
            'ext': 'flv',
            'title': title,
            'url': video_url,
            'app': 'rtve-live-live?ovpfv=2.1.2',
            'player_url': player_url,
            'rtmp_live': True,
        }
| unlicense |
PRIMEDesigner15/PRIMEDesigner15 | dependencies/Lib/test/unittests/test_threading_local.py | 167 | 6339 | import unittest
from doctest import DocTestSuite
from test import support
import weakref
import gc
# Modules under test
_thread = support.import_module('_thread')
threading = support.import_module('threading')
import _threading_local
class Weak(object):
    # Placeholder whose instances accept attributes and can be weakly
    # referenced (bare `object` instances support neither).
    pass
def target(local, weaklist):
    """Thread body: stash a fresh Weak instance on *local* and record a
    weak reference to it in *weaklist* so the caller can watch it die."""
    instance = Weak()
    weaklist.append(weakref.ref(instance))
    local.weak = instance
class BaseLocalTest:
def test_local_refs(self):
self._local_refs(20)
self._local_refs(50)
self._local_refs(100)
def _local_refs(self, n):
local = self._local()
weaklist = []
for i in range(n):
t = threading.Thread(target=target, args=(local, weaklist))
t.start()
t.join()
del t
gc.collect()
self.assertEqual(len(weaklist), n)
# XXX _threading_local keeps the local of the last stopped thread alive.
deadlist = [weak for weak in weaklist if weak() is None]
self.assertIn(len(deadlist), (n-1, n))
# Assignment to the same thread local frees it sometimes (!)
local.someothervar = None
gc.collect()
deadlist = [weak for weak in weaklist if weak() is None]
self.assertIn(len(deadlist), (n-1, n), (n, len(deadlist)))
def test_derived(self):
# Issue 3088: if there is a threads switch inside the __init__
# of a threading.local derived class, the per-thread dictionary
# is created but not correctly set on the object.
# The first member set may be bogus.
import time
class Local(self._local):
def __init__(self):
time.sleep(0.01)
local = Local()
def f(i):
local.x = i
# Simply check that the variable is correctly set
self.assertEqual(local.x, i)
threads= []
for i in range(10):
t = threading.Thread(target=f, args=(i,))
t.start()
threads.append(t)
for t in threads:
t.join()
def test_derived_cycle_dealloc(self):
# http://bugs.python.org/issue6990
class Local(self._local):
pass
locals = None
passed = False
e1 = threading.Event()
e2 = threading.Event()
def f():
nonlocal passed
# 1) Involve Local in a cycle
cycle = [Local()]
cycle.append(cycle)
cycle[0].foo = 'bar'
# 2) GC the cycle (triggers threadmodule.c::local_clear
# before local_dealloc)
del cycle
gc.collect()
e1.set()
e2.wait()
# 4) New Locals should be empty
passed = all(not hasattr(local, 'foo') for local in locals)
t = threading.Thread(target=f)
t.start()
e1.wait()
# 3) New Locals should recycle the original's address. Creating
# them in the thread overwrites the thread state and avoids the
# bug
locals = [Local() for i in range(10)]
e2.set()
t.join()
self.assertTrue(passed)
def test_arguments(self):
# Issue 1522237
class MyLocal(self._local):
def __init__(self, *args, **kwargs):
pass
MyLocal(a=1)
MyLocal(1)
self.assertRaises(TypeError, self._local, a=1)
self.assertRaises(TypeError, self._local, 1)
def _test_one_class(self, c):
self._failed = "No error message set or cleared."
obj = c()
e1 = threading.Event()
e2 = threading.Event()
def f1():
obj.x = 'foo'
obj.y = 'bar'
del obj.y
e1.set()
e2.wait()
def f2():
try:
foo = obj.x
except AttributeError:
# This is expected -- we haven't set obj.x in this thread yet!
self._failed = "" # passed
else:
self._failed = ('Incorrectly got value %r from class %r\n' %
(foo, c))
sys.stderr.write(self._failed)
t1 = threading.Thread(target=f1)
t1.start()
e1.wait()
t2 = threading.Thread(target=f2)
t2.start()
t2.join()
# The test is done; just let t1 know it can exit, and wait for it.
e2.set()
t1.join()
self.assertFalse(self._failed, self._failed)
def test_threading_local(self):
self._test_one_class(self._local)
def test_threading_local_subclass(self):
class LocalSubclass(self._local):
"""To test that subclasses behave properly."""
self._test_one_class(LocalSubclass)
def _test_dict_attribute(self, cls):
obj = cls()
obj.x = 5
self.assertEqual(obj.__dict__, {'x': 5})
with self.assertRaises(AttributeError):
obj.__dict__ = {}
with self.assertRaises(AttributeError):
del obj.__dict__
def test_dict_attribute(self):
self._test_dict_attribute(self._local)
def test_dict_attribute_subclass(self):
class LocalSubclass(self._local):
"""To test that subclasses behave properly."""
self._test_dict_attribute(LocalSubclass)
def test_cycle_collection(self):
class X:
pass
x = X()
x.local = self._local()
x.local.x = x
wr = weakref.ref(x)
del x
gc.collect()
self.assertIs(wr(), None)
class ThreadLocalTest(unittest.TestCase, BaseLocalTest):
    # Run the shared suite against the C implementation.
    _local = _thread._local
class PyThreadingLocalTest(unittest.TestCase, BaseLocalTest):
    # Run the shared suite against the pure-Python implementation.
    _local = _threading_local.local
def test_main():
    """Build and run the full suite: both local implementations plus the
    _threading_local doctests (run once per implementation)."""
    suite = unittest.TestSuite()
    suite.addTest(DocTestSuite('_threading_local'))
    suite.addTest(unittest.makeSuite(ThreadLocalTest))
    suite.addTest(unittest.makeSuite(PyThreadingLocalTest))
    # Run the doctests a second time with _threading_local.local swapped for
    # the C implementation, restoring the original afterwards.
    local_orig = _threading_local.local
    def setUp(test):
        _threading_local.local = _thread._local
    def tearDown(test):
        _threading_local.local = local_orig
    suite.addTest(DocTestSuite('_threading_local',
                               setUp=setUp, tearDown=tearDown)
                  )
    support.run_unittest(suite)
# Allow running this test file directly.
if __name__ == '__main__':
    test_main()
| bsd-3-clause |
mephizzle/wagtail | wagtail/wagtailredirects/models.py | 8 | 2622 | from __future__ import unicode_literals
from django.db import models
from django.utils.six.moves.urllib.parse import urlparse
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel, MultiFieldPanel, PageChooserPanel
class Redirect(models.Model):
    """A URL redirect from an old path to either a Wagtail Page or a raw URL."""
    # The incoming path to match; stored normalised (see normalise_path).
    old_path = models.CharField(verbose_name=_("Redirect from"), max_length=255, unique=True, db_index=True)
    # Optional site scoping; a null site means the redirect applies everywhere.
    site = models.ForeignKey('wagtailcore.Site', verbose_name=_('Site'), null=True, blank=True, related_name='redirects', db_index=True, editable=False)
    is_permanent = models.BooleanField(verbose_name=_("Permanent"), default=True, help_text=_("Recommended. Permanent redirects ensure search engines forget the old page (the 'Redirect from') and index the new page instead."))
    # Destination: either a Page or an arbitrary URL (page wins; see link).
    redirect_page = models.ForeignKey('wagtailcore.Page', verbose_name=_("Redirect to a page"), null=True, blank=True)
    redirect_link = models.URLField(verbose_name=_("Redirect to any URL"), blank=True)
    @property
    def title(self):
        """Display title used in the admin; the redirect is named by its path."""
        return self.old_path
    @property
    def link(self):
        """The destination URL; a linked Page takes precedence over a raw URL."""
        if self.redirect_page:
            return self.redirect_page.url
        else:
            return self.redirect_link
    def get_is_permanent_display(self):
        """Human-readable form of is_permanent ("permanent"/"temporary")."""
        if self.is_permanent:
            return "permanent"
        else:
            return "temporary"
    @classmethod
    def get_for_site(cls, site=None):
        """Redirects applying to *site*: site-specific plus site-agnostic ones."""
        if site:
            return cls.objects.filter(models.Q(site=site) | models.Q(site=None))
        else:
            return cls.objects.all()
    @staticmethod
    def normalise_path(url):
        """Canonicalise *url* so that equivalent paths compare equal:
        leading slash, no trailing slash, query params sorted alphabetically.
        NOTE(review): a bare '/' normalises to the empty string here —
        confirm that is intended."""
        # Parse url
        url_parsed = urlparse(url)
        # Path must start with / but not end with /
        path = url_parsed[2]
        if not path.startswith('/'):
            path = '/' + path
        if path.endswith('/'):
            path = path[:-1]
        # Query string components must be sorted alphabetically
        query_string = url_parsed[4]
        query_string_components = query_string.split('&')
        query_string = '&'.join(sorted(query_string_components))
        # Add query string to path
        if query_string:
            path = path + '?' + query_string
        return path
    def clean(self):
        # Normalise old path
        self.old_path = Redirect.normalise_path(self.old_path)
    class Meta:
        verbose_name = _('Redirect')
# Edit-handler layout for the Wagtail admin form.
Redirect.content_panels = [
    MultiFieldPanel([
        FieldPanel('old_path'),
        FieldPanel('is_permanent'),
        PageChooserPanel('redirect_page'),
        FieldPanel('redirect_link'),
    ])
]
| bsd-3-clause |
sivel/ansible-modules-extras | cloud/amazon/ec2_vpc_dhcp_options.py | 23 | 15072 | #!/usr/bin/python
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = """
---
module: ec2_vpc_dhcp_options
short_description: Manages DHCP Options, and can ensure the DHCP options for the given VPC match what's
requested
description:
- This module removes, or creates DHCP option sets, and can associate them to a VPC.
Optionally, a new DHCP Options set can be created that converges a VPC's existing
DHCP option set with values provided.
When dhcp_options_id is provided, the module will
1. remove (with state='absent')
2. ensure tags are applied (if state='present' and tags are provided
3. attach it to a VPC (if state='present' and a vpc_id is provided.
If any of the optional values are missing, they will either be treated
as a no-op (i.e., inherit what already exists for the VPC)
To remove existing options while inheriting, supply an empty value
(e.g. set ntp_servers to [] if you want to remove them from the VPC's options)
Most of the options should be self-explanatory.
author: "Joel Thompson (@joelthompson)"
version_added: 2.1
options:
domain_name:
description:
- The domain name to set in the DHCP option sets
required: false
default: None
dns_servers:
description:
- A list of hosts to set the DNS servers for the VPC to. (Should be a
list of IP addresses rather than host names.)
required: false
default: None
ntp_servers:
description:
- List of hosts to advertise as NTP servers for the VPC.
required: false
default: None
netbios_name_servers:
description:
- List of hosts to advertise as NetBIOS servers.
required: false
default: None
netbios_node_type:
description:
- NetBIOS node type to advertise in the DHCP options.
The AWS recommendation is to use 2 (when using netbios name services)
http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_DHCP_Options.html
required: false
default: None
vpc_id:
description:
- VPC ID to associate with the requested DHCP option set.
If no vpc id is provided, and no matching option set is found then a new
DHCP option set is created.
required: false
default: None
delete_old:
description:
- Whether to delete the old VPC DHCP option set when associating a new one.
This is primarily useful for debugging/development purposes when you
want to quickly roll back to the old option set. Note that this setting
will be ignored, and the old DHCP option set will be preserved, if it
is in use by any other VPC. (Otherwise, AWS will return an error.)
required: false
default: true
inherit_existing:
description:
- For any DHCP options not specified in these parameters, whether to
inherit them from the options set already applied to vpc_id, or to
reset them to be empty.
required: false
default: false
tags:
description:
- Tags to be applied to a VPC options set if a new one is created, or
if the resource_id is provided. (options must match)
required: False
default: None
aliases: [ 'resource_tags']
version_added: "2.1"
dhcp_options_id:
description:
- The resource_id of an existing DHCP options set.
If this is specified, then it will override other settings, except tags
(which will be updated to match)
required: False
default: None
version_added: "2.1"
state:
description:
- create/assign or remove the DHCP options.
If state is set to absent, then a DHCP options set matched either
by id, or tags and options will be removed if possible.
required: False
default: present
choices: [ 'absent', 'present' ]
version_added: "2.1"
extends_documentation_fragment: aws
requirements:
- boto
"""
RETURN = """
new_options:
description: The DHCP options created, associated or found
returned: when appropriate
type: dict
sample:
domain-name-servers:
- 10.0.0.1
- 10.0.1.1
    netbios-name-servers:
- 10.0.0.1
- 10.0.1.1
netbios-node-type: 2
domain-name: "my.example.com"
dhcp_options_id:
  description: The AWS resource id of the primary DHCP options set created, found or removed
type: string
returned: when available
changed:
description: Whether the dhcp options were changed
type: bool
returned: always
"""
EXAMPLES = """
# Completely overrides the VPC DHCP options associated with VPC vpc-123456 and deletes any existing
# DHCP option set that may have been attached to that VPC.
- ec2_vpc_dhcp_options:
domain_name: "foo.example.com"
region: us-east-1
dns_servers:
- 10.0.0.1
- 10.0.1.1
ntp_servers:
- 10.0.0.2
- 10.0.1.2
netbios_name_servers:
- 10.0.0.1
- 10.0.1.1
netbios_node_type: 2
vpc_id: vpc-123456
delete_old: True
inherit_existing: False
# Ensure the DHCP option set for the VPC has 10.0.0.4 and 10.0.1.4 as the specified DNS servers, but
# keep any other existing settings. Also, keep the old DHCP option set around.
- ec2_vpc_dhcp_options:
region: us-east-1
dns_servers:
- "{{groups['dns-primary']}}"
- "{{groups['dns-secondary']}}"
vpc_id: vpc-123456
inherit_existing: True
delete_old: False
## Create a DHCP option set with 4.4.4.4 and 8.8.8.8 as the specified DNS servers, with tags
## but do not assign to a VPC
- ec2_vpc_dhcp_options:
region: us-east-1
dns_servers:
- 4.4.4.4
- 8.8.8.8
tags:
Name: google servers
Environment: Test
## Delete a DHCP options set that matches the tags and options specified
- ec2_vpc_dhcp_options:
region: us-east-1
dns_servers:
- 4.4.4.4
- 8.8.8.8
tags:
Name: google servers
Environment: Test
state: absent
## Associate a DHCP options set with a VPC by ID
- ec2_vpc_dhcp_options:
region: us-east-1
dhcp_options_id: dopt-12345678
vpc_id: vpc-123456
"""
import boto.vpc
import boto.ec2
from boto.exception import EC2ResponseError
import socket
import collections
def get_resource_tags(vpc_conn, resource_id):
    """Return a {tag_name: tag_value} dict for the given EC2 resource."""
    tag_objects = vpc_conn.get_all_tags(filters={'resource-id': resource_id})
    return {tag.name: tag.value for tag in tag_objects}
def ensure_tags(vpc_conn, resource_id, tags, add_only, check_mode):
    """Converge the tags on *resource_id* to match *tags*.

    Deletes extra tags unless add_only is True, then creates any missing
    ones; returns {'changed': bool, 'tags': <resulting tags>}.
    """
    try:
        cur_tags = get_resource_tags(vpc_conn, resource_id)
        if tags == cur_tags:
            return {'changed': False, 'tags': cur_tags}
        to_delete = dict((k, cur_tags[k]) for k in cur_tags if k not in tags)
        if to_delete and not add_only:
            vpc_conn.delete_tags(resource_id, to_delete, dry_run=check_mode)
        to_add = dict((k, tags[k]) for k in tags if k not in cur_tags)
        if to_add:
            vpc_conn.create_tags(resource_id, to_add, dry_run=check_mode)
        # Re-read so the caller sees exactly what AWS now reports.
        latest_tags = get_resource_tags(vpc_conn, resource_id)
        return {'changed': True, 'tags': latest_tags}
    except EC2ResponseError as e:
        # NOTE(review): `module` is not defined in this scope (it is local to
        # main()) and `get_error_message` is not defined anywhere in this
        # file, so this handler raises NameError if it is ever reached —
        # confirm and fix the error reporting path.
        module.fail_json(msg=get_error_message(e.args[2]))
def fetch_dhcp_options_for_vpc(vpc_conn, vpc_id):
    """
    Returns the DHCP options object currently associated with the requested
    VPC ID, or None if the VPC cannot be uniquely resolved, uses the default
    option set, or the option set cannot be uniquely fetched.
    """
    matching_vpcs = vpc_conn.get_all_vpcs(vpc_ids=[vpc_id])
    if len(matching_vpcs) != 1:
        return None
    options_id = matching_vpcs[0].dhcp_options_id
    if options_id == "default":
        return None
    option_sets = vpc_conn.get_all_dhcp_options(dhcp_options_ids=[options_id])
    if len(option_sets) == 1:
        return option_sets[0]
    return None
def match_dhcp_options(vpc_conn, tags=None, options=None):
    """Look for an existing DHCP options set matching *tags* and *options*.

    Either filter may be None, in which case it matches anything.
    Returns (True, options_set) for the first match, else (False, None).
    """
    for candidate in vpc_conn.get_all_dhcp_options():
        # A missing filter counts as a match; tags are checked first so the
        # tag-lookup API call is only made when a tags filter was supplied.
        if tags and get_resource_tags(vpc_conn, candidate.id) != tags:
            continue
        if options and candidate.options != options:
            continue
        return (True, candidate)
    return (False, None)
def remove_dhcp_options_by_id(vpc_conn, dhcp_options_id):
    """Delete the DHCP options set unless a VPC still references it.

    Returns True when the set was deleted, False when it was left in
    place because at least one VPC is still associated with it.
    """
    still_in_use = vpc_conn.get_all_vpcs(
        filters={'dhcpOptionsId': dhcp_options_id})
    if still_in_use:
        return False
    vpc_conn.delete_dhcp_options(dhcp_options_id)
    return True
def main():
    """Ansible entry point: create, associate, or delete an EC2 DHCP options set.

    Two top-level modes:
    * no dhcp_options_id given -- build the desired option dict from the
      module parameters, optionally inheriting from the VPC's current set,
      and reuse or create a matching set;
    * dhcp_options_id given -- only verify it exists and manage its tags.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        dhcp_options_id=dict(type='str', default=None),
        domain_name=dict(type='str', default=None),
        dns_servers=dict(type='list', default=None),
        ntp_servers=dict(type='list', default=None),
        netbios_name_servers=dict(type='list', default=None),
        netbios_node_type=dict(type='int', default=None),
        vpc_id=dict(type='str', default=None),
        delete_old=dict(type='bool', default=True),
        inherit_existing=dict(type='bool', default=False),
        tags=dict(type='dict', default=None, aliases=['resource_tags']),
        state=dict(type='str', default='present', choices=['present', 'absent'])
        )
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    params = module.params
    found = False
    changed = False
    # defaultdict(lambda: None) lets absent option keys compare as None
    # instead of raising KeyError during the equality checks below.
    new_options = collections.defaultdict(lambda: None)
    region, ec2_url, boto_params = get_aws_connection_info(module)
    connection = connect_to_aws(boto.vpc, region, **boto_params)
    existing_options = None
    # First check if we were given a dhcp_options_id
    if not params['dhcp_options_id']:
        # No, so create new_options from the parameters
        if params['dns_servers'] != None:
            new_options['domain-name-servers'] = params['dns_servers']
        if params['netbios_name_servers'] != None:
            new_options['netbios-name-servers'] = params['netbios_name_servers']
        if params['ntp_servers'] != None:
            new_options['ntp-servers'] = params['ntp_servers']
        if params['domain_name'] != None:
            # needs to be a list for comparison with boto objects later
            new_options['domain-name'] = [ params['domain_name'] ]
        if params['netbios_node_type'] != None:
            # needs to be a list for comparison with boto objects later
            new_options['netbios-node-type'] = [ str(params['netbios_node_type']) ]
        # If we were given a vpc_id then we need to look at the options on that
        if params['vpc_id']:
            existing_options = fetch_dhcp_options_for_vpc(connection, params['vpc_id'])
            # if we've been asked to inherit existing options, do that now
            if params['inherit_existing']:
                if existing_options:
                    # Copy over any option the caller left unset (or set to
                    # an empty/blank placeholder) from the current set.
                    for option in [ 'domain-name-servers', 'netbios-name-servers', 'ntp-servers', 'domain-name', 'netbios-node-type']:
                        if existing_options.options.get(option) and new_options[option] != [] and (not new_options[option] or [''] == new_options[option]):
                            new_options[option] = existing_options.options.get(option)
            # Do the vpc's dhcp options already match what we're asked for? if so we are done
            if existing_options and new_options == existing_options.options:
                module.exit_json(changed=changed, new_options=new_options, dhcp_options_id=existing_options.id)
        # If no vpc_id was given, or the options don't match then look for an existing set using tags
        found, dhcp_option = match_dhcp_options(connection, params['tags'], new_options)
    # Now let's cover the case where there are existing options that we were told about by id
    # If a dhcp_options_id was supplied we don't look at options inside, just set tags (if given)
    else:
        supplied_options = connection.get_all_dhcp_options(filters={'dhcp-options-id':params['dhcp_options_id']})
        if len(supplied_options) != 1:
            if params['state'] != 'absent':
                module.fail_json(msg=" a dhcp_options_id was supplied, but does not exist")
        else:
            found = True
            dhcp_option = supplied_options[0]
            if params['state'] != 'absent' and params['tags']:
                ensure_tags(connection, dhcp_option.id, params['tags'], False, module.check_mode)
    # Now we have the dhcp options set, let's do the necessary
    # if we found options we were asked to remove then try to do so
    if params['state'] == 'absent':
        if not module.check_mode:
            if found:
                changed = remove_dhcp_options_by_id(connection, dhcp_option.id)
        module.exit_json(changed=changed, new_options={})
    # otherwise if we haven't found the required options we have something to do
    # NOTE(review): in check_mode nothing below runs, so `dhcp_option` can be
    # unbound at the final exit_json and `changed` is never reported -- verify.
    elif not module.check_mode and not found:
        # create some dhcp options if we weren't able to use existing ones
        if not found:
            # Convert netbios-node-type and domain-name back to strings
            if new_options['netbios-node-type']:
                new_options['netbios-node-type'] = new_options['netbios-node-type'][0]
            if new_options['domain-name']:
                new_options['domain-name'] = new_options['domain-name'][0]
            # create the new dhcp options set requested
            dhcp_option = connection.create_dhcp_options(
                new_options['domain-name'],
                new_options['domain-name-servers'],
                new_options['ntp-servers'],
                new_options['netbios-name-servers'],
                new_options['netbios-node-type'])
            changed = True
            if params['tags']:
                ensure_tags(connection, dhcp_option.id, params['tags'], False, module.check_mode)
    # If we were given a vpc_id, then attach the options we now have to that before we finish
    if params['vpc_id'] and not module.check_mode:
        changed = True
        connection.associate_dhcp_options(dhcp_option.id, params['vpc_id'])
        # and remove old ones if that was requested
        if params['delete_old'] and existing_options:
            remove_dhcp_options_by_id(connection, existing_options.id)
    module.exit_json(changed=changed, new_options=new_options, dhcp_options_id=dhcp_option.id)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
# Run the module entry point only when Ansible executes this file directly.
if __name__ == "__main__":
    main()
| gpl-3.0 |
nclsHart/glances | glances/plugins/glances_monitor.py | 1 | 4261 | # -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# Copyright (C) 2015 Nicolargo <nicolas@nicolargo.com>
#
# Glances is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Glances is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Monitor plugin."""
# Import Glances lib
from glances.core.glances_logging import logger
from glances.core.glances_monitor_list import MonitorList as glancesMonitorList
from glances.plugins.glances_plugin import GlancesPlugin
class Plugin(GlancesPlugin):

    """Glances' monitor plugin.

    Wraps a MonitorList (the [monitor] section of the Glances config) and
    renders each monitored process line in the curses interface.
    """

    def __init__(self, args=None):
        """Init the plugin and enable its curses display."""
        GlancesPlugin.__init__(self, args=args)

        # We want to display the stat in the curse interface
        self.display_curse = True

        # Init stats
        # glances_monitors stays None until load_limits() is called with a
        # configuration object.
        self.glances_monitors = None
        self.stats = []

    def load_limits(self, config):
        """Load the monitored list from the conf file."""
        logger.debug("Monitor plugin configuration detected in the configuration file")
        self.glances_monitors = glancesMonitorList(config)

    def update(self):
        """Update the monitored list and return the refreshed stats."""
        if self.get_input() == 'local':
            # Monitor list only available in a full Glances environment
            # Check if the glances_monitor instance is init
            if self.glances_monitors is None:
                return self.stats

            # Update the monitored list (result of command)
            self.glances_monitors.update()

            # Put it on the stats var
            self.stats = self.glances_monitors.get()
        else:
            # Non-local inputs (e.g. client/server mode) are not refreshed here.
            pass

        return self.stats

    def get_alert(self, nbprocess=0, countmin=None, countmax=None, header="", log=False):
        """Return the alert status relative to the process number.

        'OK' when nbprocess is within [countmin, countmax]; 'WARNING' when
        processes are running but outside the bounds; 'CRITICAL' when none
        are running although countmin > 0.  Missing bounds default to
        nbprocess itself, which always yields 'OK'.
        """
        if nbprocess is None:
            return 'OK'
        if countmin is None:
            countmin = nbprocess
        if countmax is None:
            countmax = nbprocess
        if nbprocess > 0:
            if int(countmin) <= int(nbprocess) <= int(countmax):
                return 'OK'
            else:
                return 'WARNING'
        else:
            if int(countmin) == 0:
                return 'OK'
            else:
                return 'CRITICAL'

    def msg_curse(self, args=None):
        """Return the list of curses lines to display for the monitor list."""
        # Init the return message
        ret = []

        # Only process if stats exist and display plugin enable...
        if not self.stats or args.disable_process:
            return ret

        # Build the string message
        # NOTE(review): _() appears to be the gettext translation function
        # installed globally by the Glances i18n setup -- confirm.
        for m in self.stats:
            msg = '{0:<16} '.format(m['description'])
            ret.append(self.curse_add_line(
                msg, self.get_alert(m['count'], m['countmin'], m['countmax'])))
            msg = '{0:<3} '.format(m['count'] if m['count'] > 1 else '')
            ret.append(self.curse_add_line(msg))
            msg = '{0:13} '.format(_("RUNNING") if m['count'] >= 1 else _("NOT RUNNING"))
            ret.append(self.curse_add_line(msg))
            # Decode to UTF8 (only for Python 3)
            try:
                msg = m['result'].decode('utf-8') if m['count'] >= 1 else ''
            except (UnicodeError, AttributeError):
                try:
                    msg = m['result'] if m['count'] >= 1 else ''
                except UnicodeError:
                    msg = m['result'].encode('utf-8') if m['count'] >= 1 else ''
            ret.append(self.curse_add_line(msg, optional=True, splittable=True))
            ret.append(self.curse_new_line())

        # Delete the last empty line
        try:
            ret.pop()
        except IndexError:
            pass

        return ret
| lgpl-3.0 |
adit-chandra/tensorflow | tensorflow/python/keras/optimizer_v2/nadam.py | 6 | 10023 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Nadam for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend_config
from tensorflow.python.keras.optimizer_v2 import learning_rate_schedule
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.optimizers.Nadam')
class Nadam(optimizer_v2.OptimizerV2):
  r"""Optimizer that implements the NAdam algorithm.

  Much like Adam is essentially RMSprop with momentum, Nadam is Adam with
  Nesterov momentum.

  Initialization:

  $$m_0 := 0 \text{(Initialize 1st moment vector)}$$
  $$v_0 := 0 \text{(Initialize 2nd moment vector)}$$
  $$mu_0 := 1$$
  $$t := 0 \text{(Initialize timestep)}$$

  Computes:
  $$t := t + 1$$
  $$\mu_t := \beta_1 * (1 - 0.5 * 0.96^{0.004 * t})$$
  $$g' := g / (1 - \prod_{i=1}^{t}{\mu_i})$$
  $$m_t := \beta_1 * m_{t-1} + (1 - \beta_1) * g$$
  $$m' := m_t / (1 - \prod_{i=1}^{t+1}{\mu_i})$$
  $$v_t := \beta_2 * v_{t-1} + (1 - \beta_2) * g * g$$
  $$v' := v_t / (1 - \beta_2^t)$$
  $$\bar{m} := (1 - \mu_t) * g' + \mu_{t+1} * m'$$
  $$\theta_t := \theta_{t-1} - lr * \bar{m} / (\sqrt{v'} + \epsilon)$$

  gradient is evaluated at theta(t) + momentum * v(t), and the variables always
  store theta + beta_1 * m / sqrt(v) instead of theta.

  References
    See [Dozat, T., 2015](http://cs229.stanford.edu/proj2015/054_report.pdf).
  """

  def __init__(self,
               learning_rate=0.001,
               beta_1=0.9,
               beta_2=0.999,
               epsilon=1e-7,
               name='Nadam',
               **kwargs):
    """Construct a new Nadam optimizer.

    Args:
      learning_rate: A Tensor or a floating point value. The learning rate.
      beta_1: A float value or a constant float tensor. The exponential decay
        rate for the 1st moment estimates.
      beta_2: A float value or a constant float tensor. The exponential decay
        rate for the exponentially weighted infinity norm.
      epsilon: A small constant for numerical stability.
      name: Optional name for the operations created when applying gradients.
        Defaults to "Nadam".
      **kwargs: keyword arguments. Allowed to be {`clipnorm`, `clipvalue`, `lr`,
        `decay`}. `clipnorm` is clip gradients by norm; `clipvalue` is clip
        gradients by value, `decay` is included for backward compatibility to
        allow time inverse decay of learning rate. `lr` is included for backward
        compatibility, recommended to use `learning_rate` instead.
    """
    # Backwards compatibility with keras NAdam optimizer.
    kwargs['decay'] = kwargs.pop('schedule_decay', 0.004)
    learning_rate = kwargs.get('lr', learning_rate)
    if isinstance(learning_rate, learning_rate_schedule.LearningRateSchedule):
      raise ValueError('The Nadam optimizer does not support '
                       'tf.keras.optimizers.LearningRateSchedules as the '
                       'learning rate.')

    super(Nadam, self).__init__(name, **kwargs)
    self._set_hyper('learning_rate', kwargs.get('lr', learning_rate))
    self._set_hyper('decay', self._initial_decay)
    self._set_hyper('beta_1', beta_1)
    self._set_hyper('beta_2', beta_2)
    self.epsilon = epsilon or backend_config.epsilon()
    # Scalar variable caching the running product of mu_t (the momentum
    # schedule); created lazily in _create_slots.
    self._m_cache = None

  def _create_slots(self, var_list):
    """Create the momentum-schedule cache plus per-variable m/v slots."""
    var_dtype = var_list[0].dtype.base_dtype
    if self._m_cache is None:
      self._m_cache = self.add_weight(
          'momentum_cache',
          shape=[],
          dtype=var_dtype,
          initializer='ones',
          trainable=False,
          aggregation=tf_variables.VariableAggregation.ONLY_FIRST_REPLICA)
      self._weights.append(self._m_cache)
    # Separate for-loops to respect the ordering of slot variables from v1.
    for var in var_list:
      # Create slots for the first moments.
      self.add_slot(var, 'm')
    for var in var_list:
      # Create slots for the second moments.
      self.add_slot(var, 'v')

  def _prepare_local(self, var_device, var_dtype, apply_state):
    """Precompute per-(device, dtype) coefficients used by the update rules."""
    lr_t = array_ops.identity(self._get_hyper('learning_rate', var_dtype))
    beta_1_t = array_ops.identity(self._get_hyper('beta_1', var_dtype))
    beta_2_t = array_ops.identity(self._get_hyper('beta_2', var_dtype))
    local_step = math_ops.cast(self.iterations + 1, var_dtype)
    next_step = math_ops.cast(self.iterations + 2, var_dtype)

    decay_base = math_ops.cast(0.96, var_dtype)

    # mu_t and mu_{t+1} of the momentum schedule (see class docstring).
    m_t = beta_1_t * (1. - 0.5 * (
        math_ops.pow(decay_base, self._initial_decay * local_step)))
    m_t_1 = beta_1_t * (1. - 0.5 * (
        math_ops.pow(decay_base, self._initial_decay * next_step)))

    m_schedule_new = math_ops.cast(self._m_cache_read, var_dtype) * m_t
    # Only the dtype that owns the cache variable writes the updated
    # schedule back, so the product is advanced exactly once per step.
    if var_dtype is self._m_cache.dtype:
      m_schedule_new = array_ops.identity(state_ops.assign(
          self._m_cache, m_schedule_new, use_locking=self._use_locking))
    m_schedule_next = m_schedule_new * m_t_1

    apply_state[(var_device, var_dtype)] = dict(
        lr_t=lr_t,
        neg_lr_t=-lr_t,
        epsilon=ops.convert_to_tensor(self.epsilon, var_dtype),
        beta_1_t=beta_1_t,
        beta_2_t=beta_2_t,
        m_t=m_t,
        m_t_1=m_t_1,
        one_minus_beta_1_t=1 - beta_1_t,
        one_minus_beta_2_t=1 - beta_2_t,
        one_minus_m_t=1. - m_t,
        one_minus_m_schedule_new=1. - m_schedule_new,
        one_minus_m_schedule_next=1. - m_schedule_next,
        v_t_prime_denominator=1. - math_ops.pow(beta_2_t, local_step),
    )

  def _prepare(self, var_list):
    # Get the value of the momentum cache before starting to apply gradients.
    self._m_cache_read = array_ops.identity(self._m_cache)
    return super(Nadam, self)._prepare(var_list)

  def _resource_apply_dense(self, grad, var, apply_state=None):
    """Apply the Nadam update to a dense variable."""
    var_device, var_dtype = var.device, var.dtype.base_dtype
    coefficients = ((apply_state or {}).get((var_device, var_dtype))
                    or self._fallback_apply_state(var_device, var_dtype))

    m = self.get_slot(var, 'm')
    v = self.get_slot(var, 'v')

    g_prime = grad / coefficients['one_minus_m_schedule_new']
    m_t = (coefficients['beta_1_t'] * m +
           coefficients['one_minus_beta_1_t'] * grad)
    m_t = state_ops.assign(m, m_t, use_locking=self._use_locking)
    m_t_prime = m_t / coefficients['one_minus_m_schedule_next']
    v_t = (coefficients['beta_2_t'] * v +
           coefficients['one_minus_beta_2_t'] * math_ops.square(grad))
    v_t = state_ops.assign(v, v_t, use_locking=self._use_locking)
    v_t_prime = v_t / coefficients['v_t_prime_denominator']
    # Nesterov-style combination of the bias-corrected gradient and moment.
    m_t_bar = (coefficients['one_minus_m_t'] * g_prime +
               coefficients['m_t_1'] * m_t_prime)
    var_t = var - coefficients['lr_t'] * m_t_bar / (
        math_ops.sqrt(v_t_prime) + coefficients['epsilon'])
    return state_ops.assign(var, var_t, use_locking=self._use_locking).op

  def _resource_apply_sparse(self, grad, var, indices, apply_state=None):
    """Apply the Nadam update to the given sparse slices of a variable."""
    var_device, var_dtype = var.device, var.dtype.base_dtype
    coefficients = ((apply_state or {}).get((var_device, var_dtype))
                    or self._fallback_apply_state(var_device, var_dtype))

    m = self.get_slot(var, 'm')
    v = self.get_slot(var, 'v')

    g_prime = grad / coefficients['one_minus_m_schedule_new']

    # m_t = beta1 * m + (1 - beta1) * g_t
    m_scaled_g_values = grad * coefficients['one_minus_beta_1_t']
    m_t = state_ops.assign(m, m * coefficients['beta_1_t'],
                           use_locking=self._use_locking)

    with ops.control_dependencies([m_t]):
      m_t = self._resource_scatter_add(m, indices, m_scaled_g_values)
      m_t_slice = array_ops.gather(m_t, indices)

    m_t_prime = m_t_slice / coefficients['one_minus_m_schedule_next']
    m_t_bar = (coefficients['one_minus_m_t'] * g_prime +
               coefficients['m_t_1'] * m_t_prime)

    # v_t = beta2 * v + (1 - beta2) * (g_t * g_t)
    v_scaled_g_values = (grad * grad) * coefficients['one_minus_beta_2_t']
    v_t = state_ops.assign(v, v * coefficients['beta_2_t'],
                           use_locking=self._use_locking)

    with ops.control_dependencies([v_t]):
      v_t = self._resource_scatter_add(v, indices, v_scaled_g_values)
      v_t_slice = array_ops.gather(v_t, indices)

    v_t_prime = v_t_slice / coefficients['v_t_prime_denominator']
    v_prime_sqrt_plus_eps = math_ops.sqrt(v_t_prime) + coefficients['epsilon']

    var_update = self._resource_scatter_add(
        var, indices,
        coefficients['neg_lr_t'] * m_t_bar / v_prime_sqrt_plus_eps)
    return control_flow_ops.group(*[var_update, m_t_bar, v_t])

  def get_config(self):
    """Return the serializable optimizer configuration."""
    config = super(Nadam, self).get_config()
    config.update({
        'learning_rate': self._serialize_hyperparameter('learning_rate'),
        'decay': self._serialize_hyperparameter('decay'),
        'beta_1': self._serialize_hyperparameter('beta_1'),
        'beta_2': self._serialize_hyperparameter('beta_2'),
        'epsilon': self.epsilon,
    })
    return config
| apache-2.0 |
harshilasu/GraphicMelon | y/google-cloud-sdk/platform/gsutil/third_party/boto/boto/cloudfront/identity.py | 170 | 4483 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import uuid
class OriginAccessIdentity(object):
    """A CloudFront origin access identity (OAI) parsed from the XML API.

    Instances are populated by boto's SAX handler via startElement /
    endElement and can update or delete themselves through the attached
    connection.
    """

    def __init__(self, connection=None, config=None, id='',
                 s3_user_id='', comment=''):
        self.connection = connection
        self.config = config
        self.id = id
        self.s3_user_id = s3_user_id
        self.comment = comment
        self.etag = None

    def startElement(self, name, attrs, connection):
        # Only the embedded config element needs a dedicated sub-parser;
        # every other element is handled flat in endElement.
        if name != 'CloudFrontOriginAccessIdentityConfig':
            return None
        self.config = OriginAccessIdentityConfig()
        return self.config

    def endElement(self, name, value, connection):
        # Map known XML element names onto attribute names; unknown
        # elements are stored verbatim.
        attr_for = {'Id': 'id',
                    'S3CanonicalUserId': 's3_user_id',
                    'Comment': 'comment'}
        setattr(self, attr_for.get(name, name), value)

    def update(self, comment=None):
        """Push a (possibly re-commented) config to CloudFront and refresh the etag."""
        replacement = OriginAccessIdentityConfig(self.connection,
                                                 self.config.caller_reference,
                                                 self.config.comment)
        if comment is not None:
            replacement.comment = comment
        self.etag = self.connection.set_origin_identity_config(self.id, self.etag, replacement)
        self.config = replacement

    def delete(self):
        """Delete this identity via the service connection."""
        return self.connection.delete_origin_access_identity(self.id, self.etag)

    def uri(self):
        """Return the canonical OAI URI used in distribution configs."""
        return 'origin-access-identity/cloudfront/%s' % self.id
class OriginAccessIdentityConfig(object):
    """Configuration payload for a CloudFront origin access identity.

    When no *caller_reference* is supplied a random UUID4 string is
    generated so each creation request is unique.
    """

    def __init__(self, connection=None, caller_reference='', comment=''):
        self.connection = connection
        # Falsy caller_reference -> generate a fresh unique reference.
        self.caller_reference = caller_reference or str(uuid.uuid4())
        self.comment = comment

    def to_xml(self):
        """Serialize this config to the CloudFront XML request body."""
        lines = [
            '<?xml version="1.0" encoding="UTF-8"?>\n',
            '<CloudFrontOriginAccessIdentityConfig xmlns="http://cloudfront.amazonaws.com/doc/2009-09-09/">\n',
            '  <CallerReference>%s</CallerReference>\n' % self.caller_reference,
        ]
        if self.comment:
            lines.append('  <Comment>%s</Comment>\n' % self.comment)
        lines.append('</CloudFrontOriginAccessIdentityConfig>\n')
        return ''.join(lines)

    def startElement(self, name, attrs, connection):
        return None

    def endElement(self, name, value, connection):
        # Known elements map onto snake_case attributes; anything else is
        # stored under its raw element name.
        attr_for = {'Comment': 'comment',
                    'CallerReference': 'caller_reference'}
        setattr(self, attr_for.get(name, name), value)
class OriginAccessIdentitySummary(object):
    """One entry of a CloudFront OAI listing response.

    Holds just the id, canonical S3 user id and comment; the full record
    can be fetched on demand via get_origin_access_identity().
    """

    def __init__(self, connection=None, id='',
                 s3_user_id='', comment=''):
        self.connection = connection
        self.id = id
        self.s3_user_id = s3_user_id
        self.comment = comment
        self.etag = None

    def startElement(self, name, attrs, connection):
        return None

    def endElement(self, name, value, connection):
        attr_for = {'Id': 'id',
                    'S3CanonicalUserId': 's3_user_id',
                    'Comment': 'comment'}
        setattr(self, attr_for.get(name, name), value)

    def get_origin_access_identity(self):
        """Fetch the full OriginAccessIdentity record for this summary."""
        return self.connection.get_origin_access_identity_info(self.id)
| gpl-3.0 |
mattjmcnaughton/kubernetes | cluster/juju/layers/kubernetes-e2e/reactive/kubernetes_e2e.py | 168 | 8148 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charms import layer
from charms.layer import snap
from charms.reactive import hook
from charms.reactive import is_state
from charms.reactive import set_state
from charms.reactive import when
from charms.reactive import when_not
from charms.reactive.helpers import data_changed
from charmhelpers.core import hookenv, unitdata
from shlex import split
from subprocess import check_call
from subprocess import check_output
db = unitdata.kv()
USER = 'system:e2e'
@hook('upgrade-charm')
def reset_delivery_states():
    ''' Remove the state set when resources are unpacked. '''
    # NOTE(review): despite the docstring, the body (re)installs the snaps
    # on charm upgrade rather than clearing any state -- confirm intent.
    install_snaps()
@when('kubernetes-e2e.installed')
def report_status():
    ''' Report the status of the charm by re-evaluating the relations. '''
    messaging()
def messaging():
    ''' Set the unit's workload status from the relations we still need.

    Blocks with a list of missing relations, or reports ready when all
    prerequisite states are present. '''
    required = [
        ('kubernetes-master.available', 'kubernetes-master:http'),
        ('certificates.available', 'certificates'),
        ('kubeconfig.ready', 'kubernetes-master:kube-control'),
    ]
    missing = [relation for state, relation in required
               if not is_state(state)]

    if missing:
        subject = 'relations' if len(missing) > 1 else 'relation'
        message = 'Missing {0}: {1}'.format(subject, ','.join(missing))
        hookenv.status_set('blocked', message)
        return

    hookenv.status_set('active', 'Ready to test.')
@when('config.changed.channel')
def channel_changed():
    ''' Reinstall the snaps whenever the configured snap channel changes. '''
    install_snaps()
def install_snaps():
    ''' Install the kubectl and kubernetes-test snaps from the configured
    channel, then mark the charm as installed. '''
    channel = hookenv.config('channel')
    for status_msg, snap_name in (
            ('Installing kubectl snap', 'kubectl'),
            ('Installing kubernetes-test snap', 'kubernetes-test')):
        hookenv.status_set('maintenance', status_msg)
        snap.install(snap_name, channel=channel, classic=True)
    set_state('kubernetes-e2e.installed')
@when('tls_client.ca.saved', 'tls_client.client.certificate.saved',
      'tls_client.client.key.saved', 'kubernetes-master.available',
      'kubernetes-e2e.installed', 'e2e.auth.bootstrapped')
@when_not('kubeconfig.ready')
def prepare_kubeconfig_certificates(master):
    ''' Prepare the data to feed to create the kubeconfig file.

    Writes token-based kubeconfig files for both root and ubuntu using the
    cached credentials and the first master endpoint, then marks
    kubeconfig.ready. '''
    layer_options = layer.options('tls-client')
    # Get all the paths to the tls information required for kubeconfig.
    ca = layer_options.get('ca_certificate_path')
    creds = db.get('credentials')
    # Prime the data_changed cache so a later identical credential set is
    # not reported as a change.
    data_changed('kube-control.creds', creds)
    servers = get_kube_api_servers(master)

    # pedantry
    kubeconfig_path = '/home/ubuntu/.kube/config'

    # Create kubernetes configuration in the default location for ubuntu.
    create_kubeconfig('/root/.kube/config', servers[0], ca,
                      token=creds['client_token'], user='root')
    create_kubeconfig(kubeconfig_path, servers[0], ca,
                      token=creds['client_token'], user='ubuntu')
    # Set permissions on the ubuntu users kubeconfig to ensure a consistent UX
    cmd = ['chown', 'ubuntu:ubuntu', kubeconfig_path]
    check_call(cmd)
    messaging()
    set_state('kubeconfig.ready')
@when('kube-control.connected')
def request_credentials(kube_control):
    """ Request authorization creds from the master over kube-control."""
    # Ask for a user, although we will be using the 'client_token'
    kube_control.set_auth_request(USER)
@when('kube-control.auth.available')
def catch_change_in_creds(kube_control):
    """Cache refreshed credentials whenever the master hands out new ones."""
    creds = kube_control.get_auth_credentials(USER)
    # Guard order mirrors the original short-circuited `and` chain:
    # data_changed() records the value it is given, so it must run before
    # the user check.
    if not creds:
        return
    if not data_changed('kube-control.creds', creds):
        return
    if creds['user'] != USER:
        return
    # We need to cache the credentials here because if the
    # master changes (master leader dies and replaced by a new one)
    # the new master will have no recollection of our certs.
    db.set('credentials', creds)
    set_state('e2e.auth.bootstrapped')
@when('kubernetes-e2e.installed', 'kubeconfig.ready')
def set_app_version():
    ''' Surface the kubectl client version as the juju application version. '''
    from subprocess import CalledProcessError
    try:
        raw = check_output(['kubectl', 'version', '--client']).decode('utf-8')
    except CalledProcessError:
        hookenv.log("Missing kubeconfig causes errors. Skipping version set.")
        return
    # Output contains e.g. GitVersion:"v1.9.0", -- keep the text between
    # the leading 'v' and the closing quote/comma.
    git_version = raw.split('GitVersion:"v')[-1]
    version_from = git_version.split('",')[0]
    hookenv.application_version_set(version_from.rstrip())
def create_kubeconfig(kubeconfig, server, ca, key=None, certificate=None,
                      user='ubuntu', context='juju-context',
                      cluster='juju-cluster', password=None, token=None):
    '''Write a kubeconfig file at *kubeconfig* via a series of kubectl
    config invocations, wiring up the cluster, credentials (client cert,
    password or token), context and default context.

    Raises ValueError when no authentication mechanism is given, or when
    both token and password are supplied.'''
    if not key and not certificate and not password and not token:
        raise ValueError('Missing authentication mechanism.')

    # token and password are mutually exclusive. Error early if both are
    # present. The developer has requested an impossible situation.
    # see: kubectl config set-credentials --help
    if token and password:
        raise ValueError('Token and Password are mutually exclusive.')

    # Register the cluster endpoint and CA (embedded) in the config file.
    cluster_cmd = ('kubectl config --kubeconfig={0} set-cluster {1} '
                   '--server={2} --certificate-authority={3} '
                   '--embed-certs=true').format(kubeconfig, cluster, server, ca)
    check_call(split(cluster_cmd))

    # Drop stale user entries so credentials are rebuilt from scratch.
    check_call(split(
        'kubectl config --kubeconfig={0} unset users'.format(kubeconfig)))

    # Assemble the credentials command from whichever mechanisms were given.
    creds_cmd = 'kubectl config --kubeconfig={0} set-credentials {1} '.format(
        kubeconfig, user)
    if key and certificate:
        creds_cmd = ('{0} --client-key={1} --client-certificate={2} '
                     '--embed-certs=true').format(creds_cmd, key, certificate)
    if password:
        creds_cmd = "{0} --username={1} --password={2}".format(
            creds_cmd, user, password)
    # This is mutually exclusive from password (validated above).
    if token:
        creds_cmd = "{0} --token={1}".format(creds_cmd, token)
    check_call(split(creds_cmd))

    # Create a default context tying the cluster to the user, then select it.
    context_cmd = ('kubectl config --kubeconfig={0} set-context {1} '
                   '--cluster={2} --user={3}').format(kubeconfig, context,
                                                      cluster, user)
    check_call(split(context_cmd))
    check_call(split('kubectl config --kubeconfig={0} use-context {1}'.format(
        kubeconfig, context)))
def get_kube_api_servers(master):
    '''Return the https endpoint for every kubernetes-master unit on this
    relationship, as a list of "https://host:port" strings.'''
    endpoints = []
    for service in master.services():
        endpoints.extend(
            'https://{0}:{1}'.format(unit['hostname'], unit['port'])
            for unit in service['hosts'])
    return endpoints
def determine_arch():
    ''' Ask dpkg which architecture this unit is tied to and return it
    without the trailing newline. '''
    raw = check_output(['dpkg', '--print-architecture'])
    return raw.decode('utf-8').rstrip()
| apache-2.0 |
minorua/QGIS | tests/src/python/test_qgsvirtuallayerdefinition.py | 28 | 4276 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsVirtualLayerDefinition
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Hugo Mercier'
__date__ = '10/12/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
import qgis # NOQA
from qgis.core import (QgsField,
QgsWkbTypes,
QgsFields,
QgsVirtualLayerDefinition
)
from qgis.testing import unittest
from qgis.PyQt.QtCore import QVariant, QUrl
import os
def strToUrl(s):
    """Build a QUrl from *s*, treating the string as already percent-encoded."""
    encoded = s.encode("utf8")
    return QUrl.fromEncoded(encoded)
class TestQgsVirtualLayerDefinition(unittest.TestCase):
    """Round-trip tests for QgsVirtualLayerDefinition URL serialisation."""

    def test1(self):
        """Every definition field must survive both toUrl() and toString() round trips."""
        d = QgsVirtualLayerDefinition()
        self.assertEqual(d.toString(), "")

        # File path (POSIX and Windows-style); the Windows fromUrl check is
        # intentionally disabled below.
        d.setFilePath("/file")
        self.assertEqual(d.toString(), "file:///file")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).filePath(), "/file")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).filePath(), "/file")
        d.setFilePath(os.path.join('C:/', 'file'))
        self.assertEqual(d.toString(), "file:///C:/file")
        # self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).filePath(), os.path.join('C:/', 'file'))

        # Query text, including non-ASCII characters and a /*:int*/ type hint.
        d.setQuery("SELECT * FROM mytable")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).query(), "SELECT * FROM mytable")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).query(), "SELECT * FROM mytable")

        q = "SELECT * FROM tableéé /*:int*/"
        d.setQuery(q)
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).query(), q)
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).query(), q)

        # Source layers: by source string (with encoding), by name with
        # non-ASCII characters, by layer reference, and with an embedded
        # spatialite connection string.
        s1 = "file://foo&bar=okié"
        d.addSource("name", s1, "provider", "utf8")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).sourceLayers()[0].source(), s1)
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).sourceLayers()[0].source(), s1)

        n1 = "éé ok"
        d.addSource(n1, s1, "provider")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).sourceLayers()[1].name(), n1)
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).sourceLayers()[1].name(), n1)

        d.addSource("ref1", "id0001")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).sourceLayers()[2].reference(), "id0001")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).sourceLayers()[2].reference(), "id0001")

        s = "dbname='C:\\tt' table=\"test\" (geometry) sql="
        d.addSource("nn", s, "spatialite")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).sourceLayers()[3].source(), s)
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).sourceLayers()[3].source(), s)

        # Geometry column name and WKB type.
        d.setGeometryField("geom")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).geometryField(), "geom")
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).geometryField(), "geom")

        d.setGeometryWkbType(QgsWkbTypes.Point)
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).geometryWkbType(), QgsWkbTypes.Point)
        self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).geometryWkbType(), QgsWkbTypes.Point)

        # Field definitions: names and types must round trip one by one.
        f = QgsFields()
        f.append(QgsField("a", QVariant.Int))
        f.append(QgsField("f", QVariant.Double))
        f.append(QgsField("s", QVariant.String))
        d.setFields(f)
        f2 = QgsVirtualLayerDefinition.fromUrl(d.toUrl()).fields()
        self.assertEqual(f[0].name(), f2[0].name())
        self.assertEqual(f[0].type(), f2[0].type())
        self.assertEqual(f[1].name(), f2[1].name())
        self.assertEqual(f[1].type(), f2[1].type())
        self.assertEqual(f[2].name(), f2[2].name())
        self.assertEqual(f[2].type(), f2[2].type())
# Allow running this test module directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
hardanimal/UFT_UPGEM | Lib/site-packages/pip-1.2.1-py2.7.egg/pip/vcs/__init__.py | 19 | 8752 | """Handles all VCS (version control) support"""
import os
import shutil
from pip.backwardcompat import urlparse, urllib
from pip.log import logger
from pip.util import (display_path, backup_dir, find_command,
ask, rmtree, ask_path_exists)
__all__ = ['vcs', 'get_src_requirement']
class VcsSupport(object):
    """Registry of the version-control backends known to pip."""

    _registry = {}
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # Teach urlparse about the VCS schemes so URLs such as
        # git+ssh://host/repo are split into netloc/path correctly.
        urlparse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urlparse, 'uses_fragment', None):
            urlparse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        return iter(self._registry)

    @property
    def backends(self):
        return [backend for backend in self._registry.values()]

    @property
    def dirnames(self):
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self):
        # Flatten every backend's scheme list into one list.
        return [scheme for backend in self.backends
                for scheme in backend.schemes]

    def register(self, cls):
        if hasattr(cls, 'name'):
            # First registration wins; re-registering is a no-op.
            self._registry.setdefault(cls.name, cls)
        else:
            logger.warn('Cannot register VCS %s' % cls.__name__)

    def unregister(self, cls=None, name=None):
        if name in self._registry:
            self._registry.pop(name)
        elif cls in self._registry.values():
            self._registry.pop(cls.name)
        else:
            logger.warn('Cannot unregister because no class or name given')

    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        for vc_type in self._registry.values():
            if os.path.exists(os.path.join(location, vc_type.dirname)):
                return vc_type.name
        return None

    def get_backend(self, name):
        # Unknown names yield None rather than raising.
        return self._registry.get(name.lower())

    def get_backend_from_location(self, location):
        name = self.get_backend_name(location)
        return self.get_backend(name) if name else None
vcs = VcsSupport()
class VersionControl(object):
    """Base class for pip's VCS backends (svn, git, hg, bzr).

    Subclasses set ``name`` (the scheme/command name) and ``dirname``
    (the metadata directory, e.g. ``.git``) and implement the
    NotImplementedError-raising methods below.
    """

    name = ''
    dirname = ''

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        self._cmd = None  # lazily resolved path of the VCS executable
        super(VersionControl, self).__init__(*args, **kwargs)

    def _filter(self, line):
        # Default subprocess-output filter: log every line at INFO.
        return (logger.INFO, line)

    def _is_local_repository(self, repo):
        """
        Return True if *repo* is a local filesystem path.

        posix absolute paths start with os.path.sep,
        win32 ones start with a drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        # bool() so callers always get a boolean, not the drive string.
        return bool(repo.startswith(os.path.sep) or drive)

    @property
    def cmd(self):
        """Absolute path of the VCS executable, located once and cached."""
        if self._cmd is not None:
            return self._cmd
        command = find_command(self.name)
        logger.info('Found command %r at %r' % (self.name, command))
        self._cmd = command
        return command

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>, "
            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp")
        assert '+' in self.url, error_message % self.url
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
        rev = None
        if '@' in path:
            # Everything after the last '@' in the path is the revision.
            path, rev = path.rsplit('@', 1)
        url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any trailing slash.
        """
        return urllib.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def parse_vcs_bundle_file(self, content):
        """
        Takes the contents of the bundled text file that explains how to revert
        the stripped off version control data of the given package and returns
        the URL and revision of it.
        """
        raise NotImplementedError

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.
        """
        # Bug fix: this previously raised the ``NotImplemented`` constant,
        # which is not an exception class and would produce a TypeError.
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                # An existing checkout of the same VCS type: update it if
                # the URL matches, otherwise ask the user what to do.
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.info('%s in %s exists, and has correct URL (%s)' %
                                (self.repo_name.title(), display_path(dest),
                                 url))
                    logger.notify('Updating %s %s%s' %
                                  (display_path(dest), self.repo_name,
                                   rev_display))
                    self.update(dest, rev_options)
                else:
                    logger.warn('%s %s in %s exists with URL %s' %
                                (self.name, self.repo_name,
                                 display_path(dest), existing_url))
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                logger.warn('Directory %s already exists, '
                            'and is not a %s %s.' %
                            (dest, self.name, self.repo_name))
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warn('The plan is to install the %s repository %s' %
                        (self.name, url))
            response = ask_path_exists('What to do? %s' % prompt[0],
                                       prompt[1])
            if response == 's':
                logger.notify('Switching %s %s to %s%s' %
                              (self.repo_name, display_path(dest), url,
                               rev_display))
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warn('Deleting %s' % display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warn('Backing up %s to %s'
                            % (display_path(dest), dest_dir))
                shutil.move(dest, dest_dir)
                checkout = True
        return checkout

    def unpack(self, location):
        # Wipe any stale content, then obtain a fresh checkout/clone.
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    def get_src_requirement(self, dist, location, find_tags=False):
        raise NotImplementedError
def get_src_requirement(dist, location, find_tags):
    """Delegate to the VCS backend controlling *location*, if any."""
    backend_cls = vcs.get_backend_from_location(location)
    if backend_cls is not None:
        return backend_cls().get_src_requirement(dist, location, find_tags)
    logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
    return dist.as_requirement()
| gpl-3.0 |
pdav/khal | khal/__main__.py | 4 | 1179 | # Copyright (c) 2013-2021 khal contributors
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from khal.cli import main_khal
# Entry point for ``python -m khal``.
if __name__ == '__main__':
    main_khal()
| mit |
luogangyi/Ceilometer-oVirt | ceilometer/openstack/common/eventlet_backdoor.py | 5 | 4781 | # Copyright (c) 2012 OpenStack Foundation.
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import errno
import gc
import os
import pprint
import socket
import sys
import traceback
import eventlet
import eventlet.backdoor
import greenlet
from oslo.config import cfg
from ceilometer.openstack.common.gettextutils import _LI
from ceilometer.openstack.common import log as logging
# Human-readable description of the accepted ``backdoor_port`` formats;
# reused in the option's help string and in parse-error messages.
help_for_backdoor_port = (
    "Acceptable values are 0, <port>, and <start>:<end>, where 0 results "
    "in listening on a random tcp port number; <port> results in listening "
    "on the specified port number (and not enabling backdoor if that port "
    "is in use); and <start>:<end> results in listening on the smallest "
    "unused port number within the specified range of port numbers. The "
    "chosen port is displayed in the service's log file.")
eventlet_backdoor_opts = [
    cfg.StrOpt('backdoor_port',
               help="Enable eventlet backdoor. %s" % help_for_backdoor_port)
]

# Register the option on the global config object at import time.
CONF = cfg.CONF
CONF.register_opts(eventlet_backdoor_opts)
LOG = logging.getLogger(__name__)
class EventletBackdoorConfigValueError(Exception):
    """Raised when the ``backdoor_port`` option cannot be parsed."""

    def __init__(self, port_range, help_msg, ex):
        message = ('Invalid backdoor_port configuration %(range)s: %(ex)s. '
                   '%(help)s' %
                   dict(range=port_range, ex=ex, help=help_msg))
        super(EventletBackdoorConfigValueError, self).__init__(message)
        self.port_range = port_range
def _dont_use_this():
print("Don't use this, just disconnect instead")
def _find_objects(t):
return [o for o in gc.get_objects() if isinstance(o, t)]
def _print_greenthreads():
    # Dump index, repr and current stack of every live greenlet; exposed
    # in the backdoor console as ``pgt`` for debugging stuck green threads.
    for i, gt in enumerate(_find_objects(greenlet.greenlet)):
        print(i, gt)
        traceback.print_stack(gt.gr_frame)
        print()
def _print_nativethreads():
for threadId, stack in sys._current_frames().items():
print(threadId)
traceback.print_stack(stack)
print()
def _parse_port_range(port_range):
if ':' not in port_range:
start, end = port_range, port_range
else:
start, end = port_range.split(':', 1)
try:
start, end = int(start), int(end)
if end < start:
raise ValueError
return start, end
except ValueError as ex:
raise EventletBackdoorConfigValueError(port_range, ex,
help_for_backdoor_port)
def _listen(host, start_port, end_port, listen_func):
try_port = start_port
while True:
try:
return listen_func((host, try_port))
except socket.error as exc:
if (exc.errno != errno.EADDRINUSE or
try_port >= end_port):
raise
try_port += 1
def initialize_if_enabled():
    """Start the eventlet backdoor REPL server if ``backdoor_port`` is set.

    Returns the TCP port actually listened on, or None when disabled.
    """
    backdoor_locals = {
        'exit': _dont_use_this,      # So we don't exit the entire process
        'quit': _dont_use_this,      # So we don't exit the entire process
        'fo': _find_objects,
        'pgt': _print_greenthreads,
        'pnt': _print_nativethreads,
    }

    if CONF.backdoor_port is None:
        return None

    start_port, end_port = _parse_port_range(str(CONF.backdoor_port))

    # NOTE(johannes): The standard sys.displayhook will print the value of
    # the last expression and set it to __builtin__._, which overwrites
    # the __builtin__._ that gettext sets. Let's switch to using pprint
    # since it won't interact poorly with gettext, and it's easier to
    # read the output too.
    def displayhook(val):
        if val is not None:
            pprint.pprint(val)
    sys.displayhook = displayhook

    sock = _listen('localhost', start_port, end_port, eventlet.listen)

    # In the case of backdoor port being zero, a port number is assigned by
    # listen(). In any case, pull the port number out here.
    port = sock.getsockname()[1]
    LOG.info(
        _LI('Eventlet backdoor listening on %(port)s for process %(pid)d') %
        {'port': port, 'pid': os.getpid()}
    )
    # Serve the backdoor in a green thread so this call returns immediately.
    eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock,
                     locals=backdoor_locals)
    return port
| apache-2.0 |
andymckay/addons-server | src/olympia/blocklist/views.py | 2 | 7809 | import base64
import collections
import hashlib
from datetime import datetime
from operator import attrgetter
import time
from django.core.cache import cache
from django.db.models import Q, signals as db_signals
from django.db.transaction import non_atomic_requests
from django.http import JsonResponse
from django.shortcuts import get_object_or_404, render
from django.utils.cache import patch_cache_control
from django.utils.encoding import smart_str
from olympia.amo.utils import sorted_groupby
from olympia.amo.tasks import flush_front_end_cache_urls
from olympia.versions.compare import version_int
from .models import (
BlocklistApp, BlocklistCA, BlocklistDetail, BlocklistGfx, BlocklistItem,
BlocklistIssuerCert, BlocklistPlugin)
from .utils import (
certificates_to_json, addons_to_json, plugins_to_json, gfxs_to_json)
# Lightweight carriers used while assembling blocklist output:
# App is one target-application version range; BlItem bundles all rows
# for one blocked add-on.
App = collections.namedtuple('App', 'guid min max')
BlItem = collections.namedtuple('BlItem', 'rows os modified block_id prefs')
@non_atomic_requests
def blocklist(request, apiver, app, appver):
    """Serve the XML blocklist, cached per (apiver, app, appver).

    Cache entries are versioned by the ``blocklist:keyversion`` counter,
    which ``clear_blocklist`` bumps to invalidate everything at once.
    """
    key = 'blocklist:%s:%s:%s' % (apiver, app, appver)
    # Use md5 to make sure the memcached key is clean.
    key = hashlib.md5(smart_str(key)).hexdigest()
    cache.add('blocklist:keyversion', 1)
    version = cache.get('blocklist:keyversion')
    response = cache.get(key, version=version)
    if response is None:
        response = _blocklist(request, apiver, app, appver)
        cache.set(key, response, 60 * 60, version=version)
    patch_cache_control(response, max_age=60 * 60)
    return response
def _blocklist(request, apiver, app, appver):
    """Build the uncached XML blocklist response for one app/version."""
    apiver = int(apiver)
    items = get_items(apiver, app, appver)[0]
    plugins = get_plugins(apiver, app, appver)
    # Gfx blocks with no guid apply to every application.
    gfxs = BlocklistGfx.objects.filter(Q(guid__isnull=True) | Q(guid=app))
    issuerCertBlocks = BlocklistIssuerCert.objects.all()
    cas = None
    try:
        cas = BlocklistCA.objects.all()[0]
        # base64encode does not allow str as argument
        cas = base64.b64encode(cas.data.encode('utf-8'))
    except IndexError:
        # No CA block configured; the template gets cas=None.
        pass
    # Find the latest created/modified date across all sections.
    all_ = list(items.values()) + list(plugins) + list(gfxs)
    last_update = max(x.modified for x in all_) if all_ else datetime.now()
    # The client expects milliseconds, Python's time returns seconds.
    last_update = int(time.mktime(last_update.timetuple()) * 1000)
    data = dict(items=items, plugins=plugins, gfxs=gfxs, apiver=apiver,
                appguid=app, appver=appver, last_update=last_update, cas=cas,
                issuerCertBlocks=issuerCertBlocks)
    return render(request, 'blocklist/blocklist.xml', data,
                  content_type='text/xml')
def clear_blocklist(*args, **kw):
    # Something in the blocklist changed; invalidate all responses.
    # Bumping ``blocklist:keyversion`` orphans every cached entry keyed
    # with the previous version (see ``blocklist``/``blocklist_json``).
    cache.add('blocklist:keyversion', 1)
    cache.incr('blocklist:keyversion')
    flush_front_end_cache_urls.delay(['/blocklist/*'])
# Invalidate all cached blocklist responses whenever any blocklist model
# is saved or deleted.
for m in (BlocklistItem, BlocklistPlugin, BlocklistGfx, BlocklistApp,
          BlocklistCA, BlocklistDetail, BlocklistIssuerCert):
    db_signals.post_save.connect(clear_blocklist, sender=m,
                                 dispatch_uid='save_%s' % m)
    db_signals.post_delete.connect(clear_blocklist, sender=m,
                                   dispatch_uid='delete_%s' % m)
def get_items(apiver=None, app=None, appver=None, groupby='guid'):
    """Collapse blocklist rows into one ``BlItem`` per *groupby* key.

    Returns ``(items, details)``: ``items`` maps each key to a BlItem
    (deduplicated rows, the first non-empty OS, the newest modified date,
    the block id, and all collected prefs); ``details`` maps the key to
    the row with the smallest id.  ``apiver``/``appver`` are accepted but
    not used here.
    """
    # Collapse multiple blocklist items (different version ranges) into one
    # item and collapse each item's apps.
    if app:
        app_query = Q(app__guid__isnull=True) | Q(app__guid=app)
    else:
        # This is useful to make the LEFT OUTER JOIN with blapps then
        # used in the extra clause.
        app_query = Q(app__isnull=True) | Q(app__isnull=False)
    addons = (BlocklistItem.objects.no_cache()
              .select_related('details')
              .prefetch_related('prefs')
              .filter(app_query)
              .order_by('-modified')
              .extra(select={'app_guid': 'blapps.guid',
                             'app_min': 'blapps.min',
                             'app_max': 'blapps.max'}))
    items, details = {}, {}
    for guid, rows in sorted_groupby(addons, groupby):
        rows = list(rows)
        rr = []
        prefs = []
        for id, rs in sorted_groupby(rows, 'id'):
            rs = list(rs)
            rr.append(rs[0])
            prefs.extend(p.pref for p in rs[0].prefs.all())
            # Fold the app ranges of all duplicate rows onto the first one.
            rs[0].apps = [App(r.app_guid, r.app_min, r.app_max)
                          for r in rs if r.app_guid]
        os = [r.os for r in rr if r.os]
        # rows are ordered by -modified, so rows[0].modified is the newest.
        items[guid] = BlItem(rr, os[0] if os else None, rows[0].modified,
                             rows[0].block_id, prefs)
        details[guid] = sorted(rows, key=attrgetter('id'))[0]
    return items, details
def get_plugins(apiver=3, app=None, appver=None):
    """Return the list of blocked plugins, optionally scoped to *app*."""
    # API versions < 3 ignore targetApplication entries for plugins so only
    # block the plugin if the appver is within the block range.
    if app:
        app_query = (Q(app__isnull=True) |
                     Q(app__guid=app) |
                     Q(app__guid__isnull=True))
    else:
        app_query = Q(app__isnull=True) | Q(app__isnull=False)
    plugins = (BlocklistPlugin.objects.no_cache().select_related('details')
               .filter(app_query)
               .extra(select={'app_guid': 'blapps.guid',
                              'app_min': 'blapps.min',
                              'app_max': 'blapps.max'}))
    if apiver < 3 and appver is not None:
        def between(ver, min, max):
            # A missing bound means "matches everything"; otherwise the
            # comparison is strict (versions equal to a bound don't match).
            if not (min and max):
                return True
            return version_int(min) < ver < version_int(max)
        app_version = version_int(appver)
        plugins = [p for p in plugins if between(app_version, p.app_min,
                                                 p.app_max)]
    return list(plugins)
@non_atomic_requests
def blocklist_json(request):
    """Serve the full JSON blocklist, cached under one versioned key.

    Uses the same ``blocklist:keyversion`` invalidation scheme as the
    XML ``blocklist`` view.
    """
    key = 'blocklist:json'
    cache.add('blocklist:keyversion', 1)
    version = cache.get('blocklist:keyversion')
    response = cache.get(key, version=version)
    if response is None:
        response = _blocklist_json(request)
        cache.set(key, response, 60 * 60, version=version)
    patch_cache_control(response, max_age=60 * 60)
    return response
def _blocklist_json(request):
    """Export the whole blocklist in JSON.

    It will select blocklists for all apps.
    """
    items, _ = get_items(groupby='id')
    plugins = get_plugins()
    issuerCertBlocks = BlocklistIssuerCert.objects.all()
    gfxs = BlocklistGfx.objects.all()
    ca = None
    try:
        ca = BlocklistCA.objects.all()[0]
        # base64encode does not allow str as argument
        ca = base64.b64encode(ca.data.encode('utf-8'))
    except IndexError:
        # No CA block configured.
        pass
    # Milliseconds since the epoch, matching the XML endpoint's convention.
    last_update = int(round(time.time() * 1000))
    results = {
        'last_update': last_update,
        'certificates': certificates_to_json(issuerCertBlocks),
        'addons': addons_to_json(items),
        'plugins': plugins_to_json(plugins),
        'gfx': gfxs_to_json(gfxs),
        'ca': ca,
    }
    return JsonResponse(results)
@non_atomic_requests
def blocked_list(request, apiver=3):
    """Render the listing of all blocked items for the request's app,
    newest first."""
    app = request.APP.guid
    objs = get_items(apiver, app)[1].values() + get_plugins(apiver, app)
    items = sorted(objs, key=attrgetter('created'), reverse=True)
    return render(request, 'blocklist/blocked_list.html', {'items': items})
# The id is prefixed with [ip] so we know which model to use.
@non_atomic_requests
def blocked_detail(request, id):
    """Render the detail page for one blocked item.

    *id* is prefixed with the model's type letter so we know whether to
    look the item up as a BlocklistItem or a BlocklistPlugin.
    """
    bltypes = dict((m._type, m) for m in (BlocklistItem, BlocklistPlugin))
    item = get_object_or_404(bltypes[id[0]], details=id[1:])
    return render(request, 'blocklist/blocked_detail.html', {'item': item})
| bsd-3-clause |
codeworldprodigy/lab4 | lib/flask/flask/sessions.py | 348 | 12882 | # -*- coding: utf-8 -*-
"""
flask.sessions
~~~~~~~~~~~~~~
Implements cookie based sessions based on itsdangerous.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import uuid
import hashlib
from datetime import datetime
from werkzeug.http import http_date, parse_date
from werkzeug.datastructures import CallbackDict
from . import Markup, json
from ._compat import iteritems, text_type
from itsdangerous import URLSafeTimedSerializer, BadSignature
def total_seconds(td):
    """Whole seconds spanned by timedelta *td* (microseconds ignored)."""
    return td.seconds + td.days * 24 * 60 * 60
class SessionMixin(object):
    """Expands a basic dictionary with an accessors that are expected
    by Flask extensions and users for the session.
    """

    @property
    def permanent(self):
        # this reflects the ``'_permanent'`` key in the dict.
        return self.get('_permanent', False)

    @permanent.setter
    def permanent(self, value):
        self['_permanent'] = bool(value)

    #: some session backends can tell you if a session is new, but that is
    #: not necessarily guaranteed. Use with caution. The default mixin
    #: implementation just hardcodes `False` in.
    new = False

    #: for some backends this will always be `True`, but some backends will
    #: default this to false and detect changes in the dictionary for as
    #: long as changes do not happen on mutable structures in the session.
    #: The default mixin implementation just hardcodes `True` in.
    modified = True
class TaggedJSONSerializer(object):
    """A customized JSON serializer that supports a few extra types that
    we take for granted when serializing (tuples, markup objects, datetime).

    Each special type is encoded as a single-key dict whose key starts
    with a space (e.g. ``{' t': [...]}``); ``loads`` reverses the tagging
    via an ``object_hook``.
    """

    def dumps(self, value):
        def _tag(value):
            # Order matters: tuple before list, and the ``__html__``
            # check before the generic containers.
            if isinstance(value, tuple):
                return {' t': [_tag(x) for x in value]}
            elif isinstance(value, uuid.UUID):
                return {' u': value.hex}
            elif callable(getattr(value, '__html__', None)):
                return {' m': text_type(value.__html__())}
            elif isinstance(value, list):
                return [_tag(x) for x in value]
            elif isinstance(value, datetime):
                return {' d': http_date(value)}
            elif isinstance(value, dict):
                return dict((k, _tag(v)) for k, v in iteritems(value))
            elif isinstance(value, str):
                try:
                    return text_type(value)
                except UnicodeError:
                    # UnexpectedUnicodeError is imported at module bottom
                    # to avoid a circular import; available at call time.
                    raise UnexpectedUnicodeError(u'A byte string with '
                        u'non-ASCII data was passed to the session system '
                        u'which can only store unicode strings. Consider '
                        u'base64 encoding your string (String was %r)' % value)
            return value
        return json.dumps(_tag(value), separators=(',', ':'))

    def loads(self, value):
        def object_hook(obj):
            # Only a single-key dict can be a tag wrapper.
            if len(obj) != 1:
                return obj
            the_key, the_value = next(iteritems(obj))
            if the_key == ' t':
                return tuple(the_value)
            elif the_key == ' u':
                return uuid.UUID(the_value)
            elif the_key == ' m':
                return Markup(the_value)
            elif the_key == ' d':
                return parse_date(the_value)
            return obj
        return json.loads(value, object_hook=object_hook)
session_json_serializer = TaggedJSONSerializer()
class SecureCookieSession(CallbackDict, SessionMixin):
    """Baseclass for sessions based on signed cookies."""

    def __init__(self, initial=None):
        # Any mutation of the dict flips ``modified`` so save_session
        # knows the cookie needs to be rewritten.
        def on_update(self):
            self.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.modified = False
class NullSession(SecureCookieSession):
    """Class used to generate nicer error messages if sessions are not
    available. Will still allow read-only access to the empty session
    but fail on setting.
    """

    def _fail(self, *args, **kwargs):
        raise RuntimeError('the session is unavailable because no secret '
                           'key was set. Set the secret_key on the '
                           'application to something unique and secret.')
    # Every mutating dict method raises; reads still work on the empty dict.
    __setitem__ = __delitem__ = clear = pop = popitem = \
        update = setdefault = _fail
    # Remove the helper so it doesn't leak into the public class namespace.
    del _fail
class SessionInterface(object):
    """The basic interface you have to implement in order to replace the
    default session interface which uses werkzeug's securecookie
    implementation. The only methods you have to implement are
    :meth:`open_session` and :meth:`save_session`, the others have
    useful defaults which you don't need to change.

    The session object returned by the :meth:`open_session` method has to
    provide a dictionary like interface plus the properties and methods
    from the :class:`SessionMixin`. We recommend just subclassing a dict
    and adding that mixin::

        class Session(dict, SessionMixin):
            pass

    If :meth:`open_session` returns `None` Flask will call into
    :meth:`make_null_session` to create a session that acts as replacement
    if the session support cannot work because some requirement is not
    fulfilled. The default :class:`NullSession` class that is created
    will complain that the secret key was not set.

    To replace the session interface on an application all you have to do
    is to assign :attr:`flask.Flask.session_interface`::

        app = Flask(__name__)
        app.session_interface = MySessionInterface()

    .. versionadded:: 0.8
    """

    #: :meth:`make_null_session` will look here for the class that should
    #: be created when a null session is requested. Likewise the
    #: :meth:`is_null_session` method will perform a typecheck against
    #: this type.
    null_session_class = NullSession

    #: A flag that indicates if the session interface is pickle based.
    #: This can be used by flask extensions to make a decision in regards
    #: to how to deal with the session object.
    #:
    #: .. versionadded:: 0.10
    pickle_based = False

    def make_null_session(self, app):
        """Creates a null session which acts as a replacement object if the
        real session support could not be loaded due to a configuration
        error. This mainly aids the user experience because the job of the
        null session is to still support lookup without complaining but
        modifications are answered with a helpful error message of what
        failed.

        This creates an instance of :attr:`null_session_class` by default.
        """
        return self.null_session_class()

    def is_null_session(self, obj):
        """Checks if a given object is a null session. Null sessions are
        not asked to be saved.

        This checks if the object is an instance of :attr:`null_session_class`
        by default.
        """
        return isinstance(obj, self.null_session_class)

    def get_cookie_domain(self, app):
        """Helpful helper method that returns the cookie domain that should
        be used for the session cookie if session cookies are used.

        An explicit ``SESSION_COOKIE_DOMAIN`` wins; otherwise the domain
        is derived from ``SERVER_NAME`` when that is configured.
        """
        if app.config['SESSION_COOKIE_DOMAIN'] is not None:
            return app.config['SESSION_COOKIE_DOMAIN']
        if app.config['SERVER_NAME'] is not None:
            # chop of the port which is usually not supported by browsers
            rv = '.' + app.config['SERVER_NAME'].rsplit(':', 1)[0]

            # Google chrome does not like cookies set to .localhost, so
            # we just go with no domain then. Flask documents anyways that
            # cross domain cookies need a fully qualified domain name
            if rv == '.localhost':
                rv = None

            # If we infer the cookie domain from the server name we need
            # to check if we are in a subpath. In that case we can't
            # set a cross domain cookie.
            if rv is not None:
                path = self.get_cookie_path(app)
                if path != '/':
                    rv = rv.lstrip('.')

            return rv

    def get_cookie_path(self, app):
        """Returns the path for which the cookie should be valid. The
        default implementation uses the value from the ``SESSION_COOKIE_PATH``
        config var if it's set, and falls back to ``APPLICATION_ROOT`` or
        uses ``/`` if it's `None`.
        """
        return app.config['SESSION_COOKIE_PATH'] or \
               app.config['APPLICATION_ROOT'] or '/'

    def get_cookie_httponly(self, app):
        """Returns True if the session cookie should be httponly. This
        currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
        config var.
        """
        return app.config['SESSION_COOKIE_HTTPONLY']

    def get_cookie_secure(self, app):
        """Returns True if the cookie should be secure. This currently
        just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
        """
        return app.config['SESSION_COOKIE_SECURE']

    def get_expiration_time(self, app, session):
        """A helper method that returns an expiration date for the session
        or `None` if the session is linked to the browser session. The
        default implementation returns now + the permanent session
        lifetime configured on the application.
        """
        if session.permanent:
            return datetime.utcnow() + app.permanent_session_lifetime

    def open_session(self, app, request):
        """This method has to be implemented and must either return `None`
        in case the loading failed because of a configuration error or an
        instance of a session object which implements a dictionary like
        interface + the methods and attributes on :class:`SessionMixin`.
        """
        raise NotImplementedError()

    def save_session(self, app, session, response):
        """This is called for actual sessions returned by :meth:`open_session`
        at the end of the request. This is still called during a request
        context so if you absolutely need access to the request you can do
        that.
        """
        raise NotImplementedError()
class SecureCookieSessionInterface(SessionInterface):
    """The default session interface that stores sessions in signed cookies
    through the :mod:`itsdangerous` module.
    """
    #: the salt that should be applied on top of the secret key for the
    #: signing of cookie based sessions.
    salt = 'cookie-session'
    #: the hash function to use for the signature. The default is sha1
    digest_method = staticmethod(hashlib.sha1)
    #: the name of the itsdangerous supported key derivation. The default
    #: is hmac.
    key_derivation = 'hmac'
    #: A python serializer for the payload. The default is a compact
    #: JSON derived serializer with support for some extra Python types
    #: such as datetime objects or tuples.
    serializer = session_json_serializer
    session_class = SecureCookieSession

    def get_signing_serializer(self, app):
        """Return the configured URLSafeTimedSerializer, or None when no
        secret key is set (which disables sessions)."""
        if not app.secret_key:
            return None
        signer_kwargs = dict(
            key_derivation=self.key_derivation,
            digest_method=self.digest_method
        )
        return URLSafeTimedSerializer(app.secret_key, salt=self.salt,
                                      serializer=self.serializer,
                                      signer_kwargs=signer_kwargs)

    def open_session(self, app, request):
        s = self.get_signing_serializer(app)
        if s is None:
            return None
        val = request.cookies.get(app.session_cookie_name)
        if not val:
            return self.session_class()
        max_age = total_seconds(app.permanent_session_lifetime)
        try:
            data = s.loads(val, max_age=max_age)
            return self.session_class(data)
        except BadSignature:
            # A tampered or expired cookie yields a fresh empty session
            # instead of an error.
            return self.session_class()

    def save_session(self, app, session, response):
        domain = self.get_cookie_domain(app)
        path = self.get_cookie_path(app)
        # An emptied-but-modified session means the user logged out:
        # delete the cookie instead of rewriting it.
        if not session:
            if session.modified:
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain, path=path)
            return
        httponly = self.get_cookie_httponly(app)
        secure = self.get_cookie_secure(app)
        expires = self.get_expiration_time(app, session)
        val = self.get_signing_serializer(app).dumps(dict(session))
        response.set_cookie(app.session_cookie_name, val,
                            expires=expires, httponly=httponly,
                            domain=domain, path=path, secure=secure)
from flask.debughelpers import UnexpectedUnicodeError
| apache-2.0 |
WASPACDC/hmdsm.repository | plugin.video.loganaddon/Images/resources/lib/sources/dizibox_tv.py | 20 | 6432 | # -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import cache
from resources.lib import resolvers
class source:
    def __init__(self):
        # Base URL of the dizibox.com site; all show/episode paths are
        # resolved relative to this.
        self.base_link = 'http://www.dizibox.com'
    def dizibox_shows(self):
        # Scrape the homepage's "all categories" filter list into
        # (relative_url, cleaned_title) pairs; returns None on any
        # failure (best-effort scraping, intentionally silent).
        try:
            result = client.source(self.base_link)
            result = client.parseDOM(result, 'input', {'id': 'filterAllCategories'})[0]
            result = client.parseDOM(result, 'li')
            result = zip(client.parseDOM(result, 'a', ret='href'), client.parseDOM(result, 'a'))
            # Strip the scheme/host so only the site-relative path remains.
            result = [(re.sub('http.+?//.+?/','/', i[0]), cleantitle.tv(i[1])) for i in result]
            return result
        except:
            return
    def get_show(self, imdb, tvdb, tvshowtitle, year):
        # Find the show's relative URL by matching its cleaned title
        # against the (cached) scraped show list; None when not found
        # or on any scraping error.
        try:
            result = cache.get(self.dizibox_shows, 72)

            tvshowtitle = cleantitle.tv(tvshowtitle)

            result = [i[0] for i in result if tvshowtitle == i[1]][0]

            # Reduce an absolute URL to its path component.
            try: url = re.compile('//.+?(/.+)').findall(result)[0]
            except: url = result
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def get_episode(self, url, imdb, tvdb, title, date, season, episode):
        # Resolve an episode page URL from the show page: for seasons
        # after the first, follow the matching season link first, then
        # pick the "<season>-sezon-<episode>-bolum-" link.  Returns a
        # relative URL, or None on any failure (best-effort scraping).
        try:
            if url == None: return

            url = urlparse.urljoin(self.base_link, url)

            season, episode = '%01d' % int(season), '%01d' % int(episode)

            result = client.source(url)

            if not season == '1':
                url = client.parseDOM(result, 'a', ret='href', attrs = {'class': 'season-.+?'})
                url = [i for i in url if '/%s-sezon-' % season in i][0]
                result = client.source(url)

            result = client.parseDOM(result, 'a', ret='href')
            result = [i for i in result if '%s-sezon-%s-bolum-' % (season, episode) in i][0]

            # Reduce an absolute URL to its path component.
            try: url = re.compile('//.+?(/.+)').findall(result)[0]
            except: url = result
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
sources = []
if url == None: return sources
sources_url = urlparse.urljoin(self.base_link, url)
result = client.source(sources_url, close=False)
result = re.sub(r'[^\x00-\x7F]+','', result)
result = re.compile('(<option.*?</option>)', re.DOTALL).findall(result)
result = [(client.parseDOM(i, 'option', ret='href'), client.parseDOM(i, 'option', ret='value'), client.parseDOM(i, 'option')) for i in result]
result = [i[0] + i[1] for i in result if len(i[2]) > 0 and i[2][0] == 'Altyazsz'][0][0]
url = urlparse.urljoin(self.base_link, result)
result = client.source(url, close=False)
url = client.parseDOM(result, 'span', attrs = {'class': 'object-wrapper'})[0]
url = client.parseDOM(url, 'iframe', ret='src')[0]
url = client.replaceHTMLCodes(url)
result = client.source(url, close=False)
try:
r = re.compile('"?file"?\s*:\s*"([^"]+)"\s*,\s*"?label"?\s*:\s*"(\d+)p?"').findall(result)
if r == []: raise Exception()
r = [(i[0].replace('\\/', '/').replace('\\&', '&').decode('unicode_escape'), int(i[1])) for i in r]
u = [('%s|User-Agent=%s&Referer=%s' % (i[0], urllib.quote_plus(client.agent()), urllib.quote_plus(sources_url)), i[1], 'Dizibox') for i in r if not 'google' in i[0]]
u += [(i[0], i[1], 'GVideo') for i in r if 'google' in i[0]]
try: sources.append({'source': [i[2] for i in u if i[1] >= 1080][0], 'quality': '1080p', 'provider': 'Dizibox', 'url': [i[0] for i in u if i[1] >= 1080][0]})
except: pass
try: sources.append({'source': [i[2] for i in u if 720 <= i[1] < 1080][0], 'quality': 'HD', 'provider': 'Dizibox', 'url': [i[0] for i in u if 720 <= i[1] < 1080][0]})
except: pass
try: sources.append({'source': [i[2] for i in u if i[1] < 720][0], 'quality': 'SD', 'provider': 'Dizibox', 'url': [i[0] for i in u if i[1] < 720][0]})
except: pass
return sources
except:
pass
try:
if '.dizibox.' in url: url = re.compile('location\.href\s*=\s*"(.+?)"').findall(result)[0]
host = urlparse.urlparse(url).netloc
host = host.replace('mail.ru', 'mailru.ru').rsplit('.', 1)[0].split('.')[-1].lower()
strm = resolvers.request(url)
if strm == url or strm == None: raise Exception()
if type(strm) == list:
for i in strm: sources.append({'source': host, 'quality': i['quality'], 'provider': 'Dizibox', 'url': i['url']})
else:
sources.append({'source': host, 'quality': 'HD', 'provider': 'Dizibox', 'url': strm})
return sources
except:
pass
except:
return sources
def resolve(self, url):
try:
if not 'google' in url: return url
if url.startswith('stack://'): return url
url = client.request(url, output='geturl')
if 'requiressl=yes' in url: url = url.replace('http://', 'https://')
else: url = url.replace('https://', 'http://')
return url
except:
return
| gpl-2.0 |
taaviteska/django | django/contrib/auth/middleware.py | 82 | 5399 | from django.conf import settings
from django.contrib import auth
from django.contrib.auth import load_backend
from django.contrib.auth.backends import RemoteUserBackend
from django.core.exceptions import ImproperlyConfigured
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import SimpleLazyObject
def get_user(request):
    """Return the user bound to *request*, resolving it at most once.

    The first call delegates to ``auth.get_user`` and memoizes the result
    on ``request._cached_user``; every later call returns the cached object.
    """
    try:
        return request._cached_user
    except AttributeError:
        request._cached_user = auth.get_user(request)
        return request._cached_user
class AuthenticationMiddleware(MiddlewareMixin):
    """Attach a lazily-resolved ``request.user`` to every incoming request."""

    def process_request(self, request):
        # Session support is a hard prerequisite; fail loudly with a message
        # naming the relevant setting (MIDDLEWARE, or the legacy
        # MIDDLEWARE_CLASSES when MIDDLEWARE is unset).
        setting_suffix = "_CLASSES" if settings.MIDDLEWARE is None else ""
        assert hasattr(request, 'session'), (
            "The Django authentication middleware requires session middleware "
            "to be installed. Edit your MIDDLEWARE%s setting to insert "
            "'django.contrib.sessions.middleware.SessionMiddleware' before "
            "'django.contrib.auth.middleware.AuthenticationMiddleware'."
            % setting_suffix
        )
        # Defer the session lookup until the user is actually accessed.
        request.user = SimpleLazyObject(lambda: get_user(request))
class RemoteUserMiddleware(MiddlewareMixin):
    """
    Middleware for utilizing Web-server-provided authentication.

    If request.user is not authenticated, then this middleware attempts to
    authenticate the username passed in the ``REMOTE_USER`` request header.
    If authentication is successful, the user is automatically logged in to
    persist the user in the session.

    The header used is configurable and defaults to ``REMOTE_USER``. Subclass
    this class and change the ``header`` attribute if you need to use a
    different header.
    """

    # Name of request header to grab username from. This will be the key as
    # used in the request.META dictionary, i.e. the normalization of headers to
    # all uppercase and the addition of "HTTP_" prefix apply.
    header = "REMOTE_USER"
    # When True, a session authenticated via this middleware is logged out as
    # soon as the header disappears from the request.
    force_logout_if_no_header = True

    def process_request(self, request):
        """Authenticate (or de-authenticate) the request from ``self.header``."""
        # AuthenticationMiddleware is required so that request.user exists.
        if not hasattr(request, 'user'):
            raise ImproperlyConfigured(
                "The Django remote user auth middleware requires the"
                " authentication middleware to be installed. Edit your"
                " MIDDLEWARE setting to insert"
                " 'django.contrib.auth.middleware.AuthenticationMiddleware'"
                " before the RemoteUserMiddleware class.")
        try:
            username = request.META[self.header]
        except KeyError:
            # If specified header doesn't exist then remove any existing
            # authenticated remote-user, or return (leaving request.user set to
            # AnonymousUser by the AuthenticationMiddleware).
            if self.force_logout_if_no_header and request.user.is_authenticated:
                self._remove_invalid_user(request)
            return
        # If the user is already authenticated and that user is the user we are
        # getting passed in the headers, then the correct user is already
        # persisted in the session and we don't need to continue.
        if request.user.is_authenticated:
            if request.user.get_username() == self.clean_username(username, request):
                return
            else:
                # An authenticated user is associated with the request, but
                # it does not match the authorized user in the header.
                self._remove_invalid_user(request)
        # We are seeing this user for the first time in this session, attempt
        # to authenticate the user.
        user = auth.authenticate(request, remote_user=username)
        if user:
            # User is valid.  Set request.user and persist user in the session
            # by logging the user in.
            request.user = user
            auth.login(request, user)

    def clean_username(self, username, request):
        """
        Allow the backend to clean the username, if the backend defines a
        clean_username method.
        """
        backend_str = request.session[auth.BACKEND_SESSION_KEY]
        backend = auth.load_backend(backend_str)
        try:
            username = backend.clean_username(username)
        except AttributeError:  # Backend has no clean_username method.
            pass
        return username

    def _remove_invalid_user(self, request):
        """
        Remove the current authenticated user in the request which is invalid
        but only if the user is authenticated via the RemoteUserBackend.
        """
        try:
            stored_backend = load_backend(request.session.get(auth.BACKEND_SESSION_KEY, ''))
        except ImportError:
            # backend failed to load
            auth.logout(request)
        else:
            if isinstance(stored_backend, RemoteUserBackend):
                auth.logout(request)
class PersistentRemoteUserMiddleware(RemoteUserMiddleware):
    """
    Middleware for Web-server provided authentication on logon pages.

    Like RemoteUserMiddleware but keeps the user authenticated even if
    the header (``REMOTE_USER``) is not found in the request. Useful
    for setups when the external authentication via ``REMOTE_USER``
    is only expected to happen on some "logon" URL and the rest of
    the application wants to use Django's authentication mechanism.
    """
    # Never force a logout merely because the header is absent; the session
    # established on the logon URL stays valid.
    force_logout_if_no_header = False
| bsd-3-clause |
davidmueller13/xbmc | lib/gtest/test/gtest_xml_output_unittest.py | 356 | 13525 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import datetime
import errno
import os
import re
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
# Command-line flags understood by the Google Test binaries under test.
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = "--gtest_output"
# File name Google Test uses when --gtest_output=xml is given with no path.
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"

SUPPORTS_STACK_TRACES = False

# Placeholder spliced into the expected XML where a stack trace would appear
# on builds that support them; empty otherwise.
if SUPPORTS_STACK_TRACES:
  STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
  STACK_TRACE_TEMPLATE = ''
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
</testsuites>"""
GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)

# Probe the binary for typed-test support: typed tests only show up in the
# --gtest_list_tests output when the build supports them.
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
    [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
"""
Unit test for Google Test's XML output functionality.
"""
# This test currently breaks on platforms that do not support typed and
# type-parameterized tests, so we don't run it under them.
if SUPPORTS_TYPED_TESTS:
def testNonEmptyXmlOutput(self):
"""
Runs a test program that generates a non-empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)
def testEmptyXmlOutput(self):
"""Verifies XML output for a Google Test binary without actual tests.
Runs a test program that generates an empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)
def testTimestampValue(self):
"""Checks whether the timestamp attribute in the XML output is valid.
Runs a test program that generates an empty XML output, and checks if
the timestamp attribute in the testsuites tag is valid.
"""
actual = self._GetXmlOutput('gtest_no_test_unittest', 0)
date_time_str = actual.documentElement.getAttributeNode('timestamp').value
# datetime.strptime() is only available in Python 2.5+ so we have to
# parse the expected datetime manually.
match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
self.assertTrue(
re.match,
'XML datettime string %s has incorrect format' % date_time_str)
date_time_from_xml = datetime.datetime(
year=int(match.group(1)), month=int(match.group(2)),
day=int(match.group(3)), hour=int(match.group(4)),
minute=int(match.group(5)), second=int(match.group(6)))
time_delta = abs(datetime.datetime.now() - date_time_from_xml)
# timestamp value should be near the current local time
self.assertTrue(time_delta < datetime.timedelta(seconds=600),
'time_delta is %s' % time_delta)
actual.unlink()
def testDefaultOutputFile(self):
"""
Confirms that Google Test produces an XML output file with the expected
default name if no name is explicitly specified.
"""
output_file = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_DEFAULT_OUTPUT_FILE)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
'gtest_no_test_unittest')
try:
os.remove(output_file)
except OSError, e:
if e.errno != errno.ENOENT:
raise
p = gtest_test_utils.Subprocess(
[gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
self.assert_(os.path.isfile(output_file))
def testSuppressedXmlOutput(self):
"""
Tests that no XML file is generated if the default XML listener is
shut down before RUN_ALL_TESTS is invoked.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_PROGRAM_NAME + 'out.xml')
if os.path.isfile(xml_path):
os.remove(xml_path)
command = [GTEST_PROGRAM_PATH,
'%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
'--shut_down_xml']
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
# p.signal is avalable only if p.terminated_by_signal is True.
self.assertFalse(
p.terminated_by_signal,
'%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(1, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, 1))
self.assert_(not os.path.isfile(xml_path))
def _GetXmlOutput(self, gtest_prog_name, expected_exit_code):
"""
Returns the xml output generated by running the program gtest_prog_name.
Furthermore, the program's exit code must be expected_exit_code.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
gtest_prog_name + 'out.xml')
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
command = [gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)]
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
'%s was killed by signal %d' % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(expected_exit_code, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, expected_exit_code))
actual = minidom.parse(xml_path)
return actual
def _TestXmlOutput(self, gtest_prog_name, expected_xml, expected_exit_code):
"""
Asserts that the XML document generated by running the program
gtest_prog_name matches expected_xml, a string containing another
XML document. Furthermore, the program's exit code must be
expected_exit_code.
"""
actual = self._GetXmlOutput(gtest_prog_name, expected_exit_code)
expected = minidom.parseString(expected_xml)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
if __name__ == '__main__':
  # Keep stack traces short so the expected-XML templates stay stable.
  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
  gtest_test_utils.Main()
| gpl-2.0 |
jvanbrug/alanaldavista | boto/ec2/cloudwatch/datapoint.py | 56 | 1668 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from datetime import datetime
class Datapoint(dict):
    """A single CloudWatch metric datapoint, filled in from SAX parse events.

    Behaves as a plain dict whose keys are the XML element names; numeric
    statistics are stored as floats and ``Timestamp`` as a ``datetime``.
    """

    # Element names whose text payload is a numeric statistic.
    _NUMERIC_FIELDS = frozenset(
        ('Average', 'Maximum', 'Minimum', 'Sum', 'SampleCount'))

    def __init__(self, connection=None):
        super(Datapoint, self).__init__()
        self.connection = connection

    def startElement(self, name, attrs, connection):
        # Nothing to do when an element opens; values arrive in endElement.
        pass

    def endElement(self, name, value, connection):
        """Store the closed element's text under its name, coercing types."""
        if name in self._NUMERIC_FIELDS:
            self[name] = float(value)
        elif name == 'Timestamp':
            self[name] = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
        elif name != 'member':
            # 'member' is a container element, not a datapoint attribute.
            self[name] = value
| mit |
martynovp/edx-platform | common/lib/xmodule/xmodule/videoannotation_module.py | 107 | 6445 | """
Module for Video annotations using annotator.
"""
from lxml import etree
from pkg_resources import resource_string
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
from xblock.core import Scope, String
from xmodule.annotator_mixin import get_instructions, get_extension
from xmodule.annotator_token import retrieve_token
from xblock.fragment import Fragment
import textwrap
# Make '_' a no-op so we can scrape strings
# (translation tooling still finds the _("...") literals below even though
# no actual translation happens at runtime in this module).
_ = lambda text: text
class AnnotatableFields(object):
    """ Fields for `VideoModule` and `VideoDescriptor`. """

    # Raw <annotatable> XML for the assignment; the <instructions> element is
    # extracted at runtime and rendered above the player.
    data = String(
        help=_("XML data for the annotation"),
        scope=Scope.content,
        default=textwrap.dedent("""\
        <annotatable>
            <instructions>
                <p>
                    Add the instructions to the assignment here.
                </p>
            </instructions>
        </annotatable>
        """))
    display_name = String(
        display_name=_("Display Name"),
        help=_("Display name for this module"),
        scope=Scope.settings,
        default=_('Video Annotation'),
    )
    # URL of the externally hosted video file to annotate.
    sourceurl = String(
        help=_("The external source URL for the video."),
        display_name=_("Source URL"),
        scope=Scope.settings, default="http://video-js.zencoder.com/oceans-clip.mp4"
    )
    poster_url = String(
        help=_("Poster Image URL"),
        display_name=_("Poster URL"),
        scope=Scope.settings,
        default=""
    )
    # Backend service that stores the annotations themselves.
    annotation_storage_url = String(
        help=_("Location of Annotation backend"),
        scope=Scope.settings,
        default="http://your_annotation_storage.com",
        display_name=_("Url for Annotation Storage")
    )
    # Shared secret used to sign the auth token sent to the storage backend.
    annotation_token_secret = String(
        help=_("Secret string for annotation storage"),
        scope=Scope.settings,
        default="xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
        display_name=_("Secret Token String for Annotation")
    )
    default_tab = String(
        display_name=_("Default Annotations Tab"),
        help=_("Select which tab will be the default in the annotations table: myNotes, Instructor, or Public."),
        scope=Scope.settings,
        default="myNotes",
    )
    # currently only supports one instructor, will build functionality for multiple later
    instructor_email = String(
        display_name=_("Email for 'Instructor' Annotations"),
        help=_("Email of the user that will be attached to all annotations that will be found in 'Instructor' tab."),
        scope=Scope.settings,
        default="",
    )
    annotation_mode = String(
        display_name=_("Mode for Annotation Tool"),
        help=_("Type in number corresponding to following modes: 'instructor' or 'everyone'"),
        scope=Scope.settings,
        default="everyone",
    )
class VideoAnnotationModule(AnnotatableFields, XModule):
    '''Video Annotation Module'''
    # Static assets served with the student view.
    js = {
        'coffee': [
            resource_string(__name__, 'js/src/javascript_loader.coffee'),
            resource_string(__name__, 'js/src/html/display.coffee'),
            resource_string(__name__, 'js/src/annotatable/display.coffee'),
        ],
        'js': [
            resource_string(__name__, 'js/src/collapsible.js'),
        ]
    }
    css = {'scss': [resource_string(__name__, 'css/annotatable/display.scss')]}
    icon_class = 'videoannotation'

    def __init__(self, *args, **kwargs):
        super(VideoAnnotationModule, self).__init__(*args, **kwargs)

        # Parse the stored XML once; instructions are rendered separately
        # from the rest of the annotatable content.
        xmltree = etree.fromstring(self.data)

        self.instructions = self._extract_instructions(xmltree)
        self.content = etree.tostring(xmltree, encoding='unicode')
        self.user_email = ""
        self.is_course_staff = False
        if self.runtime.get_user_role() in ['instructor', 'staff']:
            self.is_course_staff = True

        # get_real_user is None in Studio runtimes; only resolve an email
        # when a real (LMS) user is available.
        if self.runtime.get_real_user is not None:
            try:
                self.user_email = self.runtime.get_real_user(self.runtime.anonymous_student_id).email
            except Exception:  # pylint: disable=broad-except
                self.user_email = _("No email address found.")

    def _extract_instructions(self, xmltree):
        """ Removes <instructions> from the xmltree and returns them as a string, otherwise None. """
        return get_instructions(xmltree)

    def _get_extension(self, src_url):
        ''' get the extension of a given url '''
        return get_extension(src_url)

    def student_view(self, context):
        """ Renders parameters to template. """
        extension = self._get_extension(self.sourceurl)

        context = {
            'course_key': self.runtime.course_id,
            'display_name': self.display_name_with_default,
            'instructions_html': self.instructions,
            'sourceUrl': self.sourceurl,
            'typeSource': extension,
            'poster': self.poster_url,
            'content_html': self.content,
            # Signed token authorizing this user against the storage backend.
            'token': retrieve_token(self.user_email, self.annotation_token_secret),
            'annotation_storage': self.annotation_storage_url,
            'default_tab': self.default_tab,
            'instructor_email': self.instructor_email,
            'annotation_mode': self.annotation_mode,
            'is_course_staff': self.is_course_staff,
        }

        fragment = Fragment(self.system.render_template('videoannotation.html', context))

        # TinyMCE already exists in Studio so we should not load the files again
        # get_real_user always returns "None" in Studio since its runtimes contains no anonymous ids
        if self.runtime.get_real_user is not None:
            fragment.add_javascript_url(self.runtime.STATIC_URL + "js/vendor/tinymce/js/tinymce/tinymce.full.min.js")
            fragment.add_javascript_url(self.runtime.STATIC_URL + "js/vendor/tinymce/js/tinymce/jquery.tinymce.min.js")
        return fragment
class VideoAnnotationDescriptor(AnnotatableFields, RawDescriptor):
    ''' Video annotation descriptor '''
    module_class = VideoAnnotationModule
    mako_template = "widgets/raw-edit.html"

    @property
    def non_editable_metadata_fields(self):
        # Keep the storage URL and token secret out of the Studio metadata
        # editor on top of whatever the base descriptor already hides.
        non_editable_fields = super(VideoAnnotationDescriptor, self).non_editable_metadata_fields
        non_editable_fields.extend([
            VideoAnnotationDescriptor.annotation_storage_url,
            VideoAnnotationDescriptor.annotation_token_secret,
        ])
        return non_editable_fields
jmartiuk5/python-mode | pymode/libs3/rope/refactor/method_object.py | 91 | 3868 | import warnings
from rope.base import pyobjects, exceptions, change, evaluate, codeanalyze
from rope.refactor import sourceutils, occurrences, rename
class MethodObject(object):
    """The "replace method with method object" refactoring.

    Turns the function at *offset* in *resource* into a class whose
    ``__call__`` executes the original body (parameters become instance
    attributes set in ``__init__``), and rewrites the original function to
    instantiate and invoke that class.
    """

    def __init__(self, project, resource, offset):
        self.pycore = project.pycore
        this_pymodule = self.pycore.resource_to_pyobject(resource)
        pyname = evaluate.eval_location(this_pymodule, offset)
        # The refactoring is only defined for plain functions/methods.
        if pyname is None or not isinstance(pyname.get_object(),
                                            pyobjects.PyFunction):
            raise exceptions.RefactoringError(
                'Replace method with method object refactoring should be '
                'performed on a function.')
        self.pyfunction = pyname.get_object()
        self.pymodule = self.pyfunction.get_module()
        self.resource = self.pymodule.get_resource()

    def get_new_class(self, name):
        """Return the source text of the generated callable class *name*."""
        body = sourceutils.fix_indentation(
            self._get_body(), sourceutils.get_indent(self.pycore) * 2)
        return 'class %s(object):\n\n%s%sdef __call__(self):\n%s' % \
               (name, self._get_init(),
                ' ' * sourceutils.get_indent(self.pycore), body)

    def get_changes(self, classname=None, new_class_name=None):
        """Return a ChangeSet performing the refactoring.

        ``new_class_name`` is a deprecated alias for ``classname``.
        """
        if new_class_name is not None:
            warnings.warn(
                'new_class_name parameter is deprecated; use classname',
                DeprecationWarning, stacklevel=2)
            classname = new_class_name
        collector = codeanalyze.ChangeCollector(self.pymodule.source_code)
        # Replace the original function body with a delegation call ...
        start, end = sourceutils.get_body_region(self.pyfunction)
        indents = sourceutils.get_indents(
            self.pymodule.lines, self.pyfunction.get_scope().get_start()) + \
            sourceutils.get_indent(self.pycore)
        new_contents = ' ' * indents + 'return %s(%s)()\n' % \
                       (classname, ', '.join(self._get_parameter_names()))
        collector.add_change(start, end, new_contents)
        # ... and insert the generated class after the enclosing scope.
        insertion = self._get_class_insertion_point()
        collector.add_change(insertion, insertion,
                             '\n\n' + self.get_new_class(classname))
        changes = change.ChangeSet('Replace method with method object refactoring')
        changes.add_change(change.ChangeContents(self.resource,
                                                 collector.get_changed()))
        return changes

    def _get_class_insertion_point(self):
        # Walk up to the top-level scope containing the function; the new
        # class goes right after it.
        current = self.pyfunction
        while current.parent != self.pymodule:
            current = current.parent
        end = self.pymodule.lines.get_line_end(current.get_scope().get_end())
        return min(end + 1, len(self.pymodule.source_code))

    def _get_body(self):
        # Rewrite every reference to a parameter `p` in the body as `self.p`
        # by temporarily prepending a `p = None` binding, renaming its
        # occurrences, then stripping the temporary first line again.
        body = sourceutils.get_body(self.pyfunction)
        for param in self._get_parameter_names():
            body = param + ' = None\n' + body
            pymod = self.pycore.get_string_module(body, self.resource)
            pyname = pymod[param]
            finder = occurrences.create_finder(self.pycore, param, pyname)
            result = rename.rename_in_module(finder, 'self.' + param,
                                             pymodule=pymod)
            body = result[result.index('\n') + 1:]
        return body

    def _get_init(self):
        # Build an __init__ storing every original parameter on self; a
        # parameter literally named `self` is received as `host`.
        params = self._get_parameter_names()
        indents = ' ' * sourceutils.get_indent(self.pycore)
        if not params:
            return ''
        header = indents + 'def __init__(self'
        body = ''
        for arg in params:
            new_name = arg
            if arg == 'self':
                new_name = 'host'
            header += ', %s' % new_name
            body += indents * 2 + 'self.%s = %s\n' % (arg, new_name)
        header += '):'
        return '%s\n%s\n' % (header, body)

    def _get_parameter_names(self):
        return self.pyfunction.get_param_names()
| lgpl-3.0 |
ghostlander/p2pool-neoscrypt | p2pool/util/p2protocol.py | 216 | 4144 | '''
Generic message-based protocol used by Bitcoin and P2Pool for P2P communication
'''
import hashlib
import struct
from twisted.internet import protocol
from twisted.python import log
import p2pool
from p2pool.util import datachunker, variable
class TooLong(Exception):
    """Raised by sendPacket when a payload exceeds the maximum length."""
    pass
class Protocol(protocol.Protocol):
    """Framed message protocol over TCP.

    Wire format per message: <prefix><command:12, NUL-padded><length:4 LE>
    <checksum:4><payload>, where the checksum is the first 4 bytes of
    double-SHA256 of the payload (Bitcoin-style framing).
    """

    def __init__(self, message_prefix, max_payload_length, traffic_happened=variable.Event(), ignore_trailing_payload=False):
        # NOTE(review): the default `traffic_happened` Event is created once
        # at definition time and is therefore shared by every Protocol
        # instance that does not pass its own -- confirm this is intended.
        self._message_prefix = message_prefix
        self._max_payload_length = max_payload_length
        self.dataReceived2 = datachunker.DataChunker(self.dataReceiver())
        self.traffic_happened = traffic_happened
        self.ignore_trailing_payload = ignore_trailing_payload

    def dataReceived(self, data):
        # Account incoming bytes, then feed them to the chunking generator.
        self.traffic_happened.happened('p2p/in', len(data))
        self.dataReceived2(data)

    def dataReceiver(self):
        """Generator driven by DataChunker: each ``yield n`` receives n bytes."""
        while True:
            start = ''
            # Scan byte-by-byte until the magic prefix is found.
            while start != self._message_prefix:
                start = (start + (yield 1))[-len(self._message_prefix):]
            command = (yield 12).rstrip('\0')
            length, = struct.unpack('<I', (yield 4))
            if length > self._max_payload_length:
                print 'length too large'
                continue
            checksum = yield 4
            payload = yield length

            # Verify the double-SHA256 checksum before parsing the payload.
            if hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] != checksum:
                print 'invalid hash for', self.transport.getPeer().host, repr(command), length, checksum.encode('hex')
                if p2pool.DEBUG:
                    print hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4].encode('hex'), payload.encode('hex')
                self.badPeerHappened()
                continue

            # message_<command> attributes define how to unpack each command.
            type_ = getattr(self, 'message_' + command, None)
            if type_ is None:
                if p2pool.DEBUG:
                    print 'no type for', repr(command)
                continue

            try:
                self.packetReceived(command, type_.unpack(payload, self.ignore_trailing_payload))
            except:
                print 'RECV', command, payload[:100].encode('hex') + ('...' if len(payload) > 100 else '')
                log.err(None, 'Error handling message: (see RECV line)')
                self.disconnect()

    def packetReceived(self, command, payload2):
        # Dispatch to handle_<command>; drop packets while disconnecting.
        handler = getattr(self, 'handle_' + command, None)
        if handler is None:
            if p2pool.DEBUG:
                print 'no handler for', repr(command)
            return

        if getattr(self, 'connected', True) and not getattr(self, 'disconnecting', False):
            handler(**payload2)

    def disconnect(self):
        """Drop the connection as abruptly as the Twisted version allows."""
        if hasattr(self.transport, 'abortConnection'):
            # Available since Twisted 11.1
            self.transport.abortConnection()
        else:
            # This doesn't always close timed out connections! warned about in main
            self.transport.loseConnection()

    def badPeerHappened(self):
        # Hook for subclasses; default response to a misbehaving peer is to
        # drop the connection.
        self.disconnect()

    def sendPacket(self, command, payload2):
        """Serialize *payload2* with message_<command> and write the framed packet."""
        if len(command) >= 12:
            raise ValueError('command too long')
        type_ = getattr(self, 'message_' + command, None)
        if type_ is None:
            raise ValueError('invalid command')
        #print 'SEND', command, repr(payload2)[:500]
        payload = type_.pack(payload2)
        if len(payload) > self._max_payload_length:
            raise TooLong('payload too long')
        data = self._message_prefix + struct.pack('<12sI', command, len(payload)) + hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] + payload
        self.traffic_happened.happened('p2p/out', len(data))
        self.transport.write(data)

    def __getattr__(self, attr):
        # Expose send_<command>(**payload) convenience wrappers that forward
        # to sendPacket.
        prefix = 'send_'
        if attr.startswith(prefix):
            command = attr[len(prefix):]
            return lambda **payload2: self.sendPacket(command, payload2)
        #return protocol.Protocol.__getattr__(self, attr)
        raise AttributeError(attr)
| gpl-3.0 |
eXistenZNL/SickRage | lib/github/Status.py | 74 | 2548 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class Status(github.GithubObject.NonCompletableGithubObject):
    """
    This class represents status as defined in https://status.github.com/api
    """

    @property
    def status(self):
        """
        :type: string
        """
        return self._status.value

    @property
    def last_updated(self):
        """
        :type: datetime.datetime
        """
        return self._last_updated.value

    def _initAttributes(self):
        # Both fields start out unset; _useAttributes fills in whatever
        # the API response actually contained.
        self._status = github.GithubObject.NotSet
        self._last_updated = github.GithubObject.NotSet

    def _useAttributes(self, attributes):
        # Map each JSON key to the converter that wraps its raw value.
        converters = (
            ("status", self._makeStringAttribute),
            ("last_updated", self._makeDatetimeAttribute),
        )
        for key, make in converters:
            if key in attributes:
                setattr(self, "_" + key, make(attributes[key]))
dims/glance | glance/tests/unit/test_glare_plugin_loader.py | 1 | 7402 | # Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
import pkg_resources
from glance.common import exception
from glance.common.glare import loader
from glance.contrib.plugins.artifacts_sample.v1 import artifact as art1
from glance.contrib.plugins.artifacts_sample.v2 import artifact as art2
from glance.tests import utils
class MyArtifactDuplicate(art1.MyArtifact):
    """Carries the same (type_name, type_version) pair as the sample
    v1 artifact -- used to exercise duplicate detection in the loader."""
    __type_name__ = 'MyArtifact'
    __type_version__ = '1.0.1'
class MyArtifactOk(art1.MyArtifact):
    """Same type name as the sample artifact but a distinct version,
    so it can coexist with it in the loader."""
    __type_name__ = 'MyArtifact'
    __type_version__ = '1.0.2'
class TestArtifactsLoader(utils.BaseTestCase):
    """Tests for glance's stevedore-based artifact plugin loader."""

    def setUp(self):
        self.path = 'glance.contrib.plugins.artifacts_sample'
        # Start every test with a loader that knows exactly one sample
        # plugin; individual tests rebuild it as needed.
        self._setup_loader(['MyArtifact=%s.v1.artifact:MyArtifact' %
                            self.path])
        super(TestArtifactsLoader, self).setUp()

    def _setup_loader(self, artifacts):
        """Build self.loader with entry points faked from the given
        'Name=module:Class' strings instead of real installed packages."""
        self.loader = None
        mock_this = 'stevedore.extension.ExtensionManager._find_entry_points'
        with mock.patch(mock_this) as fep:
            fep.return_value = [
                pkg_resources.EntryPoint.parse(art) for art in artifacts]
            self.loader = loader.ArtifactsPluginLoader(
                'glance.artifacts.types')

    def test_load(self):
        """
        Plugins can be loaded as entrypoint=single plugin and
        entrypoint=[a, list, of, plugins]
        """
        # single version
        self.assertEqual(1, len(self.loader.mgr.extensions))
        self.assertEqual(art1.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact'))
        # entrypoint = [a, list]
        # Derive this test module's dotted path so classes defined in this
        # file can be registered as entry points as well.
        path = os.path.splitext(__file__)[0][__file__.rfind(
            'glance'):].replace('/', '.')
        self._setup_loader([
            'MyArtifact=%s:MyArtifactOk' % path,
            'MyArtifact=%s.v2.artifact:MyArtifact' % self.path,
            'MyArtifact=%s.v1.artifact:MyArtifact' % self.path]),
        self.assertEqual(3, len(self.loader.mgr.extensions))
        # returns the plugin with the latest version
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact'))
        self.assertEqual(art1.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '1.0.1'))

    def test_basic_loader_func(self):
        """Test public methods of PluginLoader class here"""
        # type_version 2 == 2.0 == 2.0.0
        self._setup_loader(
            ['MyArtifact=%s.v2.artifact:MyArtifact' % self.path])
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact'))
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '2.0'))
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '2.0.0'))
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '2'))
        # now make sure that get_class_by_typename works as well
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_typename('MyArtifact'))
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_typename('MyArtifact', '2'))

    def test_config_validation(self):
        """
        Plugins can be loaded on certain conditions:
            * entry point name == type_name
            * no plugin with the same type_name and version has been already
              loaded
        """
        path = 'glance.contrib.plugins.artifacts_sample'
        # here artifacts specific validation is checked
        self.assertRaises(exception.ArtifactNonMatchingTypeName,
                          self._setup_loader,
                          ['non_matching_name=%s.v1.artifact:MyArtifact' %
                           path])
        # make sure this call is ok
        self._setup_loader(['MyArtifact=%s.v1.artifact:MyArtifact' % path])
        art_type = self.loader.get_class_by_endpoint('myartifact')
        self.assertEqual('MyArtifact', art_type.metadata.type_name)
        self.assertEqual('1.0.1', art_type.metadata.type_version)
        # now try to add duplicate artifact with the same type_name and
        # type_version as already exists
        bad_art_path = os.path.splitext(__file__)[0][__file__.rfind(
            'glance'):].replace('/', '.')
        self.assertEqual(art_type.metadata.type_version,
                         MyArtifactDuplicate.metadata.type_version)
        self.assertEqual(art_type.metadata.type_name,
                         MyArtifactDuplicate.metadata.type_name)
        # should raise an exception as (name, version) is not unique
        self.assertRaises(
            exception.ArtifactDuplicateNameTypeVersion, self._setup_loader,
            ['MyArtifact=%s.v1.artifact:MyArtifact' % path,
             'MyArtifact=%s:MyArtifactDuplicate' % bad_art_path])
        # two artifacts with the same name but different versions coexist fine
        self.assertEqual('MyArtifact', MyArtifactOk.metadata.type_name)
        self.assertNotEqual(art_type.metadata.type_version,
                            MyArtifactOk.metadata.type_version)
        self._setup_loader(['MyArtifact=%s.v1.artifact:MyArtifact' % path,
                            'MyArtifact=%s:MyArtifactOk' % bad_art_path])

    def test_check_function(self):
        """
        A test to show that plugin-load specific options in artifacts.conf
        are correctly processed:

        * no plugins can be loaded if load_enabled = False
        * if available_plugins list is given only plugins specified can be
          be loaded
        """
        self.config(load_enabled=False)
        self.assertRaises(exception.ArtifactLoadError,
                          self._setup_loader,
                          ['MyArtifact=%s.v1.artifact:MyArtifact' % self.path])

        self.config(load_enabled=True, available_plugins=['MyArtifact-1.0.2'])
        # v1.0.1 is not in available_plugins, so loading it must fail.
        self.assertRaises(exception.ArtifactLoadError,
                          self._setup_loader,
                          ['MyArtifact=%s.v1.artifact:MyArtifact' % self.path])

        path = os.path.splitext(__file__)[0][__file__.rfind(
            'glance'):].replace('/', '.')
        # MyArtifactOk is version 1.0.2 and therefore allowed.
        self._setup_loader(['MyArtifact=%s:MyArtifactOk' % path])
        # make sure that plugin_map has the expected plugin
        self.assertEqual(MyArtifactOk,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '1.0.2'))
savoirfairelinux/django | tests/redirects_tests/tests.py | 4 | 3823 | from django.conf import settings
from django.contrib.redirects.middleware import RedirectFallbackMiddleware
from django.contrib.redirects.models import Redirect
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.test import TestCase, modify_settings, override_settings
@modify_settings(MIDDLEWARE={'append': 'django.contrib.redirects.middleware.RedirectFallbackMiddleware'})
@override_settings(APPEND_SLASH=False, ROOT_URLCONF='redirects_tests.urls', SITE_ID=1)
class RedirectTests(TestCase):
    """Tests for django.contrib.redirects with RedirectFallbackMiddleware
    appended to the middleware stack."""

    def setUp(self):
        # Redirects in these tests are attached to the current site.
        self.site = Site.objects.get(pk=settings.SITE_ID)

    def test_model(self):
        r1 = Redirect.objects.create(site=self.site, old_path='/initial', new_path='/new_target')
        self.assertEqual(str(r1), "/initial ---> /new_target")

    def test_redirect(self):
        Redirect.objects.create(site=self.site, old_path='/initial', new_path='/new_target')
        response = self.client.get('/initial')
        # target_status_code=404: the destination URL is not actually served.
        self.assertRedirects(response, '/new_target', status_code=301, target_status_code=404)

    @override_settings(APPEND_SLASH=True)
    def test_redirect_with_append_slash(self):
        # With APPEND_SLASH a slash-less request still matches the
        # stored slashed old_path.
        Redirect.objects.create(site=self.site, old_path='/initial/', new_path='/new_target/')
        response = self.client.get('/initial')
        self.assertRedirects(response, '/new_target/', status_code=301, target_status_code=404)

    @override_settings(APPEND_SLASH=True)
    def test_redirect_with_append_slash_and_query_string(self):
        # The query string is part of the stored old_path here.
        Redirect.objects.create(site=self.site, old_path='/initial/?foo', new_path='/new_target/')
        response = self.client.get('/initial?foo')
        self.assertRedirects(response, '/new_target/', status_code=301, target_status_code=404)

    @override_settings(APPEND_SLASH=True)
    def test_redirect_not_found_with_append_slash(self):
        """
        Exercise the second Redirect.DoesNotExist branch in
        RedirectFallbackMiddleware.
        """
        response = self.client.get('/test')
        self.assertEqual(response.status_code, 404)

    def test_redirect_shortcircuits_non_404_response(self):
        """RedirectFallbackMiddleware short-circuits on non-404 requests."""
        response = self.client.get('/')
        self.assertEqual(response.status_code, 200)

    def test_response_gone(self):
        """When the redirect target is '', return a 410"""
        Redirect.objects.create(site=self.site, old_path='/initial', new_path='')
        response = self.client.get('/initial')
        self.assertEqual(response.status_code, 410)

    @modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'})
    def test_sites_not_installed(self):
        # The middleware requires the sites framework at construction time.
        with self.assertRaises(ImproperlyConfigured):
            RedirectFallbackMiddleware()
class OverriddenRedirectFallbackMiddleware(RedirectFallbackMiddleware):
    """Middleware variant that swaps in non-default HTTP response classes
    (redirect -> 302, gone -> 403) for the override tests below."""
    response_redirect_class = HttpResponseRedirect
    response_gone_class = HttpResponseForbidden
@modify_settings(MIDDLEWARE={'append': 'redirects_tests.tests.OverriddenRedirectFallbackMiddleware'})
@override_settings(SITE_ID=1)
class OverriddenRedirectMiddlewareTests(TestCase):
    """Verify that the response classes declared on a
    RedirectFallbackMiddleware subclass are the ones actually returned."""

    def setUp(self):
        self.site = Site.objects.get(pk=settings.SITE_ID)

    def test_response_gone_class(self):
        # Empty new_path means "gone"; the overridden class yields 403
        # instead of the default 410.
        Redirect.objects.create(site=self.site, old_path='/initial/', new_path='')
        response = self.client.get('/initial/')
        self.assertEqual(response.status_code, 403)

    def test_response_redirect_class(self):
        # Overridden redirect class produces a temporary (302) redirect
        # instead of the default permanent one.
        Redirect.objects.create(site=self.site, old_path='/initial/', new_path='/new_target/')
        response = self.client.get('/initial/')
        self.assertEqual(response.status_code, 302)
aricchen/openHR | openerp/addons/l10n_cn/__openerp__.py | 91 | 1827 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2009 Gábor Dukai
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # OpenERP module manifest for the Chinese (PRC) chart of accounts
    # localization. This dict is evaluated by the addon loader.
    'name': '中国会计科目表 - Accounting',
    'version': '1.0',
    'category': 'Localization/Account Charts',
    'author': 'openerp-china.org',
    'maintainer': 'openerp-china.org',
    'website': 'http://openerp-china.org',
    'url': 'http://code.google.com/p/openerp-china/source/browse/#svn/trunk/l10n_cn',
    'description': """
添加中文省份数据
科目类型\会计科目表模板\增值税\辅助核算类别\管理会计凭证簿\财务会计凭证簿
============================================================
""",
    'depends': ['base', 'account'],
    # No demo data shipped with this localization.
    'demo': [],
    # XML data files loaded at install time, in this order.
    'data': [
        'account_chart.xml',
        'l10n_chart_cn_wizard.xml',
        'base_data.xml',
    ],
    'license': 'GPL-3',
    # Not installed automatically when its dependencies are present.
    'auto_install': False,
    'installable': True,
    'images': ['images/config_chart_l10n_cn.jpeg', 'images/l10n_cn_chart.jpeg'],
}
| agpl-3.0 |
thoughtpalette/thoughts.thoughtpalette.com | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/_phpbuiltins.py | 95 | 122088 | # -*- coding: utf-8 -*-
"""
pygments.lexers._phpbuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file loads the function names and their modules from the
php webpage and generates itself.
Do not alter the MODULES dict by hand!
WARNING: the generation transfers quite much data over your
internet connection. don't run that at home, use
a server ;-)
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
MODULES = {'.NET': ['dotnet_load'],
'APC': ['apc_add',
'apc_bin_dump',
'apc_bin_dumpfile',
'apc_bin_load',
'apc_bin_loadfile',
'apc_cache_info',
'apc_cas',
'apc_clear_cache',
'apc_compile_file',
'apc_dec',
'apc_define_constants',
'apc_delete_file',
'apc_delete',
'apc_exists',
'apc_fetch',
'apc_inc',
'apc_load_constants',
'apc_sma_info',
'apc_store'],
'APD': ['apd_breakpoint',
'apd_callstack',
'apd_clunk',
'apd_continue',
'apd_croak',
'apd_dump_function_table',
'apd_dump_persistent_resources',
'apd_dump_regular_resources',
'apd_echo',
'apd_get_active_symbols',
'apd_set_pprof_trace',
'apd_set_session_trace_socket',
'apd_set_session_trace',
'apd_set_session',
'override_function',
'rename_function'],
'Aliases and deprecated Mysqli': ['mysqli_bind_param',
'mysqli_bind_result',
'mysqli_client_encoding',
'mysqli_connect',
'mysqli_disable_reads_from_master',
'mysqli_disable_rpl_parse',
'mysqli_enable_reads_from_master',
'mysqli_enable_rpl_parse',
'mysqli_escape_string',
'mysqli_execute',
'mysqli_fetch',
'mysqli_get_metadata',
'mysqli_master_query',
'mysqli_param_count',
'mysqli_report',
'mysqli_rpl_parse_enabled',
'mysqli_rpl_probe',
'mysqli_rpl_query_type',
'mysqli_send_long_data',
'mysqli_send_query',
'mysqli_set_opt',
'mysqli_slave_query'],
'Apache': ['apache_child_terminate',
'apache_get_modules',
'apache_get_version',
'apache_getenv',
'apache_lookup_uri',
'apache_note',
'apache_request_headers',
'apache_reset_timeout',
'apache_response_headers',
'apache_setenv',
'getallheaders',
'virtual'],
'Array': ['array_change_key_case',
'array_chunk',
'array_combine',
'array_count_values',
'array_diff_assoc',
'array_diff_key',
'array_diff_uassoc',
'array_diff_ukey',
'array_diff',
'array_fill_keys',
'array_fill',
'array_filter',
'array_flip',
'array_intersect_assoc',
'array_intersect_key',
'array_intersect_uassoc',
'array_intersect_ukey',
'array_intersect',
'array_key_exists',
'array_keys',
'array_map',
'array_merge_recursive',
'array_merge',
'array_multisort',
'array_pad',
'array_pop',
'array_product',
'array_push',
'array_rand',
'array_reduce',
'array_replace_recursive',
'array_replace',
'array_reverse',
'array_search',
'array_shift',
'array_slice',
'array_splice',
'array_sum',
'array_udiff_assoc',
'array_udiff_uassoc',
'array_udiff',
'array_uintersect_assoc',
'array_uintersect_uassoc',
'array_uintersect',
'array_unique',
'array_unshift',
'array_values',
'array_walk_recursive',
'array_walk',
'array',
'arsort',
'asort',
'compact',
'count',
'current',
'each',
'end',
'extract',
'in_array',
'key',
'krsort',
'ksort',
'list',
'natcasesort',
'natsort',
'next',
'pos',
'prev',
'range',
'reset',
'rsort',
'shuffle',
'sizeof',
'sort',
'uasort',
'uksort',
'usort'],
'BBCode': ['bbcode_add_element',
'bbcode_add_smiley',
'bbcode_create',
'bbcode_destroy',
'bbcode_parse',
'bbcode_set_arg_parser',
'bbcode_set_flags'],
'BC Math': ['bcadd',
'bccomp',
'bcdiv',
'bcmod',
'bcmul',
'bcpow',
'bcpowmod',
'bcscale',
'bcsqrt',
'bcsub'],
'Bzip2': ['bzclose',
'bzcompress',
'bzdecompress',
'bzerrno',
'bzerror',
'bzerrstr',
'bzflush',
'bzopen',
'bzread',
'bzwrite'],
'COM': ['com_addref',
'com_create_guid',
'com_event_sink',
'com_get_active_object',
'com_get',
'com_invoke',
'com_isenum',
'com_load_typelib',
'com_load',
'com_message_pump',
'com_print_typeinfo',
'com_propget',
'com_propput',
'com_propset',
'com_release',
'com_set',
'variant_abs',
'variant_add',
'variant_and',
'variant_cast',
'variant_cat',
'variant_cmp',
'variant_date_from_timestamp',
'variant_date_to_timestamp',
'variant_div',
'variant_eqv',
'variant_fix',
'variant_get_type',
'variant_idiv',
'variant_imp',
'variant_int',
'variant_mod',
'variant_mul',
'variant_neg',
'variant_not',
'variant_or',
'variant_pow',
'variant_round',
'variant_set_type',
'variant_set',
'variant_sub',
'variant_xor'],
'CUBRID': ['cubrid_affected_rows',
'cubrid_bind',
'cubrid_close_prepare',
'cubrid_close_request',
'cubrid_col_get',
'cubrid_col_size',
'cubrid_column_names',
'cubrid_column_types',
'cubrid_commit',
'cubrid_connect_with_url',
'cubrid_connect',
'cubrid_current_oid',
'cubrid_disconnect',
'cubrid_drop',
'cubrid_error_code_facility',
'cubrid_error_code',
'cubrid_error_msg',
'cubrid_execute',
'cubrid_fetch',
'cubrid_free_result',
'cubrid_get_charset',
'cubrid_get_class_name',
'cubrid_get_client_info',
'cubrid_get_db_parameter',
'cubrid_get_server_info',
'cubrid_get',
'cubrid_insert_id',
'cubrid_is_instance',
'cubrid_lob_close',
'cubrid_lob_export',
'cubrid_lob_get',
'cubrid_lob_send',
'cubrid_lob_size',
'cubrid_lock_read',
'cubrid_lock_write',
'cubrid_move_cursor',
'cubrid_num_cols',
'cubrid_num_rows',
'cubrid_prepare',
'cubrid_put',
'cubrid_rollback',
'cubrid_schema',
'cubrid_seq_drop',
'cubrid_seq_insert',
'cubrid_seq_put',
'cubrid_set_add',
'cubrid_set_drop',
'cubrid_version'],
'Cairo': ['cairo_create',
'cairo_font_face_get_type',
'cairo_font_face_status',
'cairo_font_options_create',
'cairo_font_options_equal',
'cairo_font_options_get_antialias',
'cairo_font_options_get_hint_metrics',
'cairo_font_options_get_hint_style',
'cairo_font_options_get_subpixel_order',
'cairo_font_options_hash',
'cairo_font_options_merge',
'cairo_font_options_set_antialias',
'cairo_font_options_set_hint_metrics',
'cairo_font_options_set_hint_style',
'cairo_font_options_set_subpixel_order',
'cairo_font_options_status',
'cairo_format_stride_for_width',
'cairo_image_surface_create_for_data',
'cairo_image_surface_create_from_png',
'cairo_image_surface_create',
'cairo_image_surface_get_data',
'cairo_image_surface_get_format',
'cairo_image_surface_get_height',
'cairo_image_surface_get_stride',
'cairo_image_surface_get_width',
'cairo_matrix_create_scale',
'cairo_matrix_create_translate',
'cairo_matrix_invert',
'cairo_matrix_multiply',
'cairo_matrix_rotate',
'cairo_matrix_transform_distance',
'cairo_matrix_transform_point',
'cairo_matrix_translate',
'cairo_pattern_add_color_stop_rgb',
'cairo_pattern_add_color_stop_rgba',
'cairo_pattern_create_for_surface',
'cairo_pattern_create_linear',
'cairo_pattern_create_radial',
'cairo_pattern_create_rgb',
'cairo_pattern_create_rgba',
'cairo_pattern_get_color_stop_count',
'cairo_pattern_get_color_stop_rgba',
'cairo_pattern_get_extend',
'cairo_pattern_get_filter',
'cairo_pattern_get_linear_points',
'cairo_pattern_get_matrix',
'cairo_pattern_get_radial_circles',
'cairo_pattern_get_rgba',
'cairo_pattern_get_surface',
'cairo_pattern_get_type',
'cairo_pattern_set_extend',
'cairo_pattern_set_filter',
'cairo_pattern_set_matrix',
'cairo_pattern_status',
'cairo_pdf_surface_create',
'cairo_pdf_surface_set_size',
'cairo_ps_get_levels',
'cairo_ps_level_to_string',
'cairo_ps_surface_create',
'cairo_ps_surface_dsc_begin_page_setup',
'cairo_ps_surface_dsc_begin_setup',
'cairo_ps_surface_dsc_comment',
'cairo_ps_surface_get_eps',
'cairo_ps_surface_restrict_to_level',
'cairo_ps_surface_set_eps',
'cairo_ps_surface_set_size',
'cairo_scaled_font_create',
'cairo_scaled_font_extents',
'cairo_scaled_font_get_ctm',
'cairo_scaled_font_get_font_face',
'cairo_scaled_font_get_font_matrix',
'cairo_scaled_font_get_font_options',
'cairo_scaled_font_get_scale_matrix',
'cairo_scaled_font_get_type',
'cairo_scaled_font_glyph_extents',
'cairo_scaled_font_status',
'cairo_scaled_font_text_extents',
'cairo_surface_copy_page',
'cairo_surface_create_similar',
'cairo_surface_finish',
'cairo_surface_flush',
'cairo_surface_get_content',
'cairo_surface_get_device_offset',
'cairo_surface_get_font_options',
'cairo_surface_get_type',
'cairo_surface_mark_dirty_rectangle',
'cairo_surface_mark_dirty',
'cairo_surface_set_device_offset',
'cairo_surface_set_fallback_resolution',
'cairo_surface_show_page',
'cairo_surface_status',
'cairo_surface_write_to_png',
'cairo_svg_surface_create',
'cairo_svg_surface_restrict_to_version',
'cairo_svg_version_to_string'],
'Calendar': ['cal_days_in_month',
'cal_from_jd',
'cal_info',
'cal_to_jd',
'easter_date',
'easter_days',
'FrenchToJD',
'GregorianToJD',
'JDDayOfWeek',
'JDMonthName',
'JDToFrench',
'JDToGregorian',
'jdtojewish',
'JDToJulian',
'jdtounix',
'JewishToJD',
'JulianToJD',
'unixtojd'],
'Classes/Object': ['call_user_method_array',
'call_user_method',
'class_alias',
'class_exists',
'get_called_class',
'get_class_methods',
'get_class_vars',
'get_class',
'get_declared_classes',
'get_declared_interfaces',
'get_object_vars',
'get_parent_class',
'interface_exists',
'is_a',
'is_subclass_of',
'method_exists',
'property_exists'],
'Classkit': ['classkit_import',
'classkit_method_add',
'classkit_method_copy',
'classkit_method_redefine',
'classkit_method_remove',
'classkit_method_rename'],
'Crack': ['crack_check',
'crack_closedict',
'crack_getlastmessage',
'crack_opendict'],
'Ctype': ['ctype_alnum',
'ctype_alpha',
'ctype_cntrl',
'ctype_digit',
'ctype_graph',
'ctype_lower',
'ctype_print',
'ctype_punct'],
'Cyrus': ['cyrus_authenticate',
'cyrus_bind',
'cyrus_close',
'cyrus_connect',
'cyrus_query',
'cyrus_unbind'],
'DB++': ['dbplus_add',
'dbplus_aql',
'dbplus_chdir',
'dbplus_close',
'dbplus_curr',
'dbplus_errcode',
'dbplus_errno',
'dbplus_find',
'dbplus_first',
'dbplus_flush',
'dbplus_freealllocks',
'dbplus_freelock',
'dbplus_freerlocks',
'dbplus_getlock',
'dbplus_getunique',
'dbplus_info',
'dbplus_last',
'dbplus_lockrel',
'dbplus_next',
'dbplus_open',
'dbplus_prev',
'dbplus_rchperm',
'dbplus_rcreate',
'dbplus_rcrtexact',
'dbplus_rcrtlike',
'dbplus_resolve',
'dbplus_restorepos',
'dbplus_rkeys',
'dbplus_ropen',
'dbplus_rquery',
'dbplus_rrename',
'dbplus_rsecindex',
'dbplus_runlink',
'dbplus_rzap',
'dbplus_savepos',
'dbplus_setindex',
'dbplus_setindexbynumber',
'dbplus_sql',
'dbplus_tcl',
'dbplus_tremove',
'dbplus_undo',
'dbplus_undoprepare',
'dbplus_unlockrel',
'dbplus_unselect',
'dbplus_update',
'dbplus_xlockrel',
'dbplus_xunlockrel'],
'DBA': ['dba_close',
'dba_delete',
'dba_exists',
'dba_fetch',
'dba_firstkey',
'dba_handlers',
'dba_insert',
'dba_key_split',
'dba_list',
'dba_nextkey',
'dba_open',
'dba_optimize',
'dba_popen',
'dba_replace',
'dba_sync'],
'DOM': ['dom_import_simplexml'],
'DOM XML (PHP 4)': ['domxml_new_doc',
'domxml_open_file',
'domxml_open_mem',
'domxml_version',
'domxml_xmltree',
'domxml_xslt_stylesheet_doc',
'domxml_xslt_stylesheet_file',
'domxml_xslt_stylesheet',
'domxml_xslt_version',
'xpath_eval_expression',
'xpath_eval',
'xpath_new_context',
'xpath_register_ns_auto',
'xpath_register_ns',
'xptr_eval',
'xptr_new_context'],
'Date/Time': ['checkdate',
'date_add',
'date_create_from_format',
'date_create',
'date_date_set',
'date_default_timezone_get',
'date_default_timezone_set',
'date_diff',
'date_format',
'date_get_last_errors',
'date_interval_create_from_date_string',
'date_interval_format',
'date_isodate_set',
'date_modify',
'date_offset_get',
'date_parse_from_format',
'date_parse',
'date_sub',
'date_sun_info',
'date_sunrise',
'date_sunset',
'date_time_set',
'date_timestamp_get',
'date_timestamp_set',
'date_timezone_get',
'date_timezone_set',
'date',
'getdate',
'gettimeofday',
'gmdate',
'gmmktime',
'gmstrftime',
'idate',
'localtime',
'microtime',
'mktime',
'strftime',
'strptime',
'strtotime',
'time',
'timezone_abbreviations_list',
'timezone_identifiers_list',
'timezone_location_get',
'timezone_name_from_abbr',
'timezone_name_get',
'timezone_offset_get',
'timezone_open',
'timezone_transitions_get',
'timezone_version_get'],
'Direct IO': ['dio_close', 'dio_fcntl', 'dio_open'],
'Directory': ['chdir',
'chroot',
'closedir',
'getcwd',
'opendir',
'readdir',
'rewinddir',
'scandir'],
'Enchant': ['enchant_broker_describe',
'enchant_broker_dict_exists',
'enchant_broker_free_dict',
'enchant_broker_free',
'enchant_broker_get_error',
'enchant_broker_init',
'enchant_broker_list_dicts',
'enchant_broker_request_dict',
'enchant_broker_request_pwl_dict',
'enchant_broker_set_ordering',
'enchant_dict_add_to_personal',
'enchant_dict_add_to_session',
'enchant_dict_check',
'enchant_dict_describe',
'enchant_dict_get_error',
'enchant_dict_is_in_session',
'enchant_dict_quick_check',
'enchant_dict_store_replacement',
'enchant_dict_suggest'],
'Error Handling': ['debug_backtrace',
'debug_print_backtrace',
'error_get_last',
'error_log',
'error_reporting',
'restore_error_handler',
'restore_exception_handler',
'set_error_handler',
'set_exception_handler',
'trigger_error',
'user_error'],
'Exif': ['exif_imagetype',
'exif_read_data',
'exif_tagname',
'exif_thumbnail',
'read_exif_data'],
'Expect': ['expect_expectl'],
'FAM': ['fam_cancel_monitor',
'fam_close',
'fam_monitor_collection',
'fam_monitor_directory',
'fam_monitor_file',
'fam_next_event',
'fam_open',
'fam_pending',
'fam_resume_monitor',
'fam_suspend_monitor'],
'FDF': ['fdf_add_doc_javascript',
'fdf_add_template',
'fdf_close',
'fdf_create',
'fdf_enum_values',
'fdf_errno',
'fdf_error',
'fdf_get_ap',
'fdf_get_attachment',
'fdf_get_encoding',
'fdf_get_file',
'fdf_get_flags',
'fdf_get_opt',
'fdf_get_status',
'fdf_get_value',
'fdf_get_version',
'fdf_header',
'fdf_next_field_name',
'fdf_open_string',
'fdf_open',
'fdf_remove_item',
'fdf_save_string',
'fdf_save',
'fdf_set_ap',
'fdf_set_encoding',
'fdf_set_file',
'fdf_set_flags',
'fdf_set_javascript_action',
'fdf_set_on_import_javascript',
'fdf_set_opt',
'fdf_set_status',
'fdf_set_submit_form_action',
'fdf_set_target_frame',
'fdf_set_value',
'fdf_set_version'],
'FTP': ['ftp_alloc',
'ftp_cdup',
'ftp_chdir',
'ftp_chmod',
'ftp_close',
'ftp_connect',
'ftp_delete',
'ftp_exec',
'ftp_fget',
'ftp_fput',
'ftp_get_option',
'ftp_get',
'ftp_login',
'ftp_mdtm',
'ftp_mkdir',
'ftp_nb_continue',
'ftp_nb_fget',
'ftp_nb_fput',
'ftp_nb_get',
'ftp_nb_put',
'ftp_nlist',
'ftp_pasv',
'ftp_put',
'ftp_pwd',
'ftp_quit',
'ftp_raw',
'ftp_rawlist',
'ftp_rename',
'ftp_rmdir',
'ftp_set_option',
'ftp_site',
'ftp_size',
'ftp_ssl_connect',
'ftp_systype'],
'Fileinfo': ['finfo_buffer',
'finfo_close',
'finfo_file',
'finfo_open',
'finfo_set_flags',
'mime_content_type'],
'Filesystem': ['basename',
'chgrp',
'chmod',
'chown',
'clearstatcache',
'copy',
'dirname',
'disk_free_space',
'disk_total_space',
'diskfreespace',
'fclose',
'feof',
'fflush',
'fgetc',
'fgetcsv',
'fgets',
'fgetss',
'file_exists',
'file_get_contents',
'file_put_contents',
'file',
'fileatime',
'filectime',
'filegroup',
'fileinode',
'filemtime',
'fileowner',
'fileperms',
'filesize',
'filetype',
'flock',
'fnmatch',
'fopen',
'fpassthru',
'fputcsv',
'fputs',
'fread',
'fscanf',
'fseek',
'fstat',
'ftell',
'ftruncate',
'fwrite',
'glob',
'is_dir',
'is_executable',
'is_file',
'is_link',
'is_readable',
'is_uploaded_file',
'is_writable',
'is_writeable',
'lchgrp',
'lchown',
'link',
'linkinfo',
'lstat',
'mkdir',
'move_uploaded_file',
'parse_ini_file',
'parse_ini_string',
'pathinfo',
'pclose',
'popen',
'readfile',
'readlink',
'realpath_cache_get',
'realpath_cache_size',
'realpath',
'rename',
'rewind',
'rmdir',
'set_file_buffer',
'stat',
'symlink',
'tempnam',
'tmpfile',
'touch',
'umask',
'unlink'],
'Filter': ['filter_has_var',
'filter_id',
'filter_input_array',
'filter_input',
'filter_list',
'filter_var_array',
'filter_var'],
'Firebird/InterBase': ['ibase_add_user',
'ibase_affected_rows',
'ibase_backup',
'ibase_blob_add',
'ibase_blob_cancel',
'ibase_blob_close',
'ibase_blob_create',
'ibase_blob_echo',
'ibase_blob_get',
'ibase_blob_import',
'ibase_blob_info',
'ibase_blob_open',
'ibase_close',
'ibase_commit_ret',
'ibase_commit',
'ibase_connect',
'ibase_db_info',
'ibase_delete_user',
'ibase_drop_db',
'ibase_errcode',
'ibase_errmsg',
'ibase_execute',
'ibase_fetch_assoc',
'ibase_fetch_object',
'ibase_fetch_row',
'ibase_field_info',
'ibase_free_event_handler',
'ibase_free_query',
'ibase_free_result',
'ibase_gen_id',
'ibase_maintain_db',
'ibase_modify_user',
'ibase_name_result',
'ibase_num_fields',
'ibase_num_params',
'ibase_param_info',
'ibase_pconnect',
'ibase_prepare',
'ibase_query',
'ibase_restore',
'ibase_rollback_ret',
'ibase_rollback',
'ibase_server_info',
'ibase_service_attach',
'ibase_service_detach',
'ibase_set_event_handler',
'ibase_timefmt',
'ibase_trans',
'ibase_wait_event'],
'FriBiDi': ['fribidi_log2vis'],
'FrontBase': ['fbsql_affected_rows',
'fbsql_autocommit',
'fbsql_blob_size',
'fbsql_change_user',
'fbsql_clob_size',
'fbsql_close',
'fbsql_commit',
'fbsql_connect',
'fbsql_create_blob',
'fbsql_create_clob',
'fbsql_create_db',
'fbsql_data_seek',
'fbsql_database_password',
'fbsql_database',
'fbsql_db_query',
'fbsql_db_status',
'fbsql_drop_db',
'fbsql_errno',
'fbsql_error',
'fbsql_fetch_array',
'fbsql_fetch_assoc',
'fbsql_fetch_field',
'fbsql_fetch_lengths',
'fbsql_fetch_object',
'fbsql_fetch_row',
'fbsql_field_flags',
'fbsql_field_len',
'fbsql_field_name',
'fbsql_field_seek',
'fbsql_field_table',
'fbsql_field_type',
'fbsql_free_result',
'fbsql_get_autostart_info',
'fbsql_hostname',
'fbsql_insert_id',
'fbsql_list_dbs',
'fbsql_list_fields',
'fbsql_list_tables',
'fbsql_next_result',
'fbsql_num_fields',
'fbsql_num_rows',
'fbsql_password',
'fbsql_pconnect',
'fbsql_query',
'fbsql_read_blob',
'fbsql_read_clob',
'fbsql_result',
'fbsql_rollback',
'fbsql_rows_fetched',
'fbsql_select_db',
'fbsql_set_characterset',
'fbsql_set_lob_mode',
'fbsql_set_password',
'fbsql_set_transaction',
'fbsql_start_db',
'fbsql_stop_db',
'fbsql_table_name',
'fbsql_tablename',
'fbsql_username',
'fbsql_warnings'],
'Function handling': ['call_user_func_array',
'call_user_func',
'create_function',
'forward_static_call_array',
'forward_static_call',
'func_get_arg',
'func_get_args',
'func_num_args',
'function_exists',
'get_defined_functions',
'register_shutdown_function',
'register_tick_function',
'unregister_tick_function'],
'GD and Image': ['gd_info',
'getimagesize',
'image_type_to_extension',
'image_type_to_mime_type'],
'GMP': ['gmp_abs',
'gmp_add',
'gmp_and',
'gmp_clrbit',
'gmp_cmp',
'gmp_com',
'gmp_div_q',
'gmp_div_qr',
'gmp_div_r',
'gmp_div',
'gmp_divexact',
'gmp_fact',
'gmp_gcd',
'gmp_gcdext',
'gmp_hamdist',
'gmp_init',
'gmp_intval',
'gmp_invert',
'gmp_jacobi',
'gmp_legendre',
'gmp_mod',
'gmp_mul',
'gmp_neg',
'gmp_nextprime',
'gmp_or',
'gmp_perfect_square',
'gmp_popcount',
'gmp_pow',
'gmp_powm',
'gmp_prob_prime',
'gmp_random',
'gmp_scan0',
'gmp_scan1',
'gmp_setbit',
'gmp_sign',
'gmp_sqrt',
'gmp_sqrtrem',
'gmp_strval',
'gmp_sub',
'gmp_testbit',
'gmp_xor'],
'GeoIP': ['geoip_continent_code_by_name',
'geoip_country_code_by_name',
'geoip_country_code3_by_name',
'geoip_country_name_by_name',
'geoip_database_info',
'geoip_db_avail',
'geoip_db_filename',
'geoip_db_get_all_info',
'geoip_id_by_name',
'geoip_isp_by_name',
'geoip_org_by_name',
'geoip_record_by_name',
'geoip_region_by_name',
'geoip_region_name_by_code',
'geoip_time_zone_by_country_and_region'],
'Gettext': ['bind_textdomain_codeset',
'bindtextdomain',
'dcgettext',
'dcngettext',
'dgettext',
'dngettext',
'gettext',
'ngettext',
'textdomain'],
'GnuPG': ['gnupg_adddecryptkey',
'gnupg_addencryptkey',
'gnupg_addsignkey',
'gnupg_cleardecryptkeys',
'gnupg_clearencryptkeys',
'gnupg_clearsignkeys',
'gnupg_decrypt',
'gnupg_decryptverify',
'gnupg_encrypt',
'gnupg_encryptsign',
'gnupg_export',
'gnupg_geterror',
'gnupg_getprotocol',
'gnupg_import',
'gnupg_init',
'gnupg_keyinfo',
'gnupg_setarmor',
'gnupg_seterrormode',
'gnupg_setsignmode',
'gnupg_sign',
'gnupg_verify'],
'Gopher': ['gopher_parsedir'],
'Grapheme': ['grapheme_extract',
'grapheme_stripos',
'grapheme_stristr',
'grapheme_strlen',
'grapheme_strpos',
'grapheme_strripos',
'grapheme_strrpos',
'grapheme_strstr',
'grapheme_substr'],
'Gupnp': ['gupnp_context_get_host_ip',
'gupnp_context_get_port',
'gupnp_context_get_subscription_timeout',
'gupnp_context_host_path',
'gupnp_context_new',
'gupnp_context_set_subscription_timeout',
'gupnp_context_timeout_add',
'gupnp_context_unhost_path',
'gupnp_control_point_browse_start',
'gupnp_control_point_browse_stop',
'gupnp_control_point_callback_set',
'gupnp_control_point_new',
'gupnp_device_action_callback_set',
'gupnp_device_info_get_service',
'gupnp_device_info_get',
'gupnp_root_device_get_available',
'gupnp_root_device_get_relative_location',
'gupnp_root_device_new',
'gupnp_root_device_set_available',
'gupnp_root_device_start',
'gupnp_root_device_stop',
'gupnp_service_action_get',
'gupnp_service_action_return_error',
'gupnp_service_action_return',
'gupnp_service_action_set',
'gupnp_service_freeze_notify',
'gupnp_service_info_get_introspection',
'gupnp_service_info_get',
'gupnp_service_introspection_get_state_variable',
'gupnp_service_notify',
'gupnp_service_proxy_action_get',
'gupnp_service_proxy_action_set',
'gupnp_service_proxy_add_notify',
'gupnp_service_proxy_callback_set',
'gupnp_service_proxy_get_subscribed',
'gupnp_service_proxy_remove_notify',
'gupnp_service_proxy_set_subscribed',
'gupnp_service_thaw_notify'],
'HTTP': ['http_cache_etag',
'http_cache_last_modified',
'http_chunked_decode',
'http_deflate',
'http_inflate',
'http_build_cookie',
'http_date',
'http_get_request_body_stream',
'http_get_request_body',
'http_get_request_headers',
'http_match_etag',
'http_match_modified',
'http_match_request_header',
'http_support',
'http_negotiate_charset',
'http_negotiate_content_type',
'http_negotiate_language',
'ob_deflatehandler',
'ob_etaghandler',
'ob_inflatehandler',
'http_parse_cookie',
'http_parse_headers',
'http_parse_message',
'http_parse_params',
'http_persistent_handles_clean',
'http_persistent_handles_count',
'http_persistent_handles_ident',
'http_get',
'http_head',
'http_post_data',
'http_post_fields',
'http_put_data',
'http_put_file',
'http_put_stream',
'http_request_body_encode',
'http_request_method_exists',
'http_request_method_name',
'http_request_method_register',
'http_request_method_unregister',
'http_request',
'http_redirect',
'http_send_content_disposition',
'http_send_content_type',
'http_send_data',
'http_send_file',
'http_send_last_modified',
'http_send_status',
'http_send_stream',
'http_throttle',
'http_build_str',
'http_build_url'],
'Hash': ['hash_algos',
'hash_copy',
'hash_file',
'hash_final',
'hash_hmac_file',
'hash_hmac',
'hash_init',
'hash_update_file',
'hash_update_stream',
'hash_update',
'hash'],
'Hyperwave': ['hw_Array2Objrec',
'hw_changeobject',
'hw_Children',
'hw_ChildrenObj',
'hw_Close',
'hw_Connect',
'hw_connection_info',
'hw_cp',
'hw_Deleteobject',
'hw_DocByAnchor',
'hw_DocByAnchorObj',
'hw_Document_Attributes',
'hw_Document_BodyTag',
'hw_Document_Content',
'hw_Document_SetContent',
'hw_Document_Size',
'hw_dummy',
'hw_EditText',
'hw_Error',
'hw_ErrorMsg',
'hw_Free_Document',
'hw_GetAnchors',
'hw_GetAnchorsObj',
'hw_GetAndLock',
'hw_GetChildColl',
'hw_GetChildCollObj',
'hw_GetChildDocColl',
'hw_GetChildDocCollObj',
'hw_GetObject',
'hw_GetObjectByQuery',
'hw_GetObjectByQueryColl',
'hw_GetObjectByQueryCollObj',
'hw_GetObjectByQueryObj',
'hw_GetParents',
'hw_GetParentsObj',
'hw_getrellink',
'hw_GetRemote',
'hw_getremotechildren',
'hw_GetSrcByDestObj',
'hw_GetText',
'hw_getusername',
'hw_Identify',
'hw_InCollections',
'hw_Info',
'hw_InsColl',
'hw_InsDoc',
'hw_insertanchors',
'hw_InsertDocument',
'hw_InsertObject',
'hw_mapid',
'hw_Modifyobject',
'hw_mv',
'hw_New_Document',
'hw_objrec2array',
'hw_Output_Document',
'hw_pConnect',
'hw_PipeDocument',
'hw_Root',
'hw_setlinkroot',
'hw_stat',
'hw_Unlock',
'hw_Who'],
'Hyperwave API': ['hw_api_attribute',
'hwapi_hgcsp',
'hw_api_content',
'hw_api_object'],
'IBM DB2': ['db2_autocommit',
'db2_bind_param',
'db2_client_info',
'db2_close',
'db2_column_privileges',
'db2_columns',
'db2_commit',
'db2_conn_error',
'db2_conn_errormsg',
'db2_connect',
'db2_cursor_type',
'db2_escape_string',
'db2_exec',
'db2_execute',
'db2_fetch_array',
'db2_fetch_assoc',
'db2_fetch_both',
'db2_fetch_object',
'db2_fetch_row',
'db2_field_display_size',
'db2_field_name',
'db2_field_num',
'db2_field_precision',
'db2_field_scale',
'db2_field_type',
'db2_field_width',
'db2_foreign_keys',
'db2_free_result',
'db2_free_stmt',
'db2_get_option',
'db2_last_insert_id'],
'ID3': ['id3_get_frame_long_name',
'id3_get_frame_short_name',
'id3_get_genre_id',
'id3_get_genre_list',
'id3_get_genre_name',
'id3_get_tag',
'id3_get_version',
'id3_remove_tag',
'id3_set_tag'],
'IDN': ['idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'],
'IIS': ['iis_add_server',
'iis_get_dir_security',
'iis_get_script_map',
'iis_get_server_by_comment',
'iis_get_server_by_path',
'iis_get_server_rights',
'iis_get_service_state',
'iis_remove_server',
'iis_set_app_settings',
'iis_set_dir_security',
'iis_set_script_map',
'iis_set_server_rights',
'iis_start_server',
'iis_start_service',
'iis_stop_server',
'iis_stop_service'],
'IMAP': ['imap_8bit',
'imap_alerts',
'imap_append',
'imap_base64',
'imap_binary',
'imap_body',
'imap_bodystruct',
'imap_check',
'imap_clearflag_full',
'imap_close',
'imap_createmailbox',
'imap_delete',
'imap_deletemailbox',
'imap_errors',
'imap_expunge',
'imap_fetch_overview',
'imap_fetchbody',
'imap_fetchheader',
'imap_fetchmime',
'imap_fetchstructure',
'imap_gc',
'imap_get_quota',
'imap_get_quotaroot',
'imap_getacl',
'imap_getmailboxes',
'imap_getsubscribed',
'imap_header',
'imap_headerinfo',
'imap_headers',
'imap_last_error',
'imap_list',
'imap_listmailbox',
'imap_listscan',
'imap_listsubscribed',
'imap_lsub',
'imap_mail_compose',
'imap_mail_copy',
'imap_mail_move',
'imap_mail',
'imap_mailboxmsginfo',
'imap_mime_header_decode',
'imap_msgno',
'imap_num_msg',
'imap_num_recent',
'imap_open',
'imap_ping',
'imap_qprint',
'imap_renamemailbox',
'imap_reopen',
'imap_rfc822_parse_adrlist',
'imap_rfc822_parse_headers',
'imap_rfc822_write_address',
'imap_savebody',
'imap_scanmailbox',
'imap_search',
'imap_set_quota',
'imap_setacl',
'imap_setflag_full',
'imap_sort',
'imap_status',
'imap_subscribe',
'imap_thread',
'imap_timeout',
'imap_uid',
'imap_undelete',
'imap_unsubscribe',
'imap_utf7_decode',
'imap_utf7_encode',
'imap_utf8'],
'Informix': ['ifx_affected_rows',
'ifx_blobinfile_mode',
'ifx_byteasvarchar',
'ifx_close',
'ifx_connect',
'ifx_copy_blob',
'ifx_create_blob',
'ifx_create_char',
'ifx_do',
'ifx_error',
'ifx_errormsg',
'ifx_fetch_row',
'ifx_fieldproperties',
'ifx_fieldtypes',
'ifx_free_blob',
'ifx_free_char',
'ifx_free_result',
'ifx_get_blob',
'ifx_get_char',
'ifx_getsqlca',
'ifx_htmltbl_result',
'ifx_nullformat',
'ifx_num_fields',
'ifx_num_rows',
'ifx_pconnect',
'ifx_prepare',
'ifx_query',
'ifx_textasvarchar',
'ifx_update_blob',
'ifx_update_char',
'ifxus_close_slob',
'ifxus_create_slob',
'ifxus_free_slob',
'ifxus_open_slob',
'ifxus_read_slob',
'ifxus_seek_slob',
'ifxus_tell_slob',
'ifxus_write_slob'],
'Ingres': ['ingres_autocommit_state',
'ingres_autocommit',
'ingres_charset',
'ingres_close',
'ingres_commit',
'ingres_connect',
'ingres_cursor',
'ingres_errno',
'ingres_error',
'ingres_errsqlstate',
'ingres_escape_string',
'ingres_execute',
'ingres_fetch_array',
'ingres_fetch_assoc',
'ingres_fetch_object',
'ingres_fetch_proc_return',
'ingres_fetch_row',
'ingres_field_length',
'ingres_field_name',
'ingres_field_nullable',
'ingres_field_precision',
'ingres_field_scale',
'ingres_field_type',
'ingres_free_result',
'ingres_next_error',
'ingres_num_fields',
'ingres_num_rows',
'ingres_pconnect',
'ingres_prepare',
'ingres_query',
'ingres_result_seek',
'ingres_rollback',
'ingres_set_environment',
'ingres_unbuffered_query'],
'Inotify': ['inotify_add_watch',
'inotify_init',
'inotify_queue_len',
'inotify_read',
'inotify_rm_watch'],
'JSON': ['json_decode', 'json_encode', 'json_last_error'],
'Java': ['java_last_exception_clear', 'java_last_exception_get'],
'Judy': ['judy_type', 'judy_version'],
'KADM5': ['kadm5_chpass_principal',
'kadm5_create_principal',
'kadm5_delete_principal',
'kadm5_destroy',
'kadm5_flush',
'kadm5_get_policies',
'kadm5_get_principal',
'kadm5_get_principals',
'kadm5_init_with_password',
'kadm5_modify_principal'],
'LDAP': ['ldap_8859_to_t61',
'ldap_add',
'ldap_bind',
'ldap_close',
'ldap_compare',
'ldap_connect',
'ldap_count_entries',
'ldap_delete',
'ldap_dn2ufn',
'ldap_err2str',
'ldap_errno',
'ldap_error',
'ldap_explode_dn',
'ldap_first_attribute',
'ldap_first_entry',
'ldap_first_reference',
'ldap_free_result',
'ldap_get_attributes',
'ldap_get_dn',
'ldap_get_entries',
'ldap_get_option',
'ldap_get_values_len',
'ldap_get_values',
'ldap_list',
'ldap_mod_add',
'ldap_mod_del',
'ldap_mod_replace',
'ldap_modify',
'ldap_next_attribute',
'ldap_next_entry',
'ldap_next_reference',
'ldap_parse_reference',
'ldap_parse_result',
'ldap_read',
'ldap_rename',
'ldap_sasl_bind',
'ldap_search',
'ldap_set_option',
'ldap_set_rebind_proc',
'ldap_sort',
'ldap_start_tls',
'ldap_t61_to_8859',
'ldap_unbind'],
'LZF': ['lzf_compress', 'lzf_decompress', 'lzf_optimized_for'],
'Libevent': ['event_add',
'event_base_free',
'event_base_loop',
'event_base_loopbreak',
'event_base_loopexit',
'event_base_new',
'event_base_priority_init',
'event_base_set',
'event_buffer_base_set',
'event_buffer_disable',
'event_buffer_enable',
'event_buffer_fd_set',
'event_buffer_free',
'event_buffer_new',
'event_buffer_priority_set',
'event_buffer_read',
'event_buffer_set_callback',
'event_buffer_timeout_set',
'event_buffer_watermark_set',
'event_buffer_write',
'event_del',
'event_free',
'event_new',
'event_set'],
'Lotus Notes': ['notes_body',
'notes_copy_db',
'notes_create_db',
'notes_create_note',
'notes_drop_db',
'notes_find_note',
'notes_header_info',
'notes_list_msgs',
'notes_mark_read',
'notes_mark_unread',
'notes_nav_create',
'notes_search',
'notes_unread',
'notes_version'],
'MCVE': ['m_checkstatus',
'm_completeauthorizations',
'm_connect',
'm_connectionerror',
'm_deletetrans',
'm_destroyconn',
'm_destroyengine',
'm_getcell',
'm_getcellbynum',
'm_getcommadelimited',
'm_getheader',
'm_initconn',
'm_initengine',
'm_iscommadelimited',
'm_maxconntimeout',
'm_monitor',
'm_numcolumns',
'm_numrows',
'm_parsecommadelimited',
'm_responsekeys'],
'Mail': ['ezmlm_hash', 'mail'],
'Mailparse': ['mailparse_determine_best_xfer_encoding',
'mailparse_msg_create',
'mailparse_msg_extract_part_file',
'mailparse_msg_extract_part',
'mailparse_msg_extract_whole_part_file',
'mailparse_msg_free',
'mailparse_msg_get_part_data',
'mailparse_msg_get_part',
'mailparse_msg_get_structure',
'mailparse_msg_parse_file',
'mailparse_msg_parse',
'mailparse_rfc822_parse_addresses',
'mailparse_stream_encode',
'mailparse_uudecode_all'],
'Math': ['abs',
'acos',
'acosh',
'asin',
'asinh',
'atan2',
'atan',
'atanh',
'base_convert',
'bindec',
'ceil',
'cos',
'cosh',
'decbin',
'dechex',
'decoct',
'deg2rad',
'exp',
'expm1'],
'MaxDB': ['maxdb_affected_rows',
'maxdb_autocommit',
'maxdb_bind_param',
'maxdb_bind_result',
'maxdb_change_user',
'maxdb_character_set_name',
'maxdb_client_encoding',
'maxdb_close_long_data',
'maxdb_close',
'maxdb_commit',
'maxdb_connect_errno',
'maxdb_connect_error',
'maxdb_connect',
'maxdb_data_seek',
'maxdb_debug',
'maxdb_disable_reads_from_master',
'maxdb_disable_rpl_parse',
'maxdb_dump_debug_info',
'maxdb_embedded_connect',
'maxdb_enable_reads_from_master',
'maxdb_enable_rpl_parse',
'maxdb_errno',
'maxdb_error',
'maxdb_escape_string',
'maxdb_execute',
'maxdb_fetch_array',
'maxdb_fetch_assoc',
'maxdb_fetch_field_direct',
'maxdb_fetch_field',
'maxdb_fetch_fields',
'maxdb_fetch_lengths',
'maxdb_fetch_object',
'maxdb_fetch_row',
'maxdb_fetch',
'maxdb_field_count',
'maxdb_field_seek',
'maxdb_field_tell',
'maxdb_free_result',
'maxdb_get_client_info',
'maxdb_get_client_version',
'maxdb_get_host_info',
'maxdb_get_metadata',
'maxdb_get_proto_info',
'maxdb_get_server_info',
'maxdb_get_server_version',
'maxdb_info',
'maxdb_init',
'maxdb_insert_id',
'maxdb_kill',
'maxdb_master_query',
'maxdb_more_results',
'maxdb_multi_query',
'maxdb_next_result',
'maxdb_num_fields',
'maxdb_num_rows',
'maxdb_options',
'maxdb_param_count',
'maxdb_ping',
'maxdb_prepare',
'maxdb_query',
'maxdb_real_connect',
'maxdb_real_escape_string',
'maxdb_real_query',
'maxdb_report',
'maxdb_rollback',
'maxdb_rpl_parse_enabled',
'maxdb_rpl_probe',
'maxdb_rpl_query_type',
'maxdb_select_db',
'maxdb_send_long_data',
'maxdb_send_query',
'maxdb_server_end',
'maxdb_server_init',
'maxdb_set_opt',
'maxdb_sqlstate',
'maxdb_ssl_set',
'maxdb_stat',
'maxdb_stmt_affected_rows'],
'Mcrypt': ['mcrypt_cbc',
'mcrypt_cfb',
'mcrypt_create_iv',
'mcrypt_decrypt',
'mcrypt_ecb',
'mcrypt_enc_get_algorithms_name',
'mcrypt_enc_get_block_size',
'mcrypt_enc_get_iv_size',
'mcrypt_enc_get_key_size',
'mcrypt_enc_get_modes_name',
'mcrypt_enc_get_supported_key_sizes',
'mcrypt_enc_is_block_algorithm_mode',
'mcrypt_enc_is_block_algorithm',
'mcrypt_enc_is_block_mode',
'mcrypt_enc_self_test',
'mcrypt_encrypt',
'mcrypt_generic_deinit',
'mcrypt_generic_end',
'mcrypt_generic_init',
'mcrypt_generic',
'mcrypt_get_block_size',
'mcrypt_get_cipher_name',
'mcrypt_get_iv_size',
'mcrypt_get_key_size',
'mcrypt_list_algorithms',
'mcrypt_list_modes',
'mcrypt_module_close',
'mcrypt_module_get_algo_block_size',
'mcrypt_module_get_algo_key_size',
'mcrypt_module_get_supported_key_sizes',
'mcrypt_module_is_block_algorithm_mode',
'mcrypt_module_is_block_algorithm',
'mcrypt_module_is_block_mode',
'mcrypt_module_open',
'mcrypt_module_self_test',
'mcrypt_ofb',
'mdecrypt_generic'],
'Memcache': ['memcache_debug'],
'Mhash': ['mhash_count',
'mhash_get_block_size',
'mhash_get_hash_name',
'mhash_keygen_s2k',
'mhash'],
'Ming': ['ming_keypress',
'ming_setcubicthreshold',
'ming_setscale',
'ming_setswfcompression',
'ming_useconstants',
'ming_useswfversion'],
'Misc.': ['connection_aborted',
'connection_status',
'connection_timeout',
'constant',
'define',
'defined',
'die',
'eval',
'exit',
'get_browser',
'__halt_compiler',
'highlight_file',
'highlight_string',
'ignore_user_abort',
'pack',
'php_check_syntax',
'php_strip_whitespace',
'show_source',
'sleep',
'sys_getloadavg',
'time_nanosleep',
'time_sleep_until',
'uniqid',
'unpack',
'usleep'],
'Mongo': ['bson_decode', 'bson_encode'],
'Msession': ['msession_connect',
'msession_count',
'msession_create',
'msession_destroy',
'msession_disconnect',
'msession_find',
'msession_get_array',
'msession_get_data',
'msession_get',
'msession_inc',
'msession_list',
'msession_listvar',
'msession_lock',
'msession_plugin',
'msession_randstr',
'msession_set_array',
'msession_set_data',
'msession_set',
'msession_timeout',
'msession_uniq',
'msession_unlock'],
'Mssql': ['mssql_bind',
'mssql_close',
'mssql_connect',
'mssql_data_seek',
'mssql_execute',
'mssql_fetch_array',
'mssql_fetch_assoc',
'mssql_fetch_batch',
'mssql_fetch_field',
'mssql_fetch_object',
'mssql_fetch_row',
'mssql_field_length',
'mssql_field_name',
'mssql_field_seek',
'mssql_field_type',
'mssql_free_result',
'mssql_free_statement',
'mssql_get_last_message',
'mssql_guid_string',
'mssql_init',
'mssql_min_error_severity',
'mssql_min_message_severity',
'mssql_next_result',
'mssql_num_fields',
'mssql_num_rows',
'mssql_pconnect',
'mssql_query',
'mssql_result',
'mssql_rows_affected',
'mssql_select_db'],
'Multibyte String': ['mb_check_encoding',
'mb_convert_case',
'mb_convert_encoding',
'mb_convert_kana',
'mb_convert_variables',
'mb_decode_mimeheader',
'mb_decode_numericentity',
'mb_detect_encoding',
'mb_detect_order',
'mb_encode_mimeheader',
'mb_encode_numericentity',
'mb_encoding_aliases',
'mb_ereg_match',
'mb_ereg_replace',
'mb_ereg_search_getpos',
'mb_ereg_search_getregs',
'mb_ereg_search_init',
'mb_ereg_search_pos',
'mb_ereg_search_regs',
'mb_ereg_search_setpos',
'mb_ereg_search',
'mb_ereg',
'mb_eregi_replace',
'mb_eregi',
'mb_get_info',
'mb_http_input',
'mb_http_output',
'mb_internal_encoding',
'mb_language',
'mb_list_encodings',
'mb_output_handler',
'mb_parse_str',
'mb_preferred_mime_name',
'mb_regex_encoding',
'mb_regex_set_options',
'mb_send_mail',
'mb_split',
'mb_strcut',
'mb_strimwidth',
'mb_stripos',
'mb_stristr',
'mb_strlen',
'mb_strpos',
'mb_strrchr',
'mb_strrichr',
'mb_strripos',
'mb_strrpos',
'mb_strstr',
'mb_strtolower',
'mb_strtoupper',
'mb_strwidth',
'mb_substitute_character',
'mb_substr_count',
'mb_substr'],
'MySQL': ['mysql_affected_rows',
'mysql_client_encoding',
'mysql_close',
'mysql_connect',
'mysql_create_db',
'mysql_data_seek',
'mysql_db_name',
'mysql_db_query',
'mysql_drop_db',
'mysql_errno',
'mysql_error',
'mysql_escape_string',
'mysql_fetch_array',
'mysql_fetch_assoc',
'mysql_fetch_field',
'mysql_fetch_lengths',
'mysql_fetch_object',
'mysql_fetch_row',
'mysql_field_flags',
'mysql_field_len',
'mysql_field_name',
'mysql_field_seek',
'mysql_field_table',
'mysql_field_type',
'mysql_free_result',
'mysql_get_client_info',
'mysql_get_host_info',
'mysql_get_proto_info',
'mysql_get_server_info',
'mysql_info',
'mysql_insert_id',
'mysql_list_dbs',
'mysql_list_fields',
'mysql_list_processes',
'mysql_list_tables',
'mysql_num_fields',
'mysql_num_rows',
'mysql_pconnect',
'mysql_ping',
'mysql_query',
'mysql_real_escape_string',
'mysql_result',
'mysql_select_db',
'mysql_set_charset',
'mysql_stat',
'mysql_tablename',
'mysql_thread_id',
'mysql_unbuffered_query'],
'NSAPI': ['nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'],
'Ncurses': ['ncurses_addch',
'ncurses_addchnstr',
'ncurses_addchstr',
'ncurses_addnstr',
'ncurses_addstr',
'ncurses_assume_default_colors',
'ncurses_attroff',
'ncurses_attron',
'ncurses_attrset',
'ncurses_baudrate',
'ncurses_beep',
'ncurses_bkgd',
'ncurses_bkgdset',
'ncurses_border',
'ncurses_bottom_panel',
'ncurses_can_change_color',
'ncurses_cbreak',
'ncurses_clear',
'ncurses_clrtobot',
'ncurses_clrtoeol',
'ncurses_color_content',
'ncurses_color_set',
'ncurses_curs_set',
'ncurses_def_prog_mode',
'ncurses_def_shell_mode',
'ncurses_define_key',
'ncurses_del_panel',
'ncurses_delay_output',
'ncurses_delch',
'ncurses_deleteln',
'ncurses_delwin',
'ncurses_doupdate',
'ncurses_echo',
'ncurses_echochar',
'ncurses_end',
'ncurses_erase',
'ncurses_erasechar',
'ncurses_filter',
'ncurses_flash',
'ncurses_flushinp',
'ncurses_getch',
'ncurses_getmaxyx',
'ncurses_getmouse',
'ncurses_getyx',
'ncurses_halfdelay',
'ncurses_has_colors',
'ncurses_has_ic',
'ncurses_has_il',
'ncurses_has_key',
'ncurses_hide_panel',
'ncurses_hline',
'ncurses_inch',
'ncurses_init_color',
'ncurses_init_pair',
'ncurses_init',
'ncurses_insch',
'ncurses_insdelln',
'ncurses_insertln',
'ncurses_insstr',
'ncurses_instr',
'ncurses_isendwin',
'ncurses_keyok',
'ncurses_keypad',
'ncurses_killchar',
'ncurses_longname',
'ncurses_meta',
'ncurses_mouse_trafo',
'ncurses_mouseinterval',
'ncurses_mousemask',
'ncurses_move_panel',
'ncurses_move',
'ncurses_mvaddch',
'ncurses_mvaddchnstr',
'ncurses_mvaddchstr',
'ncurses_mvaddnstr',
'ncurses_mvaddstr',
'ncurses_mvcur',
'ncurses_mvdelch',
'ncurses_mvgetch',
'ncurses_mvhline',
'ncurses_mvinch',
'ncurses_mvvline',
'ncurses_mvwaddstr',
'ncurses_napms',
'ncurses_new_panel',
'ncurses_newpad',
'ncurses_newwin',
'ncurses_nl',
'ncurses_nocbreak',
'ncurses_noecho',
'ncurses_nonl',
'ncurses_noqiflush',
'ncurses_noraw',
'ncurses_pair_content',
'ncurses_panel_above',
'ncurses_panel_below',
'ncurses_panel_window',
'ncurses_pnoutrefresh',
'ncurses_prefresh',
'ncurses_putp',
'ncurses_qiflush',
'ncurses_raw',
'ncurses_refresh',
'ncurses_replace_panel',
'ncurses_reset_prog_mode',
'ncurses_reset_shell_mode',
'ncurses_resetty',
'ncurses_savetty',
'ncurses_scr_dump',
'ncurses_scr_init',
'ncurses_scr_restore',
'ncurses_scr_set',
'ncurses_scrl',
'ncurses_show_panel',
'ncurses_slk_attr',
'ncurses_slk_attroff',
'ncurses_slk_attron',
'ncurses_slk_attrset',
'ncurses_slk_clear',
'ncurses_slk_color',
'ncurses_slk_init',
'ncurses_slk_noutrefresh',
'ncurses_slk_refresh',
'ncurses_slk_restore',
'ncurses_slk_set',
'ncurses_slk_touch',
'ncurses_standend',
'ncurses_standout',
'ncurses_start_color',
'ncurses_termattrs',
'ncurses_termname',
'ncurses_timeout',
'ncurses_top_panel',
'ncurses_typeahead',
'ncurses_ungetch',
'ncurses_ungetmouse',
'ncurses_update_panels',
'ncurses_use_default_colors',
'ncurses_use_env',
'ncurses_use_extended_names',
'ncurses_vidattr',
'ncurses_vline',
'ncurses_waddch',
'ncurses_waddstr',
'ncurses_wattroff',
'ncurses_wattron',
'ncurses_wattrset',
'ncurses_wborder',
'ncurses_wclear',
'ncurses_wcolor_set',
'ncurses_werase',
'ncurses_wgetch',
'ncurses_whline',
'ncurses_wmouse_trafo',
'ncurses_wmove',
'ncurses_wnoutrefresh',
'ncurses_wrefresh',
'ncurses_wstandend',
'ncurses_wstandout',
'ncurses_wvline'],
'Network': ['checkdnsrr',
'closelog',
'define_syslog_variables',
'dns_check_record',
'dns_get_mx',
'dns_get_record',
'fsockopen',
'gethostbyaddr',
'gethostbyname',
'gethostbynamel'],
'Newt': ['newt_bell',
'newt_button_bar',
'newt_button',
'newt_centered_window',
'newt_checkbox_get_value',
'newt_checkbox_set_flags',
'newt_checkbox_set_value',
'newt_checkbox_tree_add_item',
'newt_checkbox_tree_find_item',
'newt_checkbox_tree_get_current',
'newt_checkbox_tree_get_entry_value',
'newt_checkbox_tree_get_multi_selection',
'newt_checkbox_tree_get_selection',
'newt_checkbox_tree_multi',
'newt_checkbox_tree_set_current',
'newt_checkbox_tree_set_entry_value',
'newt_checkbox_tree_set_entry',
'newt_checkbox_tree_set_width',
'newt_checkbox_tree',
'newt_checkbox',
'newt_clear_key_buffer'],
'OAuth': ['oauth_get_sbs', 'oauth_urlencode'],
'OCI8': ['oci_bind_array_by_name',
'oci_bind_by_name',
'oci_cancel',
'oci_close',
'oci_commit',
'oci_connect',
'oci_define_by_name',
'oci_error',
'oci_execute',
'oci_fetch_all',
'oci_fetch_array',
'oci_fetch_assoc',
'oci_fetch_object',
'oci_fetch_row',
'oci_fetch',
'oci_field_is_null',
'oci_field_name',
'oci_field_precision',
'oci_field_scale',
'oci_field_size',
'oci_field_type_raw',
'oci_field_type',
'oci_free_statement',
'oci_internal_debug',
'oci_lob_copy',
'oci_lob_is_equal',
'oci_new_collection',
'oci_new_connect',
'oci_new_cursor',
'oci_new_descriptor',
'oci_num_fields',
'oci_num_rows',
'oci_parse',
'oci_password_change',
'oci_pconnect',
'oci_result',
'oci_rollback',
'oci_server_version',
'oci_set_action',
'oci_set_client_identifier',
'oci_set_client_info',
'oci_set_edition',
'oci_set_module_name',
'oci_set_prefetch',
'oci_statement_type'],
'ODBC': ['odbc_autocommit',
'odbc_binmode',
'odbc_close_all',
'odbc_close',
'odbc_columnprivileges',
'odbc_columns',
'odbc_commit',
'odbc_connect',
'odbc_cursor',
'odbc_data_source',
'odbc_do',
'odbc_error',
'odbc_errormsg',
'odbc_exec',
'odbc_execute',
'odbc_fetch_array',
'odbc_fetch_into',
'odbc_fetch_object',
'odbc_fetch_row',
'odbc_field_len',
'odbc_field_name',
'odbc_field_num',
'odbc_field_precision',
'odbc_field_scale',
'odbc_field_type',
'odbc_foreignkeys',
'odbc_free_result',
'odbc_gettypeinfo',
'odbc_longreadlen',
'odbc_next_result',
'odbc_num_fields',
'odbc_num_rows',
'odbc_pconnect',
'odbc_prepare',
'odbc_primarykeys',
'odbc_procedurecolumns',
'odbc_procedures',
'odbc_result_all',
'odbc_result',
'odbc_rollback',
'odbc_setoption',
'odbc_specialcolumns',
'odbc_statistics',
'odbc_tableprivileges',
'odbc_tables'],
'Object Aggregation': ['aggregate_info',
'aggregate_methods_by_list',
'aggregate_methods_by_regexp'],
'Object overloading': ['overload'],
'OpenAL': ['openal_buffer_create',
'openal_buffer_data',
'openal_buffer_destroy',
'openal_buffer_get',
'openal_buffer_loadwav',
'openal_context_create',
'openal_context_current',
'openal_context_destroy',
'openal_context_process',
'openal_context_suspend',
'openal_device_close',
'openal_device_open',
'openal_listener_get',
'openal_listener_set',
'openal_source_create',
'openal_source_destroy',
'openal_source_get',
'openal_source_pause',
'openal_source_play',
'openal_source_rewind',
'openal_source_set',
'openal_source_stop',
'openal_stream'],
'OpenSSL': ['openssl_csr_export_to_file',
'openssl_csr_export',
'openssl_csr_get_public_key',
'openssl_csr_get_subject',
'openssl_csr_new',
'openssl_csr_sign',
'openssl_decrypt',
'openssl_dh_compute_key',
'openssl_digest',
'openssl_encrypt',
'openssl_error_string',
'openssl_free_key',
'openssl_get_cipher_methods',
'openssl_get_md_methods',
'openssl_get_privatekey',
'openssl_get_publickey',
'openssl_open',
'openssl_pkcs12_export_to_file',
'openssl_pkcs12_export',
'openssl_pkcs12_read',
'openssl_pkcs7_decrypt',
'openssl_pkcs7_encrypt',
'openssl_pkcs7_sign',
'openssl_pkcs7_verify',
'openssl_pkey_export_to_file',
'openssl_pkey_export',
'openssl_pkey_free',
'openssl_pkey_get_details',
'openssl_pkey_get_private',
'openssl_pkey_get_public',
'openssl_pkey_new',
'openssl_private_decrypt',
'openssl_private_encrypt',
'openssl_public_decrypt',
'openssl_public_encrypt',
'openssl_random_pseudo_bytes',
'openssl_seal',
'openssl_sign',
'openssl_verify',
'openssl_x509_check_private_key',
'openssl_x509_checkpurpose',
'openssl_x509_export_to_file',
'openssl_x509_export',
'openssl_x509_free',
'openssl_x509_parse',
'openssl_x509_read'],
'Output Control': ['flush',
'ob_clean',
'ob_end_clean',
'ob_end_flush',
'ob_flush',
'ob_get_clean',
'ob_get_contents',
'ob_get_flush',
'ob_get_length',
'ob_get_level',
'ob_get_status',
'ob_gzhandler',
'ob_implicit_flush',
'ob_list_handlers',
'ob_start',
'output_add_rewrite_var',
'output_reset_rewrite_vars'],
'Ovrimos SQL': ['ovrimos_close',
'ovrimos_commit',
'ovrimos_connect',
'ovrimos_cursor',
'ovrimos_exec',
'ovrimos_execute',
'ovrimos_fetch_into',
'ovrimos_fetch_row',
'ovrimos_field_len',
'ovrimos_field_name',
'ovrimos_field_num',
'ovrimos_field_type',
'ovrimos_free_result',
'ovrimos_longreadlen',
'ovrimos_num_fields',
'ovrimos_num_rows',
'ovrimos_prepare',
'ovrimos_result_all',
'ovrimos_result',
'ovrimos_rollback'],
'PCNTL': ['pcntl_alarm',
'pcntl_exec',
'pcntl_fork',
'pcntl_getpriority',
'pcntl_setpriority',
'pcntl_signal_dispatch',
'pcntl_signal',
'pcntl_sigprocmask',
'pcntl_sigtimedwait',
'pcntl_sigwaitinfo',
'pcntl_wait',
'pcntl_waitpid',
'pcntl_wexitstatus',
'pcntl_wifexited',
'pcntl_wifsignaled',
'pcntl_wifstopped',
'pcntl_wstopsig',
'pcntl_wtermsig'],
'PCRE': ['preg_filter',
'preg_grep',
'preg_last_error',
'preg_match_all',
'preg_match',
'preg_quote',
'preg_replace_callback',
'preg_replace',
'preg_split'],
'PDF': ['PDF_activate_item',
'PDF_add_annotation',
'PDF_add_bookmark',
'PDF_add_launchlink',
'PDF_add_locallink',
'PDF_add_nameddest',
'PDF_add_note',
'PDF_add_outline',
'PDF_add_pdflink',
'PDF_add_table_cell',
'PDF_add_textflow',
'PDF_add_thumbnail',
'PDF_add_weblink',
'PDF_arc',
'PDF_arcn',
'PDF_attach_file',
'PDF_begin_document',
'PDF_begin_font',
'PDF_begin_glyph',
'PDF_begin_item',
'PDF_begin_layer',
'PDF_begin_page_ext',
'PDF_begin_page',
'PDF_begin_pattern',
'PDF_begin_template_ext',
'PDF_begin_template',
'PDF_circle',
'PDF_clip',
'PDF_close_image',
'PDF_close_pdi_page',
'PDF_close_pdi',
'PDF_close',
'PDF_closepath_fill_stroke',
'PDF_closepath_stroke',
'PDF_closepath',
'PDF_concat',
'PDF_continue_text',
'PDF_create_3dview',
'PDF_create_action',
'PDF_create_annotation',
'PDF_create_bookmark',
'PDF_create_field',
'PDF_create_fieldgroup',
'PDF_create_gstate',
'PDF_create_pvf',
'PDF_create_textflow',
'PDF_curveto',
'PDF_define_layer',
'PDF_delete_pvf',
'PDF_delete_table',
'PDF_delete_textflow',
'PDF_delete',
'PDF_encoding_set_char',
'PDF_end_document',
'PDF_end_font',
'PDF_end_glyph',
'PDF_end_item',
'PDF_end_layer',
'PDF_end_page_ext',
'PDF_end_page',
'PDF_end_pattern',
'PDF_end_template',
'PDF_endpath',
'PDF_fill_imageblock',
'PDF_fill_pdfblock',
'PDF_fill_stroke',
'PDF_fill_textblock',
'PDF_fill',
'PDF_findfont',
'PDF_fit_image',
'PDF_fit_pdi_page',
'PDF_fit_table',
'PDF_fit_textflow',
'PDF_fit_textline',
'PDF_get_apiname',
'PDF_get_buffer',
'PDF_get_errmsg',
'PDF_get_errnum',
'PDF_get_font',
'PDF_get_fontname',
'PDF_get_fontsize',
'PDF_get_image_height',
'PDF_get_image_width',
'PDF_get_majorversion',
'PDF_get_minorversion',
'PDF_get_parameter',
'PDF_get_pdi_parameter',
'PDF_get_pdi_value',
'PDF_get_value',
'PDF_info_font',
'PDF_info_matchbox',
'PDF_info_table',
'PDF_info_textflow',
'PDF_info_textline',
'PDF_initgraphics',
'PDF_lineto',
'PDF_load_3ddata',
'PDF_load_font',
'PDF_load_iccprofile',
'PDF_load_image',
'PDF_makespotcolor',
'PDF_moveto',
'PDF_new',
'PDF_open_ccitt',
'PDF_open_file',
'PDF_open_gif',
'PDF_open_image_file',
'PDF_open_image',
'PDF_open_jpeg',
'PDF_open_memory_image',
'PDF_open_pdi_document',
'PDF_open_pdi_page',
'PDF_open_pdi',
'PDF_open_tiff',
'PDF_pcos_get_number',
'PDF_pcos_get_stream',
'PDF_pcos_get_string',
'PDF_place_image',
'PDF_place_pdi_page',
'PDF_process_pdi',
'PDF_rect',
'PDF_restore',
'PDF_resume_page',
'PDF_rotate',
'PDF_save',
'PDF_scale',
'PDF_set_border_color',
'PDF_set_border_dash',
'PDF_set_border_style',
'PDF_set_char_spacing',
'PDF_set_duration',
'PDF_set_gstate',
'PDF_set_horiz_scaling',
'PDF_set_info_author',
'PDF_set_info_creator',
'PDF_set_info_keywords',
'PDF_set_info_subject',
'PDF_set_info_title',
'PDF_set_info',
'PDF_set_layer_dependency',
'PDF_set_leading',
'PDF_set_parameter',
'PDF_set_text_matrix',
'PDF_set_text_pos',
'PDF_set_text_rendering',
'PDF_set_text_rise',
'PDF_set_value',
'PDF_set_word_spacing',
'PDF_setcolor',
'PDF_setdash',
'PDF_setdashpattern',
'PDF_setflat',
'PDF_setfont',
'PDF_setgray_fill',
'PDF_setgray_stroke',
'PDF_setgray',
'PDF_setlinecap',
'PDF_setlinejoin',
'PDF_setlinewidth',
'PDF_setmatrix',
'PDF_setmiterlimit',
'PDF_setpolydash',
'PDF_setrgbcolor_fill',
'PDF_setrgbcolor_stroke',
'PDF_setrgbcolor',
'PDF_shading_pattern',
'PDF_shading',
'PDF_shfill',
'PDF_show_boxed',
'PDF_show_xy',
'PDF_show',
'PDF_skew',
'PDF_stringwidth',
'PDF_stroke',
'PDF_suspend_page',
'PDF_translate',
'PDF_utf16_to_utf8',
'PDF_utf32_to_utf16',
'PDF_utf8_to_utf16'],
'PHP Options/Info': ['assert_options',
'assert',
'dl',
'extension_loaded',
'gc_collect_cycles',
'gc_disable',
'gc_enable',
'gc_enabled',
'get_cfg_var',
'get_current_user',
'get_defined_constants',
'get_extension_funcs',
'get_include_path',
'get_included_files',
'get_loaded_extensions',
'get_magic_quotes_gpc',
'get_magic_quotes_runtime',
'get_required_files',
'getenv',
'getlastmod',
'getmygid',
'getmyinode',
'getmypid',
'getmyuid',
'getopt',
'getrusage',
'ini_alter',
'ini_get_all',
'ini_get',
'ini_restore',
'ini_set',
'magic_quotes_runtime',
'memory_get_peak_usage',
'memory_get_usage',
'php_ini_loaded_file',
'php_ini_scanned_files',
'php_logo_guid',
'php_sapi_name',
'php_uname',
'phpcredits',
'phpinfo',
'phpversion',
'putenv',
'restore_include_path',
'set_include_path',
'set_magic_quotes_runtime',
'set_time_limit',
'sys_get_temp_dir',
'version_compare',
'zend_logo_guid',
'zend_thread_id',
'zend_version'],
'POSIX': ['posix_access',
'posix_ctermid',
'posix_errno',
'posix_get_last_error',
'posix_getcwd',
'posix_getegid',
'posix_geteuid',
'posix_getgid',
'posix_getgrgid',
'posix_getgrnam',
'posix_getgroups',
'posix_getlogin',
'posix_getpgid',
'posix_getpgrp',
'posix_getpid',
'posix_getppid',
'posix_getpwnam',
'posix_getpwuid',
'posix_getrlimit',
'posix_getsid',
'posix_getuid',
'posix_initgroups',
'posix_isatty',
'posix_kill',
'posix_mkfifo',
'posix_mknod',
'posix_setegid',
'posix_seteuid',
'posix_setgid',
'posix_setpgid',
'posix_setsid',
'posix_setuid',
'posix_strerror',
'posix_times',
'posix_ttyname',
'posix_uname'],
'POSIX Regex': ['ereg_replace',
'ereg',
'eregi_replace',
'eregi',
'split',
'spliti',
'sql_regcase'],
'PS': ['ps_add_bookmark',
'ps_add_launchlink',
'ps_add_locallink',
'ps_add_note',
'ps_add_pdflink',
'ps_add_weblink',
'ps_arc',
'ps_arcn',
'ps_begin_page',
'ps_begin_pattern',
'ps_begin_template',
'ps_circle',
'ps_clip',
'ps_close_image',
'ps_close',
'ps_closepath_stroke',
'ps_closepath',
'ps_continue_text',
'ps_curveto',
'ps_delete',
'ps_end_page',
'ps_end_pattern',
'ps_end_template',
'ps_fill_stroke',
'ps_fill',
'ps_findfont',
'ps_get_buffer',
'ps_get_parameter',
'ps_get_value',
'ps_hyphenate',
'ps_include_file',
'ps_lineto',
'ps_makespotcolor',
'ps_moveto',
'ps_new',
'ps_open_file',
'ps_open_image_file',
'ps_open_image',
'ps_open_memory_image',
'ps_place_image',
'ps_rect',
'ps_restore',
'ps_rotate',
'ps_save',
'ps_scale',
'ps_set_border_color',
'ps_set_border_dash',
'ps_set_border_style',
'ps_set_info',
'ps_set_parameter',
'ps_set_text_pos',
'ps_set_value',
'ps_setcolor',
'ps_setdash',
'ps_setflat',
'ps_setfont',
'ps_setgray',
'ps_setlinecap',
'ps_setlinejoin',
'ps_setlinewidth',
'ps_setmiterlimit',
'ps_setoverprintmode',
'ps_setpolydash',
'ps_shading_pattern',
'ps_shading',
'ps_shfill',
'ps_show_boxed',
'ps_show_xy2',
'ps_show_xy',
'ps_show2',
'ps_show',
'ps_string_geometry',
'ps_stringwidth',
'ps_stroke',
'ps_symbol_name',
'ps_symbol_width',
'ps_symbol',
'ps_translate'],
'Paradox': ['px_close',
'px_create_fp',
'px_date2string',
'px_delete_record',
'px_delete',
'px_get_field',
'px_get_info',
'px_get_parameter',
'px_get_record',
'px_get_schema',
'px_get_value',
'px_insert_record',
'px_new',
'px_numfields',
'px_numrecords',
'px_open_fp',
'px_put_record',
'px_retrieve_record',
'px_set_blob_file',
'px_set_parameter',
'px_set_tablename',
'px_set_targetencoding',
'px_set_value',
'px_timestamp2string',
'px_update_record'],
'Parsekit': ['parsekit_compile_file',
'parsekit_compile_string',
'parsekit_func_arginfo'],
'PostgreSQL': ['pg_affected_rows',
'pg_cancel_query',
'pg_client_encoding',
'pg_close',
'pg_connect',
'pg_connection_busy',
'pg_connection_reset',
'pg_connection_status',
'pg_convert',
'pg_copy_from',
'pg_copy_to',
'pg_dbname',
'pg_delete',
'pg_end_copy',
'pg_escape_bytea',
'pg_escape_string',
'pg_execute',
'pg_fetch_all_columns',
'pg_fetch_all',
'pg_fetch_array',
'pg_fetch_assoc',
'pg_fetch_object',
'pg_fetch_result',
'pg_fetch_row',
'pg_field_is_null',
'pg_field_name',
'pg_field_num',
'pg_field_prtlen',
'pg_field_size',
'pg_field_table',
'pg_field_type_oid',
'pg_field_type',
'pg_free_result',
'pg_get_notify',
'pg_get_pid',
'pg_get_result',
'pg_host',
'pg_insert',
'pg_last_error',
'pg_last_notice',
'pg_last_oid',
'pg_lo_close',
'pg_lo_create',
'pg_lo_export',
'pg_lo_import',
'pg_lo_open',
'pg_lo_read_all',
'pg_lo_read',
'pg_lo_seek',
'pg_lo_tell',
'pg_lo_unlink',
'pg_lo_write',
'pg_meta_data',
'pg_num_fields',
'pg_num_rows',
'pg_options',
'pg_parameter_status',
'pg_pconnect',
'pg_ping',
'pg_port',
'pg_prepare'],
'Printer': ['printer_abort',
'printer_close',
'printer_create_brush',
'printer_create_dc',
'printer_create_font',
'printer_create_pen',
'printer_delete_brush',
'printer_delete_dc',
'printer_delete_font',
'printer_delete_pen',
'printer_draw_bmp',
'printer_draw_chord',
'printer_draw_elipse',
'printer_draw_line',
'printer_draw_pie',
'printer_draw_rectangle',
'printer_draw_roundrect',
'printer_draw_text',
'printer_end_doc',
'printer_end_page',
'printer_get_option',
'printer_list',
'printer_logical_fontheight',
'printer_open',
'printer_select_brush',
'printer_select_font',
'printer_select_pen',
'printer_set_option',
'printer_start_doc',
'printer_start_page',
'printer_write'],
'Program execution': ['escapeshellarg',
'escapeshellcmd',
'exec',
'passthru',
'proc_close',
'proc_get_status',
'proc_nice',
'proc_open',
'proc_terminate',
'shell_exec',
'system'],
'Pspell': ['pspell_add_to_personal',
'pspell_add_to_session',
'pspell_check',
'pspell_clear_session',
'pspell_config_create',
'pspell_config_data_dir',
'pspell_config_dict_dir',
'pspell_config_ignore',
'pspell_config_mode',
'pspell_config_personal',
'pspell_config_repl',
'pspell_config_runtogether',
'pspell_config_save_repl'],
'RPM Reader': ['rpm_close',
'rpm_get_tag',
'rpm_is_valid',
'rpm_open',
'rpm_version'],
'RRD': ['rrd_create',
'rrd_error',
'rrd_fetch',
'rrd_first',
'rrd_graph',
'rrd_info',
'rrd_last',
'rrd_lastupdate',
'rrd_restore',
'rrd_tune',
'rrd_update',
'rrd_xport'],
'Radius': ['radius_acct_open',
'radius_add_server',
'radius_auth_open',
'radius_close',
'radius_config',
'radius_create_request',
'radius_cvt_addr',
'radius_cvt_int',
'radius_cvt_string',
'radius_demangle_mppe_key',
'radius_demangle',
'radius_get_attr',
'radius_get_vendor_attr',
'radius_put_addr',
'radius_put_attr',
'radius_put_int',
'radius_put_string',
'radius_put_vendor_addr',
'radius_put_vendor_attr',
'radius_put_vendor_int',
'radius_put_vendor_string',
'radius_request_authenticator',
'radius_send_request',
'radius_server_secret',
'radius_strerror'],
'Rar': ['rar_wrapper_cache_stats'],
'Readline': ['readline_add_history',
'readline_callback_handler_install',
'readline_callback_handler_remove',
'readline_callback_read_char',
'readline_clear_history',
'readline_completion_function',
'readline_info',
'readline_list_history',
'readline_on_new_line',
'readline_read_history',
'readline_redisplay',
'readline_write_history',
'readline'],
'Recode': ['recode_file', 'recode_string', 'recode'],
'SNMP': ['snmp_get_quick_print',
'snmp_get_valueretrieval',
'snmp_read_mib',
'snmp_set_enum_print',
'snmp_set_oid_numeric_print',
'snmp_set_oid_output_format',
'snmp_set_quick_print',
'snmp_set_valueretrieval',
'snmp2_get',
'snmp2_getnext',
'snmp2_real_walk',
'snmp2_set',
'snmp2_walk',
'snmp3_get',
'snmp3_getnext',
'snmp3_real_walk',
'snmp3_set',
'snmp3_walk',
'snmpget',
'snmpgetnext',
'snmprealwalk',
'snmpset',
'snmpwalk',
'snmpwalkoid'],
'SOAP': ['is_soap_fault', 'use_soap_error_handler'],
'SPL': ['class_implements',
'class_parents',
'iterator_apply',
'iterator_count',
'iterator_to_array',
'spl_autoload_call',
'spl_autoload_extensions',
'spl_autoload_functions',
'spl_autoload_register',
'spl_autoload_unregister',
'spl_autoload',
'spl_classes',
'spl_object_hash'],
'SPPLUS': ['calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'],
'SQLite': ['sqlite_array_query', 'sqlite_busy_timeout', 'sqlite_changes'],
'SSH2': ['ssh2_auth_hostbased_file',
'ssh2_auth_none',
'ssh2_auth_password',
'ssh2_auth_pubkey_file',
'ssh2_connect',
'ssh2_exec',
'ssh2_fetch_stream',
'ssh2_fingerprint',
'ssh2_methods_negotiated',
'ssh2_publickey_add',
'ssh2_publickey_init',
'ssh2_publickey_list',
'ssh2_publickey_remove',
'ssh2_scp_recv',
'ssh2_scp_send',
'ssh2_sftp_lstat',
'ssh2_sftp_mkdir',
'ssh2_sftp_readlink',
'ssh2_sftp_realpath',
'ssh2_sftp_rename',
'ssh2_sftp_rmdir',
'ssh2_sftp_stat',
'ssh2_sftp_symlink',
'ssh2_sftp_unlink',
'ssh2_sftp',
'ssh2_shell',
'ssh2_tunnel'],
'SVN': ['svn_add',
'svn_auth_get_parameter',
'svn_auth_set_parameter',
'svn_blame',
'svn_cat',
'svn_checkout',
'svn_cleanup',
'svn_client_version',
'svn_commit',
'svn_delete',
'svn_diff',
'svn_export',
'svn_fs_abort_txn',
'svn_fs_apply_text',
'svn_fs_begin_txn2',
'svn_fs_change_node_prop',
'svn_fs_check_path',
'svn_fs_contents_changed',
'svn_fs_copy',
'svn_fs_delete',
'svn_fs_dir_entries',
'svn_fs_file_contents',
'svn_fs_file_length',
'svn_fs_is_dir',
'svn_fs_is_file',
'svn_fs_make_dir',
'svn_fs_make_file',
'svn_fs_node_created_rev',
'svn_fs_node_prop',
'svn_fs_props_changed',
'svn_fs_revision_prop',
'svn_fs_revision_root',
'svn_fs_txn_root',
'svn_fs_youngest_rev',
'svn_import',
'svn_log',
'svn_ls',
'svn_mkdir',
'svn_repos_create',
'svn_repos_fs_begin_txn_for_commit',
'svn_repos_fs_commit_txn',
'svn_repos_fs',
'svn_repos_hotcopy',
'svn_repos_open',
'svn_repos_recover',
'svn_revert',
'svn_status',
'svn_update'],
'SWF': ['swf_actiongeturl',
'swf_actiongotoframe',
'swf_actiongotolabel',
'swf_actionnextframe',
'swf_actionplay',
'swf_actionprevframe',
'swf_actionsettarget',
'swf_actionstop',
'swf_actiontogglequality',
'swf_actionwaitforframe',
'swf_addbuttonrecord',
'swf_addcolor',
'swf_closefile',
'swf_definebitmap',
'swf_definefont',
'swf_defineline',
'swf_definepoly',
'swf_definerect',
'swf_definetext',
'swf_endbutton',
'swf_enddoaction',
'swf_endshape',
'swf_endsymbol',
'swf_fontsize',
'swf_fontslant',
'swf_fonttracking',
'swf_getbitmapinfo',
'swf_getfontinfo',
'swf_getframe',
'swf_labelframe',
'swf_lookat',
'swf_modifyobject',
'swf_mulcolor',
'swf_nextid',
'swf_oncondition',
'swf_openfile',
'swf_ortho2',
'swf_ortho',
'swf_perspective',
'swf_placeobject',
'swf_polarview',
'swf_popmatrix',
'swf_posround',
'swf_pushmatrix',
'swf_removeobject',
'swf_rotate',
'swf_scale',
'swf_setfont',
'swf_setframe',
'swf_shapearc',
'swf_shapecurveto3',
'swf_shapecurveto',
'swf_shapefillbitmapclip',
'swf_shapefillbitmaptile',
'swf_shapefilloff',
'swf_shapefillsolid',
'swf_shapelinesolid',
'swf_shapelineto',
'swf_shapemoveto',
'swf_showframe',
'swf_startbutton',
'swf_startdoaction',
'swf_startshape',
'swf_startsymbol',
'swf_textwidth',
'swf_translate',
'swf_viewport'],
'Semaphore': ['ftok',
'msg_get_queue',
'msg_queue_exists',
'msg_receive',
'msg_remove_queue',
'msg_send',
'msg_set_queue',
'msg_stat_queue',
'sem_acquire',
'sem_get',
'sem_release',
'sem_remove',
'shm_attach',
'shm_detach',
'shm_get_var',
'shm_has_var',
'shm_put_var',
'shm_remove_var',
'shm_remove'],
'Session': ['session_cache_expire',
'session_cache_limiter',
'session_commit',
'session_decode',
'session_destroy',
'session_encode',
'session_get_cookie_params',
'session_id',
'session_is_registered',
'session_module_name',
'session_name',
'session_regenerate_id',
'session_register',
'session_save_path',
'session_set_cookie_params',
'session_set_save_handler',
'session_start',
'session_unregister',
'session_unset',
'session_write_close'],
'Session PgSQL': ['session_pgsql_add_error',
'session_pgsql_get_error',
'session_pgsql_get_field',
'session_pgsql_reset',
'session_pgsql_set_field',
'session_pgsql_status'],
'Shared Memory': ['shmop_close',
'shmop_delete',
'shmop_open',
'shmop_read',
'shmop_size',
'shmop_write'],
'SimpleXML': ['simplexml_import_dom',
'simplexml_load_file',
'simplexml_load_string'],
'Socket': ['socket_accept',
'socket_bind',
'socket_clear_error',
'socket_close',
'socket_connect',
'socket_create_listen',
'socket_create_pair',
'socket_create',
'socket_get_option',
'socket_getpeername',
'socket_getsockname',
'socket_last_error',
'socket_listen',
'socket_read',
'socket_recv',
'socket_recvfrom',
'socket_select',
'socket_send',
'socket_sendto',
'socket_set_block',
'socket_set_nonblock',
'socket_set_option',
'socket_shutdown',
'socket_strerror',
'socket_write'],
'Solr': ['solr_get_version'],
'Statistic': ['stats_absolute_deviation',
'stats_cdf_beta',
'stats_cdf_binomial',
'stats_cdf_cauchy',
'stats_cdf_chisquare',
'stats_cdf_exponential',
'stats_cdf_f',
'stats_cdf_gamma',
'stats_cdf_laplace',
'stats_cdf_logistic',
'stats_cdf_negative_binomial',
'stats_cdf_noncentral_chisquare',
'stats_cdf_noncentral_f',
'stats_cdf_poisson',
'stats_cdf_t',
'stats_cdf_uniform',
'stats_cdf_weibull',
'stats_covariance',
'stats_den_uniform',
'stats_dens_beta',
'stats_dens_cauchy',
'stats_dens_chisquare',
'stats_dens_exponential',
'stats_dens_f',
'stats_dens_gamma',
'stats_dens_laplace',
'stats_dens_logistic',
'stats_dens_negative_binomial',
'stats_dens_normal',
'stats_dens_pmf_binomial',
'stats_dens_pmf_hypergeometric',
'stats_dens_pmf_poisson',
'stats_dens_t',
'stats_dens_weibull',
'stats_harmonic_mean',
'stats_kurtosis',
'stats_rand_gen_beta',
'stats_rand_gen_chisquare',
'stats_rand_gen_exponential',
'stats_rand_gen_f',
'stats_rand_gen_funiform',
'stats_rand_gen_gamma',
'stats_rand_gen_ibinomial_negative',
'stats_rand_gen_ibinomial',
'stats_rand_gen_int',
'stats_rand_gen_ipoisson',
'stats_rand_gen_iuniform',
'stats_rand_gen_noncenral_chisquare',
'stats_rand_gen_noncentral_f',
'stats_rand_gen_noncentral_t',
'stats_rand_gen_normal',
'stats_rand_gen_t',
'stats_rand_get_seeds',
'stats_rand_phrase_to_seeds',
'stats_rand_ranf',
'stats_rand_setall',
'stats_skew',
'stats_standard_deviation',
'stats_stat_binomial_coef',
'stats_stat_correlation',
'stats_stat_gennch',
'stats_stat_independent_t',
'stats_stat_innerproduct',
'stats_stat_noncentral_t',
'stats_stat_paired_t',
'stats_stat_percentile',
'stats_stat_powersum',
'stats_variance'],
'Stomp': ['stomp_connect_error', 'stomp_version'],
'Stream': ['set_socket_blocking',
'stream_bucket_append',
'stream_bucket_make_writeable',
'stream_bucket_new',
'stream_bucket_prepend',
'stream_context_create',
'stream_context_get_default',
'stream_context_get_options',
'stream_context_get_params',
'stream_context_set_default',
'stream_context_set_option',
'stream_context_set_params',
'stream_copy_to_stream',
'stream_encoding',
'stream_filter_append',
'stream_filter_prepend',
'stream_filter_register',
'stream_filter_remove',
'stream_get_contents',
'stream_get_filters',
'stream_get_line',
'stream_get_meta_data',
'stream_get_transports',
'stream_get_wrappers',
'stream_is_local',
'stream_notification_callback',
'stream_register_wrapper',
'stream_resolve_include_path',
'stream_select'],
'String': ['addcslashes',
'addslashes',
'bin2hex',
'chop',
'chr',
'chunk_split',
'convert_cyr_string',
'convert_uudecode',
'convert_uuencode',
'count_chars',
'crc32',
'crypt',
'echo',
'explode',
'fprintf',
'get_html_translation_table',
'hebrev',
'hebrevc',
'html_entity_decode',
'htmlentities',
'htmlspecialchars_decode',
'htmlspecialchars',
'implode',
'join',
'lcfirst',
'levenshtein',
'localeconv',
'ltrim',
'md5_file',
'md5',
'metaphone',
'money_format',
'nl_langinfo',
'nl2br',
'number_format',
'ord',
'parse_str',
'print',
'printf',
'quoted_printable_decode',
'quoted_printable_encode',
'quotemeta',
'rtrim',
'setlocale',
'sha1_file',
'sha1',
'similar_text',
'soundex',
'sprintf',
'sscanf',
'str_getcsv',
'str_ireplace',
'str_pad',
'str_repeat',
'str_replace',
'str_rot13',
'str_shuffle',
'str_split',
'str_word_count',
'strcasecmp',
'strchr',
'strcmp',
'strcoll',
'strcspn',
'strip_tags',
'stripcslashes',
'stripos',
'stripslashes',
'stristr',
'strlen',
'strnatcasecmp',
'strnatcmp',
'strncasecmp',
'strncmp',
'strpbrk',
'strpos',
'strrchr',
'strrev',
'strripos',
'strrpos',
'strspn'],
'Sybase': ['sybase_affected_rows',
'sybase_close',
'sybase_connect',
'sybase_data_seek',
'sybase_deadlock_retry_count',
'sybase_fetch_array',
'sybase_fetch_assoc',
'sybase_fetch_field',
'sybase_fetch_object',
'sybase_fetch_row',
'sybase_field_seek',
'sybase_free_result',
'sybase_get_last_message',
'sybase_min_client_severity',
'sybase_min_error_severity',
'sybase_min_message_severity',
'sybase_min_server_severity',
'sybase_num_fields',
'sybase_num_rows',
'sybase_pconnect',
'sybase_query',
'sybase_result',
'sybase_select_db',
'sybase_set_message_handler',
'sybase_unbuffered_query'],
'TCP': ['tcpwrap_check'],
'Tidy': ['ob_tidyhandler',
'tidy_access_count',
'tidy_config_count',
'tidy_error_count',
'tidy_get_error_buffer',
'tidy_get_output',
'tidy_load_config',
'tidy_reset_config',
'tidy_save_config',
'tidy_set_encoding',
'tidy_setopt',
'tidy_warning_count'],
'Tokenizer': ['token_get_all', 'token_name'],
'URL': ['base64_decode',
'base64_encode',
'get_headers',
'get_meta_tags',
'http_build_query',
'parse_url',
'rawurldecode',
'rawurlencode',
'urldecode',
'urlencode'],
'Variable handling': ['debug_zval_dump',
'doubleval',
'empty',
'floatval',
'get_defined_vars',
'get_resource_type',
'gettype',
'import_request_variables',
'intval',
'is_array',
'is_bool',
'is_callable',
'is_double',
'is_float',
'is_int',
'is_integer',
'is_long',
'is_null',
'is_numeric',
'is_object',
'is_real',
'is_resource',
'is_scalar',
'is_string',
'isset',
'print_r',
'serialize',
'settype',
'strval',
'unserialize',
'unset',
'var_dump',
'var_export'],
'W32api': ['w32api_deftype',
'w32api_init_dtype',
'w32api_invoke_function',
'w32api_register_function',
'w32api_set_call_method'],
'WDDX': ['wddx_add_vars',
'wddx_deserialize',
'wddx_packet_end',
'wddx_packet_start',
'wddx_serialize_value',
'wddx_serialize_vars',
'wddx_unserialize'],
'WinCache': ['wincache_fcache_fileinfo',
'wincache_fcache_meminfo',
'wincache_lock',
'wincache_ocache_fileinfo',
'wincache_ocache_meminfo',
'wincache_refresh_if_changed',
'wincache_rplist_fileinfo',
'wincache_rplist_meminfo',
'wincache_scache_info',
'wincache_scache_meminfo',
'wincache_ucache_add',
'wincache_ucache_cas',
'wincache_ucache_clear',
'wincache_ucache_dec',
'wincache_ucache_delete',
'wincache_ucache_exists',
'wincache_ucache_get',
'wincache_ucache_inc',
'wincache_ucache_info',
'wincache_ucache_meminfo',
'wincache_ucache_set',
'wincache_unlock'],
'XML Parser': ['utf8_decode'],
'XML-RPC': ['xmlrpc_decode_request',
'xmlrpc_decode',
'xmlrpc_encode_request',
'xmlrpc_encode',
'xmlrpc_get_type',
'xmlrpc_is_fault',
'xmlrpc_parse_method_descriptions',
'xmlrpc_server_add_introspection_data',
'xmlrpc_server_call_method',
'xmlrpc_server_create',
'xmlrpc_server_destroy',
'xmlrpc_server_register_introspection_callback',
'xmlrpc_server_register_method',
'xmlrpc_set_type'],
'XSLT (PHP4)': ['xslt_backend_info',
'xslt_backend_name',
'xslt_backend_version',
'xslt_create',
'xslt_errno',
'xslt_error',
'xslt_free',
'xslt_getopt',
'xslt_process',
'xslt_set_base',
'xslt_set_encoding',
'xslt_set_error_handler',
'xslt_set_log',
'xslt_set_object',
'xslt_set_sax_handler',
'xslt_set_sax_handlers',
'xslt_set_scheme_handler',
'xslt_set_scheme_handlers',
'xslt_setopt'],
'YAZ': ['yaz_addinfo',
'yaz_ccl_conf',
'yaz_ccl_parse',
'yaz_close',
'yaz_connect',
'yaz_database',
'yaz_element',
'yaz_errno',
'yaz_error',
'yaz_es_result',
'yaz_es',
'yaz_get_option',
'yaz_hits',
'yaz_itemorder',
'yaz_present',
'yaz_range',
'yaz_record',
'yaz_scan_result',
'yaz_scan',
'yaz_schema',
'yaz_search',
'yaz_set_option',
'yaz_sort',
'yaz_syntax',
'yaz_wait'],
'YP/NIS': ['yp_all',
'yp_cat',
'yp_err_string',
'yp_errno',
'yp_first',
'yp_get_default_domain',
'yp_master',
'yp_match',
'yp_next',
'yp_order'],
'Yaml': ['yaml_emit_file',
'yaml_emit',
'yaml_parse_file',
'yaml_parse_url',
'yaml_parse'],
'Zip': ['zip_close',
'zip_entry_close',
'zip_entry_compressedsize',
'zip_entry_compressionmethod',
'zip_entry_filesize',
'zip_entry_name',
'zip_entry_open',
'zip_entry_read',
'zip_open',
'zip_read'],
'Zlib': ['gzclose',
'gzcompress',
'gzdecode',
'gzdeflate',
'gzencode',
'gzeof',
'gzfile',
'gzgetc',
'gzgets',
'gzgetss',
'gzinflate',
'gzopen',
'gzpassthru',
'gzputs',
'gzread',
'gzrewind',
'gzseek',
'gztell',
'gzuncompress',
'gzwrite',
'readgzfile',
'zlib_get_coding_type'],
'bcompiler': ['bcompiler_load_exe',
'bcompiler_load',
'bcompiler_parse_class',
'bcompiler_read',
'bcompiler_write_class',
'bcompiler_write_constant',
'bcompiler_write_exe_footer',
'bcompiler_write_file',
'bcompiler_write_footer',
'bcompiler_write_function',
'bcompiler_write_functions_from_file',
'bcompiler_write_header',
'bcompiler_write_included_filename'],
'cURL': ['curl_close',
'curl_copy_handle',
'curl_errno',
'curl_error',
'curl_exec',
'curl_getinfo',
'curl_init',
'curl_multi_add_handle',
'curl_multi_close',
'curl_multi_exec',
'curl_multi_getcontent',
'curl_multi_info_read',
'curl_multi_init',
'curl_multi_remove_handle',
'curl_multi_select',
'curl_setopt_array',
'curl_setopt',
'curl_version'],
'chdb': ['chdb_create'],
'dBase': ['dbase_add_record',
'dbase_close',
'dbase_create',
'dbase_delete_record',
'dbase_get_header_info',
'dbase_get_record_with_names',
'dbase_get_record',
'dbase_numfields',
'dbase_numrecords',
'dbase_open',
'dbase_pack',
'dbase_replace_record'],
'dbx': ['dbx_close',
'dbx_compare',
'dbx_connect',
'dbx_error',
'dbx_escape_string',
'dbx_fetch_row'],
'filePro': ['filepro_fieldcount',
'filepro_fieldname',
'filepro_fieldtype',
'filepro_fieldwidth',
'filepro_retrieve',
'filepro_rowcount',
'filepro'],
'iconv': ['iconv_get_encoding',
'iconv_mime_decode_headers',
'iconv_mime_decode',
'iconv_mime_encode',
'iconv_set_encoding',
'iconv_strlen',
'iconv_strpos',
'iconv_strrpos',
'iconv_substr',
'iconv',
'ob_iconv_handler'],
'inclued': ['inclued_get_data'],
'intl': ['intl_error_name',
'intl_get_error_code',
'intl_get_error_message',
'intl_is_failure'],
'libxml': ['libxml_clear_errors',
'libxml_disable_entity_loader',
'libxml_get_errors',
'libxml_get_last_error',
'libxml_set_streams_context',
'libxml_use_internal_errors'],
'mSQL': ['msql_affected_rows',
'msql_close',
'msql_connect',
'msql_create_db',
'msql_createdb',
'msql_data_seek',
'msql_db_query',
'msql_dbname',
'msql_drop_db',
'msql_error',
'msql_fetch_array',
'msql_fetch_field',
'msql_fetch_object',
'msql_fetch_row',
'msql_field_flags',
'msql_field_len',
'msql_field_name',
'msql_field_seek',
'msql_field_table',
'msql_field_type',
'msql_fieldflags',
'msql_fieldlen',
'msql_fieldname',
'msql_fieldtable',
'msql_fieldtype',
'msql_free_result',
'msql_list_dbs',
'msql_list_fields',
'msql_list_tables',
'msql_num_fields',
'msql_num_rows',
'msql_numfields',
'msql_numrows',
'msql_pconnect',
'msql_query',
'msql_regcase',
'msql_result',
'msql_select_db',
'msql_tablename',
'msql'],
'mnoGoSearch': ['udm_add_search_limit',
'udm_alloc_agent_array',
'udm_alloc_agent',
'udm_api_version',
'udm_cat_list',
'udm_cat_path',
'udm_check_charset',
'udm_check_stored',
'udm_clear_search_limits',
'udm_close_stored',
'udm_crc32',
'udm_errno',
'udm_error',
'udm_find',
'udm_free_agent',
'udm_free_ispell_data',
'udm_free_res',
'udm_get_doc_count',
'udm_get_res_field',
'udm_get_res_param',
'udm_hash32',
'udm_load_ispell_data',
'udm_open_stored',
'udm_set_agent_param'],
'mqseries': ['mqseries_back',
'mqseries_begin',
'mqseries_close',
'mqseries_cmit',
'mqseries_conn',
'mqseries_connx',
'mqseries_disc',
'mqseries_get',
'mqseries_inq',
'mqseries_open',
'mqseries_put1',
'mqseries_put',
'mqseries_set',
'mqseries_strerror'],
'mysqlnd_qc': ['mysqlnd_qc_change_handler',
'mysqlnd_qc_clear_cache',
'mysqlnd_qc_get_cache_info',
'mysqlnd_qc_get_core_stats',
'mysqlnd_qc_get_handler',
'mysqlnd_qc_get_query_trace_log',
'mysqlnd_qc_set_user_handlers'],
'qtdom': ['qdom_error', 'qdom_tree'],
'runkit': ['runkit_class_adopt',
'runkit_class_emancipate',
'runkit_constant_add',
'runkit_constant_redefine',
'runkit_constant_remove',
'runkit_function_add',
'runkit_function_copy',
'runkit_function_redefine',
'runkit_function_remove',
'runkit_function_rename',
'runkit_import',
'runkit_lint_file',
'runkit_lint',
'runkit_method_add',
'runkit_method_copy',
'runkit_method_redefine',
'runkit_method_remove',
'runkit_method_rename',
'runkit_return_value_used',
'runkit_sandbox_output_handler',
'runkit_superglobals'],
'ssdeep': ['ssdeep_fuzzy_compare',
'ssdeep_fuzzy_hash_filename',
'ssdeep_fuzzy_hash'],
'vpopmail': ['vpopmail_add_alias_domain_ex',
'vpopmail_add_alias_domain',
'vpopmail_add_domain_ex',
'vpopmail_add_domain',
'vpopmail_add_user',
'vpopmail_alias_add',
'vpopmail_alias_del_domain',
'vpopmail_alias_del',
'vpopmail_alias_get_all',
'vpopmail_alias_get',
'vpopmail_auth_user',
'vpopmail_del_domain_ex',
'vpopmail_del_domain',
'vpopmail_del_user',
'vpopmail_error',
'vpopmail_passwd',
'vpopmail_set_user_quota'],
'win32ps': ['win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'],
'win32service': ['win32_continue_service',
'win32_create_service',
'win32_delete_service',
'win32_get_last_control_message',
'win32_pause_service',
'win32_query_service_status',
'win32_set_service_status',
'win32_start_service_ctrl_dispatcher',
'win32_start_service',
'win32_stop_service'],
'xattr': ['xattr_get',
'xattr_list',
'xattr_remove',
'xattr_set',
'xattr_supported'],
'xdiff': ['xdiff_file_bdiff_size',
'xdiff_file_bdiff',
'xdiff_file_bpatch',
'xdiff_file_diff_binary',
'xdiff_file_diff',
'xdiff_file_merge3',
'xdiff_file_patch_binary',
'xdiff_file_patch',
'xdiff_file_rabdiff',
'xdiff_string_bdiff_size',
'xdiff_string_bdiff',
'xdiff_string_bpatch',
'xdiff_string_diff_binary',
'xdiff_string_diff',
'xdiff_string_merge3',
'xdiff_string_patch_binary',
'xdiff_string_patch',
'xdiff_string_rabdiff']}
if __name__ == '__main__':
    import glob
    import os
    import pprint
    import re
    import shutil
    import tarfile
    import urllib.request

    PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
    PHP_MANUAL_DIR = './php-chunked-xhtml/'
    PHP_REFERENCE_GLOB = 'ref.*'
    # Raw strings: ``\.`` is a regex escape, not a (deprecated, and in newer
    # Pythons invalid) string escape sequence.
    PHP_FUNCTION_RE = r'<a href="function\..*?\.html">(.*?)</a>'
    PHP_MODULE_RE = r'<title>(.*?) Functions</title>'

    def get_php_functions():
        """Scrape the extracted PHP manual reference pages and return a
        mapping of extension/module name -> list of function names.
        Methods (``Foo::bar``/``$x->y``) and a couple of dummy manual
        pages are filtered out.
        """
        function_re = re.compile(PHP_FUNCTION_RE)
        module_re = re.compile(PHP_MODULE_RE)
        modules = {}
        for filename in get_php_references():
            module = ''
            # Context manager so each reference page is closed instead of
            # leaking one open file handle per page.
            with open(filename) as fp:
                for line in fp:
                    if not module:
                        search = module_re.search(line)
                        if search:
                            module = search.group(1)
                            modules[module] = []
                    elif '<h2>Table of Contents</h2>' in line:
                        for match in function_re.finditer(line):
                            fn = match.group(1)
                            # Skip class methods; only plain functions wanted.
                            if '->' not in fn and '::' not in fn:
                                modules[module].append(fn)
                        # These are dummy manual pages, not actual functions
                        if module == 'PHP Options/Info':
                            modules[module].remove('main')
                        elif module == 'Filesystem':
                            modules[module].remove('delete')
                        if not modules[module]:
                            del modules[module]
                        break
        return modules

    def get_php_references():
        """Download and unpack the PHP manual tarball, yielding the path
        of every per-extension reference page.  The downloaded tarball is
        removed once iteration finishes.
        """
        download = urllib.request.urlretrieve(PHP_MANUAL_URL)
        # NOTE(review): extractall() trusts the archive's member paths;
        # acceptable for the official php.net manual, but do not point
        # this at untrusted URLs.
        with tarfile.open(download[0]) as tar:
            tar.extractall()
        for filename in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
            yield filename
        os.remove(download[0])

    def regenerate(filename, modules):
        """Rewrite *filename* in place: everything before ``MODULES = {``
        and everything from ``if __name__ == '__main__':`` onwards is
        preserved; the dict literal in between is replaced by *modules*.
        """
        with open(filename) as fp:
            content = fp.read()
        header = content[:content.find('MODULES = {')]
        footer = content[content.find("if __name__ == '__main__':"):]
        with open(filename, 'w') as fp:
            fp.write(header)
            fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
            fp.write(footer)

    def run():
        """Download the function index, regenerate this module, clean up."""
        print('>> Downloading Function Index')
        modules = get_php_functions()
        total = sum(len(v) for v in modules.values())
        print('%d functions found' % total)
        regenerate(__file__, modules)
        shutil.rmtree(PHP_MANUAL_DIR)

    run()
| mit |
mcgoddard/widgetr | env/Lib/site-packages/werkzeug/urls.py | 216 | 36710 | # -*- coding: utf-8 -*-
"""
werkzeug.urls
~~~~~~~~~~~~~
``werkzeug.urls`` used to provide several wrapper functions for Python 2
urlparse, whose main purpose was to work around the behavior of the Py2
stdlib and its lack of unicode support. While this was already a somewhat
inconvenient situation, it got even more complicated because Python 3's
``urllib.parse`` actually does handle unicode properly. In other words,
this module would wrap two libraries with completely different behavior. So
now this module contains a 2-and-3-compatible backport of Python 3's
``urllib.parse``, which is mostly API-compatible.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import re
from werkzeug._compat import text_type, PY2, to_unicode, \
to_native, implements_to_string, try_coerce_native, \
normalize_string_tuple, make_literal_wrapper, \
fix_tuple_repr
from werkzeug._internal import _encode_idna, _decode_idna
from werkzeug.datastructures import MultiDict, iter_multi_items
from collections import namedtuple
# A regular expression for what a valid schema looks like
_scheme_re = re.compile(r'^[a-zA-Z0-9+-.]+$')
# Characters that are safe in any part of an URL.
_always_safe = (b'abcdefghijklmnopqrstuvwxyz'
                b'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-+')
# All hex digits, both cases, used to enumerate percent-escape pairs below.
_hexdigits = '0123456789ABCDEFabcdef'
# Maps every two-hex-digit byte string (b'00'..b'FF', mixed case included)
# to the integer byte value it encodes; a lookup table for fast %XX unquoting.
_hextobyte = dict(
    ((a + b).encode(), int(a + b, 16))
    for a in _hexdigits for b in _hexdigits
)
# The base 5-tuple (scheme, netloc, path, query, fragment) that URL/BytesURL
# build upon.
_URLTuple = fix_tuple_repr(namedtuple(
    '_URLTuple',
    ['scheme', 'netloc', 'path', 'query', 'fragment']
))
class BaseURL(_URLTuple):
'''Superclass of :py:class:`URL` and :py:class:`BytesURL`.'''
__slots__ = ()
def replace(self, **kwargs):
    """Produce a copy of this URL where the fields named by the keyword
    arguments carry the supplied new values; every other field is taken
    over unchanged."""
    return self._replace(**kwargs)
@property
def host(self):
    """Hostname or IP address mentioned in the URL, or `None` when the
    URL carries no host information.  The port number is never included
    in this value."""
    host_part = self._split_host()[0]
    return host_part
@property
def ascii_host(self):
    """Like :attr:`host`, but restricted to ASCII.  Text hosts are idna
    encoded; if that fails, non-ASCII characters are dropped.  Useful for
    socket operations when the URL might contain internationalized
    characters."""
    host = self.host
    # isinstance() is False for None, so a missing host falls straight
    # through to to_native().
    if isinstance(host, text_type):
        try:
            host = _encode_idna(host)
        except UnicodeError:
            host = host.encode('ascii', 'ignore')
    return to_native(host, 'ascii', 'ignore')
@property
def port(self):
    """The explicit port of the URL as an integer, or `None` when no
    (valid) port is present.  Default ports for the scheme are never
    filled in."""
    try:
        candidate = int(to_native(self._split_host()[1]))
    except (ValueError, TypeError):
        # No port component, or one that is not numeric.
        return None
    if 0 <= candidate <= 65535:
        return candidate
    return None
@property
def auth(self):
    """The raw authentication (userinfo) portion of the URL, or `None`
    when absent."""
    netloc_parts = self._split_netloc()
    return netloc_parts[0]
@property
def username(self):
    """URL-decoded username from the URL (always a unicode string), or
    `None` when the URL carries no userinfo."""
    raw = self._split_auth()[0]
    return None if raw is None else _url_unquote_legacy(raw)
@property
def raw_username(self):
    """Same as :attr:`username`, but without any URL decoding applied;
    `None` when absent."""
    auth_parts = self._split_auth()
    return auth_parts[0]
@property
def password(self):
    """URL-decoded password from the URL (always a unicode string), or
    `None` when the URL carries no password."""
    raw = self._split_auth()[1]
    return None if raw is None else _url_unquote_legacy(raw)
@property
def raw_password(self):
    """Same as :attr:`password`, but without any URL decoding applied;
    `None` when absent."""
    auth_parts = self._split_auth()
    return auth_parts[1]
def decode_query(self, *args, **kwargs):
    """Parse the query component of this URL.  This is a convenience
    wrapper that forwards all positional and keyword arguments straight
    to :func:`url_decode` along with the query string."""
    return url_decode(self.query, *args, **kwargs)
def join(self, *args, **kwargs):
    """Combine this URL with another one and return the parsed result.
    Shorthand for :func:`url_join` followed by :func:`url_parse`."""
    joined = url_join(self, *args, **kwargs)
    return url_parse(joined)
def to_url(self):
    """Serialize this URL tuple back into a string or bytes object,
    matching the type of the stored information.  Convenience wrapper
    around :func:`url_unparse`."""
    return url_unparse(self)
def decode_netloc(self):
    """Decode the netloc part (idna host, optional port and url-unquoted
    credentials) into a single display string."""
    netloc = _decode_idna(self.host or '')
    if ':' in netloc:
        # Bracket hosts containing colons (IPv6 literals) so the port
        # separator below stays unambiguous.
        netloc = '[%s]' % netloc
    port = self.port
    if port is not None:
        netloc = '%s:%d' % (netloc, port)
    credentials = [
        _url_unquote_legacy(self.raw_username or '', '/:%@'),
        _url_unquote_legacy(self.raw_password or '', '/:%@'),
    ]
    auth = ':'.join(part for part in credentials if part)
    if auth:
        netloc = '%s@%s' % (auth, netloc)
    return netloc
    def to_uri_tuple(self):
        """Returns a :class:`BytesURL` tuple that holds a URI.  This will
        encode all the information in the URL properly to ASCII using the
        rules a web browser would follow.
        It's usually more interesting to directly call :meth:`iri_to_uri` which
        will return a string.
        """
        return url_parse(iri_to_uri(self).encode('ascii'))
    def to_iri_tuple(self):
        """Returns a :class:`URL` tuple that holds a IRI.  This will try
        to decode as much information as possible in the URL without
        losing information similar to how a web browser does it for the
        URL bar.
        It's usually more interesting to directly call :meth:`uri_to_iri` which
        will return a string.
        """
        return url_parse(uri_to_iri(self))
    def get_file_location(self, pathformat=None):
        """Returns a tuple with the location of the file in the form
        ``(server, location)``.  If the netloc is empty in the URL or
        points to localhost, it's represented as ``None``.
        The `pathformat` by default is autodetection but needs to be set
        when working with URLs of a specific system.  The supported values
        are ``'windows'`` when working with Windows or DOS paths and
        ``'posix'`` when working with posix paths.
        If the URL does not point to a local file, the server and location
        are both represented as ``None``.
        :param pathformat: The expected format of the path component.
                           Currently ``'windows'`` and ``'posix'`` are
                           supported.  Defaults to ``None`` which is
                           autodetect.
        """
        # Only file:// URLs can refer to a local file.
        if self.scheme != 'file':
            return None, None
        path = url_unquote(self.path)
        host = self.netloc or None
        if pathformat is None:
            # Autodetect from the running interpreter's platform.
            if os.name == 'nt':
                pathformat = 'windows'
            else:
                pathformat = 'posix'
        if pathformat == 'windows':
            # "/C:/..." or "/C|/..." -> "C:/..." (drive-letter forms).
            if path[:1] == '/' and path[1:2].isalpha() and path[2:3] in '|:':
                path = path[1:2] + ':' + path[3:]
            windows_share = path[:3] in ('\\' * 3, '/' * 3)
            import ntpath
            path = ntpath.normpath(path)
            # Windows shared drives are represented as ``\\host\\directory``.
            # That results in a URL like ``file://///host/directory``, and a
            # path like ``///host/directory``. We need to special-case this
            # because the path contains the hostname.
            if windows_share and host is None:
                parts = path.lstrip('\\').split('\\', 1)
                if len(parts) == 2:
                    host, path = parts
                else:
                    host = parts[0]
                    path = ''
        elif pathformat == 'posix':
            import posixpath
            path = posixpath.normpath(path)
        else:
            raise TypeError('Invalid path format %s' % repr(pathformat))
        # Loopback hosts are treated the same as a missing netloc.
        if host in ('127.0.0.1', '::1', 'localhost'):
            host = None
        return host, path
def _split_netloc(self):
if self._at in self.netloc:
return self.netloc.split(self._at, 1)
return None, self.netloc
def _split_auth(self):
auth = self._split_netloc()[0]
if not auth:
return None, None
if self._colon not in auth:
return auth, None
return auth.split(self._colon, 1)
    def _split_host(self):
        # Split the hostinfo part of the netloc into (host, port).
        rv = self._split_netloc()[1]
        if not rv:
            return None, None
        if not rv.startswith(self._lbracket):
            # Plain host, optionally followed by ":port".
            if self._colon in rv:
                return rv.split(self._colon, 1)
            return rv, None
        # Bracketed IPv6 literal: "[host]" optionally followed by ":port".
        idx = rv.find(self._rbracket)
        if idx < 0:
            return rv, None
        host = rv[1:idx]
        rest = rv[idx + 1:]
        if rest.startswith(self._colon):
            return host, rest[1:]
        return host, None
@implements_to_string
class URL(BaseURL):
    """Represents a parsed URL.  This behaves like a regular tuple but
    also has some extra attributes that give further insight into the
    URL.
    """
    __slots__ = ()
    # Text variants of the separators used by the BaseURL split helpers.
    _at = '@'
    _colon = ':'
    _lbracket = '['
    _rbracket = ']'

    def __str__(self):
        return self.to_url()

    def encode_netloc(self):
        """Encodes the netloc part to an ASCII safe URL as bytes."""
        rv = self.ascii_host or ''
        if ':' in rv:
            # Bare IPv6 addresses must be bracketed inside a netloc.
            rv = '[%s]' % rv
        port = self.port
        if port is not None:
            rv = '%s:%d' % (rv, port)
        auth = ':'.join(filter(None, [
            url_quote(self.raw_username or '', 'utf-8', 'strict', '/:%'),
            url_quote(self.raw_password or '', 'utf-8', 'strict', '/:%'),
        ]))
        if auth:
            rv = '%s@%s' % (auth, rv)
        return to_native(rv)

    def encode(self, charset='utf-8', errors='replace'):
        """Encodes the URL to a tuple made out of bytes.  The charset is
        only being used for the path, query and fragment.
        """
        return BytesURL(
            self.scheme.encode('ascii'),
            self.encode_netloc(),
            self.path.encode(charset, errors),
            self.query.encode(charset, errors),
            self.fragment.encode(charset, errors)
        )
class BytesURL(BaseURL):
    """Represents a parsed URL in bytes."""
    __slots__ = ()
    # Bytes variants of the separators used by the BaseURL split helpers.
    _at = b'@'
    _colon = b':'
    _lbracket = b'['
    _rbracket = b']'

    def __str__(self):
        return self.to_url().decode('utf-8', 'replace')

    def encode_netloc(self):
        """Returns the netloc unchanged as bytes."""
        return self.netloc

    def decode(self, charset='utf-8', errors='replace'):
        """Decodes the URL to a tuple made out of strings.  The charset is
        only being used for the path, query and fragment.
        """
        return URL(
            self.scheme.decode('ascii'),
            self.decode_netloc(),
            self.path.decode(charset, errors),
            self.query.decode(charset, errors),
            self.fragment.decode(charset, errors)
        )
def _unquote_to_bytes(string, unsafe=''):
    # Percent-decode *string* into bytes, leaving any sequence whose
    # decoded byte is listed in *unsafe* (or is not valid hex) encoded.
    if isinstance(string, text_type):
        string = string.encode('utf-8')
    if isinstance(unsafe, text_type):
        unsafe = unsafe.encode('utf-8')
    unsafe = frozenset(bytearray(unsafe))
    bits = iter(string.split(b'%'))
    result = bytearray(next(bits, b''))
    for item in bits:
        try:
            # _hextobyte maps two hex digits to the decoded byte value;
            # a KeyError means the escape is invalid and kept literally.
            char = _hextobyte[item[:2]]
            if char in unsafe:
                raise KeyError()
            result.append(char)
            result.extend(item[2:])
        except KeyError:
            result.extend(b'%')
            result.extend(item)
    return bytes(result)
def _url_encode_impl(obj, charset, encode_keys, sort, key):
    # Yield "k=v" pairs for every item of *obj*, skipping None values.
    pairs = iter_multi_items(obj)
    if sort:
        pairs = sorted(pairs, key=key)
    for k, v in pairs:
        if v is None:
            continue
        if not isinstance(k, bytes):
            k = text_type(k).encode(charset)
        if not isinstance(v, bytes):
            v = text_type(v).encode(charset)
        yield url_quote_plus(k) + '=' + url_quote_plus(v)
def _url_unquote_legacy(value, unsafe=''):
    # Unquote assuming UTF-8 first; fall back to latin1 for URLs that
    # were produced by legacy (pre-unicode) software.
    try:
        return url_unquote(value, charset='utf-8',
                           errors='strict', unsafe=unsafe)
    except UnicodeError:
        return url_unquote(value, charset='latin1', unsafe=unsafe)
def url_parse(url, scheme=None, allow_fragments=True):
    """Parses a URL from a string into a :class:`URL` tuple.  If the URL
    is lacking a scheme it can be provided as second argument. Otherwise,
    it is ignored.  Optionally fragments can be stripped from the URL
    by setting `allow_fragments` to `False`.
    The inverse of this function is :func:`url_unparse`.
    :param url: the URL to parse.
    :param scheme: the default schema to use if the URL is schemaless.
    :param allow_fragments: if set to `False` a fragment will be removed
                            from the URL.
    """
    # s() wraps literals so they match the input's type (text or bytes).
    s = make_literal_wrapper(url)
    is_text_based = isinstance(url, text_type)
    if scheme is None:
        scheme = s('')
    netloc = query = fragment = s('')
    i = url.find(s(':'))
    if i > 0 and _scheme_re.match(to_native(url[:i], errors='replace')):
        # make sure "iri" is not actually a port number (in which case
        # "scheme" is really part of the path)
        rest = url[i + 1:]
        if not rest or any(c not in s('0123456789') for c in rest):
            # not a port number
            scheme, url = url[:i].lower(), rest
    if url[:2] == s('//'):
        # Netloc extends up to the first of "/", "?" or "#".
        delim = len(url)
        for c in s('/?#'):
            wdelim = url.find(c, 2)
            if wdelim >= 0:
                delim = min(delim, wdelim)
        netloc, url = url[2:delim], url[delim:]
        if (s('[') in netloc and s(']') not in netloc) or \
           (s(']') in netloc and s('[') not in netloc):
            raise ValueError('Invalid IPv6 URL')
    if allow_fragments and s('#') in url:
        url, fragment = url.split(s('#'), 1)
    if s('?') in url:
        url, query = url.split(s('?'), 1)
    result_type = is_text_based and URL or BytesURL
    return result_type(scheme, netloc, url, query, fragment)
def url_quote(string, charset='utf-8', errors='strict', safe='/:', unsafe=''):
    """URL encode a single string with a given encoding.
    :param string: the string to quote.
    :param charset: the charset to be used.
    :param safe: an optional sequence of safe characters.
    :param unsafe: an optional sequence of unsafe characters.
    .. versionadded:: 0.9.2
       The `unsafe` parameter was added.
    """
    if not isinstance(string, (text_type, bytes, bytearray)):
        string = text_type(string)
    if isinstance(string, text_type):
        string = string.encode(charset, errors)
    if isinstance(safe, text_type):
        safe = safe.encode(charset, errors)
    if isinstance(unsafe, text_type):
        unsafe = unsafe.encode(charset, errors)
    # `unsafe` wins over `safe`; _always_safe holds the RFC unreserved set.
    safe = frozenset(bytearray(safe) + _always_safe) - frozenset(bytearray(unsafe))
    rv = bytearray()
    for char in bytearray(string):
        if char in safe:
            rv.append(char)
        else:
            rv.extend(('%%%02X' % char).encode('ascii'))
    return to_native(bytes(rv))
def url_quote_plus(string, charset='utf-8', errors='strict', safe=''):
    """URL encode a single string with the given encoding and convert
    whitespace to "+".
    :param string: The string to quote.
    :param charset: The charset to be used.
    :param safe: An optional sequence of safe characters.
    """
    # Treat " " as safe so it survives quoting, mark "+" unsafe so a
    # literal plus gets escaped, then map the spaces to "+".
    return url_quote(string, charset, errors, safe + ' ', '+').replace(' ', '+')
def url_unparse(components):
    """The reverse operation to :meth:`url_parse`.  This accepts arbitrary
    as well as :class:`URL` tuples and returns a URL as a string.
    :param components: the parsed URL as tuple which should be converted
                       into a URL string.
    """
    scheme, netloc, path, query, fragment = \
        normalize_string_tuple(components)
    s = make_literal_wrapper(scheme)
    url = s('')
    # We generally treat file:///x and file:/x the same which is also
    # what browsers seem to do.  This also allows us to ignore a schema
    # register for netloc utilization or having to differentiate between
    # empty and missing netloc.
    if netloc or (scheme and path.startswith(s('/'))):
        if path and path[:1] != s('/'):
            path = s('/') + path
        url = s('//') + (netloc or s('')) + path
    elif path:
        url += path
    if scheme:
        url = scheme + s(':') + url
    if query:
        url = url + s('?') + query
    if fragment:
        url = url + s('#') + fragment
    return url
def url_unquote(string, charset='utf-8', errors='replace', unsafe=''):
    """URL decode a single string with a given encoding.  If the charset
    is set to `None` no unicode decoding is performed and raw bytes
    are returned.
    :param string: the string to unquote.
    :param charset: the charset of the query string.  If set to `None`
                    no unicode decoding will take place.
    :param errors: the error handling for the charset decoding.
    """
    rv = _unquote_to_bytes(string, unsafe)
    if charset is not None:
        rv = rv.decode(charset, errors)
    return rv
def url_unquote_plus(s, charset='utf-8', errors='replace'):
    """URL decode a single string with the given `charset` and decode "+" to
    whitespace.
    Per default encoding errors are ignored.  If you want a different behavior
    you can set `errors` to ``'replace'`` or ``'strict'``.  In strict mode a
    :exc:`HTTPUnicodeError` is raised.
    :param s: The string to unquote.
    :param charset: the charset of the query string.  If set to `None`
                    no unicode decoding will take place.
    :param errors: The error handling for the `charset` decoding.
    """
    # Pick plus/space literals matching the input type, then unquote.
    plus, space = (u'+', u' ') if isinstance(s, text_type) else (b'+', b' ')
    return url_unquote(s.replace(plus, space), charset, errors)
def url_fix(s, charset='utf-8'):
    r"""Sometimes you get an URL by a user that just isn't a real URL because
    it contains unsafe characters like ' ' and so on.  This function can fix
    some of the problems in a similar way browsers handle data entered by the
    user:
    >>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)')
    'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)'
    :param s: the string with the URL to fix.
    :param charset: The target charset for the URL if the url was given as
                    unicode string.
    """
    # First step is to switch to unicode processing and to convert
    # backslashes (which are invalid in URLs anyways) to slashes.  This is
    # consistent with what Chrome does.
    s = to_unicode(s, charset, 'replace').replace('\\', '/')
    # For the specific case that we look like a malformed windows URL
    # we want to fix this up manually:
    if s.startswith('file://') and s[7:8].isalpha() and s[8:10] in (':/', '|/'):
        s = 'file:///' + s[7:]
    url = url_parse(s)
    # Quote each component with the characters that are legal for it.
    path = url_quote(url.path, charset, safe='/%+$!*\'(),')
    qs = url_quote_plus(url.query, charset, safe=':&%=+$!*\'(),')
    anchor = url_quote_plus(url.fragment, charset, safe=':&%=+$!*\'(),')
    return to_native(url_unparse((url.scheme, url.encode_netloc(),
                                  path, qs, anchor)))
def uri_to_iri(uri, charset='utf-8', errors='replace'):
    r"""
    Converts a URI in a given charset to a IRI.
    Examples for URI versus IRI:
    >>> uri_to_iri(b'http://xn--n3h.net/')
    u'http://\u2603.net/'
    >>> uri_to_iri(b'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th')
    u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th'
    Query strings are left unchanged:
    >>> uri_to_iri('/?foo=24&x=%26%2f')
    u'/?foo=24&x=%26%2f'
    .. versionadded:: 0.6
    :param uri: The URI to convert.
    :param charset: The charset of the URI.
    :param errors: The error handling on decode.
    """
    if isinstance(uri, tuple):
        uri = url_unparse(uri)
    uri = url_parse(to_unicode(uri, charset))
    # Keep reserved characters (listed as "unsafe") percent-encoded so the
    # result can be round-tripped; decode_netloc handles IDNA separately.
    path = url_unquote(uri.path, charset, errors, '%/;?')
    query = url_unquote(uri.query, charset, errors, '%;/?:@&=+,$#')
    fragment = url_unquote(uri.fragment, charset, errors, '%;/?:@&=+,$#')
    return url_unparse((uri.scheme, uri.decode_netloc(),
                        path, query, fragment))
def iri_to_uri(iri, charset='utf-8', errors='strict', safe_conversion=False):
    r"""
    Converts any unicode based IRI to an acceptable ASCII URI. Werkzeug always
    uses utf-8 URLs internally because this is what browsers and HTTP do as
    well. In some places where it accepts an URL it also accepts a unicode IRI
    and converts it into a URI.
    Examples for IRI versus URI:
    >>> iri_to_uri(u'http://☃.net/')
    'http://xn--n3h.net/'
    >>> iri_to_uri(u'http://üser:pässword@☃.net/påth')
    'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th'
    There is a general problem with IRI and URI conversion with some
    protocols that appear in the wild that are in violation of the URI
    specification. In places where Werkzeug goes through a forced IRI to
    URI conversion it will set the `safe_conversion` flag which will
    not perform a conversion if the end result is already ASCII. This
    can mean that the return value is not an entirely correct URI but
    it will not destroy such invalid URLs in the process.
    As an example consider the following two IRIs::
      magnet:?xt=uri:whatever
      itms-services://?action=download-manifest
    The internal representation after parsing of those URLs is the same
    and there is no way to reconstruct the original one.  If safe
    conversion is enabled however this function becomes a noop for both of
    those strings as they both can be considered URIs.
    .. versionadded:: 0.6
    .. versionchanged:: 0.9.6
       The `safe_conversion` parameter was added.
    :param iri: The IRI to convert.
    :param charset: The charset for the URI.
    :param safe_conversion: indicates if a safe conversion should take place.
                            For more information see the explanation above.
    """
    if isinstance(iri, tuple):
        iri = url_unparse(iri)
    if safe_conversion:
        try:
            native_iri = to_native(iri)
            ascii_iri = to_native(iri).encode('ascii')
            # Already pure ASCII and without whitespace: return unchanged.
            if ascii_iri.split() == [ascii_iri]:
                return native_iri
        except UnicodeError:
            # Not ASCII-representable; fall through to a full conversion.
            pass
    iri = url_parse(to_unicode(iri, charset, errors))
    netloc = iri.encode_netloc()
    # Quote each component, keeping the characters reserved for it.
    path = url_quote(iri.path, charset, errors, '/:~+%')
    query = url_quote(iri.query, charset, errors, '%&[]:;$*()+,!?*/=')
    fragment = url_quote(iri.fragment, charset, errors, '=%&[]:;$()+,!?*/')
    return to_native(url_unparse((iri.scheme, netloc,
                                  path, query, fragment)))
def url_decode(s, charset='utf-8', decode_keys=False, include_empty=True,
               errors='replace', separator='&', cls=None):
    """
    Parse a querystring and return it as :class:`MultiDict`.  There is a
    difference in key decoding on different Python versions.  On Python 3
    keys will always be fully decoded whereas on Python 2, keys will
    remain bytestrings if they fit into ASCII.  On 2.x keys can be forced
    to be unicode by setting `decode_keys` to `True`.
    If the charset is set to `None` no unicode decoding will happen and
    raw bytes will be returned.
    Per default a missing value for a key will default to an empty key.  If
    you don't want that behavior you can set `include_empty` to `False`.
    Per default encoding errors are ignored.  If you want a different behavior
    you can set `errors` to ``'replace'`` or ``'strict'``.  In strict mode a
    `HTTPUnicodeError` is raised.
    .. versionchanged:: 0.5
       In previous versions ";" and "&" could be used for url decoding.
       This changed in 0.5 where only "&" is supported.  If you want to
       use ";" instead a different `separator` can be provided.
       The `cls` parameter was added.
    :param s: a string with the query string to decode.
    :param charset: the charset of the query string.  If set to `None`
                    no unicode decoding will take place.
    :param decode_keys: Used on Python 2.x to control whether keys should
                        be forced to be unicode objects.  If set to `True`
                        then keys will be unicode in all cases. Otherwise,
                        they remain `str` if they fit into ASCII.
    :param include_empty: Set to `False` if you don't want empty values to
                          appear in the dict.
    :param errors: the decoding error behavior.
    :param separator: the pair separator to be used, defaults to ``&``
    :param cls: an optional dict class to use.  If this is not specified
                or `None` the default :class:`MultiDict` is used.
    """
    if cls is None:
        cls = MultiDict
    # Normalize the separator's type (bytes vs text) to match `s`.
    if isinstance(s, text_type) and not isinstance(separator, text_type):
        separator = separator.decode(charset or 'ascii')
    elif isinstance(s, bytes) and not isinstance(separator, bytes):
        separator = separator.encode(charset or 'ascii')
    return cls(_url_decode_impl(s.split(separator), charset, decode_keys,
                                include_empty, errors))
def url_decode_stream(stream, charset='utf-8', decode_keys=False,
                      include_empty=True, errors='replace', separator='&',
                      cls=None, limit=None, return_iterator=False):
    """Works like :func:`url_decode` but decodes a stream.  The behavior
    of stream and limit follows functions like
    :func:`~werkzeug.wsgi.make_line_iter`.  The generator of pairs is
    directly fed to the `cls` so you can consume the data while it's
    parsed.
    .. versionadded:: 0.8
    :param stream: a stream with the encoded querystring
    :param charset: the charset of the query string.  If set to `None`
                    no unicode decoding will take place.
    :param decode_keys: Used on Python 2.x to control whether keys should
                        be forced to be unicode objects.  If set to `True`,
                        keys will be unicode in all cases. Otherwise, they
                        remain `str` if they fit into ASCII.
    :param include_empty: Set to `False` if you don't want empty values to
                          appear in the dict.
    :param errors: the decoding error behavior.
    :param separator: the pair separator to be used, defaults to ``&``
    :param cls: an optional dict class to use.  If this is not specified
                or `None` the default :class:`MultiDict` is used.
    :param limit: the content length of the URL data.  Not necessary if
                  a limited stream is provided.
    :param return_iterator: if set to `True` the `cls` argument is ignored
                            and an iterator over all decoded pairs is
                            returned
    """
    # Imported here to avoid a circular import with werkzeug.wsgi.
    from werkzeug.wsgi import make_chunk_iter
    if return_iterator:
        # Identity "container": hand the raw pair iterator back unchanged.
        cls = lambda x: x
    elif cls is None:
        cls = MultiDict
    pair_iter = make_chunk_iter(stream, separator, limit)
    return cls(_url_decode_impl(pair_iter, charset, decode_keys,
                                include_empty, errors))
def _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors):
    # Yield (key, value) tuples decoded from "k=v" chunks; pairs without
    # "=" get an empty value unless include_empty is false.
    for pair in pair_iter:
        if not pair:
            continue
        s = make_literal_wrapper(pair)
        equal = s('=')
        if equal in pair:
            key, value = pair.split(equal, 1)
        else:
            if not include_empty:
                continue
            key = pair
            value = s('')
        key = url_unquote_plus(key, charset, errors)
        if charset is not None and PY2 and not decode_keys:
            # On py2 keep ASCII keys as native `str` for dict ergonomics.
            key = try_coerce_native(key)
        yield key, url_unquote_plus(value, charset, errors)
def url_encode(obj, charset='utf-8', encode_keys=False, sort=False, key=None,
               separator=b'&'):
    """URL encode a dict/`MultiDict`.  If a value is `None` it will not appear
    in the result string.  Per default only values are encoded into the target
    charset strings.  If `encode_keys` is set to ``True`` unicode keys are
    supported too.
    If `sort` is set to `True` the items are sorted by `key` or the default
    sorting algorithm.
    .. versionadded:: 0.5
        `sort`, `key`, and `separator` were added.
    :param obj: the object to encode into a query string.
    :param charset: the charset of the query string.
    :param encode_keys: set to `True` if you have unicode keys. (Ignored on
                        Python 3.x)
    :param sort: set to `True` if you want parameters to be sorted by `key`.
    :param separator: the separator to be used for the pairs.
    :param key: an optional function to be used for sorting.  For more details
                check out the :func:`sorted` documentation.
    """
    sep = to_native(separator, 'ascii')
    pairs = _url_encode_impl(obj, charset, encode_keys, sort, key)
    return sep.join(pairs)
def url_encode_stream(obj, stream=None, charset='utf-8', encode_keys=False,
                      sort=False, key=None, separator=b'&'):
    """Like :meth:`url_encode` but writes the results to a stream
    object.  If the stream is `None` a generator over all encoded
    pairs is returned.
    .. versionadded:: 0.8
    :param obj: the object to encode into a query string.
    :param stream: a stream to write the encoded object into or `None` if
                   an iterator over the encoded pairs should be returned.  In
                   that case the separator argument is ignored.
    :param charset: the charset of the query string.
    :param encode_keys: set to `True` if you have unicode keys. (Ignored on
                        Python 3.x)
    :param sort: set to `True` if you want parameters to be sorted by `key`.
    :param separator: the separator to be used for the pairs.
    :param key: an optional function to be used for sorting.  For more details
                check out the :func:`sorted` documentation.
    """
    separator = to_native(separator, 'ascii')
    gen = _url_encode_impl(obj, charset, encode_keys, sort, key)
    if stream is None:
        return gen
    # Interleave separators between chunks (but not before the first one).
    for idx, chunk in enumerate(gen):
        if idx:
            stream.write(separator)
        stream.write(chunk)
def url_join(base, url, allow_fragments=True):
    """Join a base URL and a possibly relative URL to form an absolute
    interpretation of the latter.
    :param base: the base URL for the join operation.
    :param url: the URL to join.
    :param allow_fragments: indicates whether fragments should be allowed.
    """
    if isinstance(base, tuple):
        base = url_unparse(base)
    if isinstance(url, tuple):
        url = url_unparse(url)
    base, url = normalize_string_tuple((base, url))
    s = make_literal_wrapper(base)
    if not base:
        return url
    if not url:
        return base
    bscheme, bnetloc, bpath, bquery, bfragment = \
        url_parse(base, allow_fragments=allow_fragments)
    scheme, netloc, path, query, fragment = \
        url_parse(url, bscheme, allow_fragments)
    # A different scheme or an explicit netloc means `url` is absolute.
    if scheme != bscheme:
        return url
    if netloc:
        return url_unparse((scheme, netloc, path, query, fragment))
    netloc = bnetloc
    if path[:1] == s('/'):
        segments = path.split(s('/'))
    elif not path:
        segments = bpath.split(s('/'))
        if not query:
            query = bquery
    else:
        # Relative path: resolve against the base path's directory.
        segments = bpath.split(s('/'))[:-1] + path.split(s('/'))
    # If the rightmost part is "./" we want to keep the slash but
    # remove the dot.
    if segments[-1] == s('.'):
        segments[-1] = s('')
    # Resolve ".." and "."
    segments = [segment for segment in segments if segment != s('.')]
    while 1:
        i = 1
        n = len(segments) - 1
        while i < n:
            if segments[i] == s('..') and \
               segments[i - 1] not in (s(''), s('..')):
                del segments[i - 1:i + 1]
                break
            i += 1
        else:
            break
    # Remove trailing ".." if the URL is absolute
    unwanted_marker = [s(''), s('..')]
    while segments[:2] == unwanted_marker:
        del segments[1]
    path = s('/').join(segments)
    return url_unparse((scheme, netloc, path, query, fragment))
class Href(object):
    """Implements a callable that constructs URLs with the given base.  The
    function can be called with any number of positional and keyword
    arguments which than are used to assemble the URL.  Works with URLs
    and posix paths.
    Positional arguments are appended as individual segments to
    the path of the URL:
    >>> href = Href('/foo')
    >>> href('bar', 23)
    '/foo/bar/23'
    >>> href('foo', bar=23)
    '/foo/foo?bar=23'
    If any of the arguments (positional or keyword) evaluates to `None` it
    will be skipped.  If no keyword arguments are given the last argument
    can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass),
    otherwise the keyword arguments are used for the query parameters, cutting
    off the first trailing underscore of the parameter name:
    >>> href(is_=42)
    '/foo?is=42'
    >>> href({'foo': 'bar'})
    '/foo?foo=bar'
    Combining of both methods is not allowed:
    >>> href({'foo': 'bar'}, bar=42)
    Traceback (most recent call last):
      ...
    TypeError: keyword arguments and query-dicts can't be combined
    Accessing attributes on the href object creates a new href object with
    the attribute name as prefix:
    >>> bar_href = href.bar
    >>> bar_href("blub")
    '/foo/bar/blub'
    If `sort` is set to `True` the items are sorted by `key` or the default
    sorting algorithm:
    >>> href = Href("/", sort=True)
    >>> href(a=1, b=2, c=3)
    '/?a=1&b=2&c=3'
    .. versionadded:: 0.5
        `sort` and `key` were added.
    """

    def __init__(self, base='./', charset='utf-8', sort=False, key=None):
        if not base:
            base = './'
        self.base = base
        self.charset = charset
        self.sort = sort
        self.key = key

    def __getattr__(self, name):
        # Dunder lookups (e.g. copy/pickle protocol probes) must not
        # produce child Href objects.
        if name[:2] == '__':
            raise AttributeError(name)
        base = self.base
        if base[-1:] != '/':
            base += '/'
        return Href(url_join(base, name), self.charset, self.sort, self.key)

    def __call__(self, *path, **query):
        if path and isinstance(path[-1], dict):
            if query:
                raise TypeError('keyword arguments and query-dicts '
                                'can\'t be combined')
            query, path = path[-1], path[:-1]
        elif query:
            # Strip one trailing underscore so reserved words can be
            # passed as keyword arguments (e.g. ``is_``).
            query = dict([(k.endswith('_') and k[:-1] or k, v)
                          for k, v in query.items()])
        path = '/'.join([to_unicode(url_quote(x, self.charset), 'ascii')
                         for x in path if x is not None]).lstrip('/')
        rv = self.base
        if path:
            if not rv.endswith('/'):
                rv += '/'
            rv = url_join(rv, './' + path)
        if query:
            rv += '?' + to_unicode(url_encode(query, self.charset, sort=self.sort,
                                              key=self.key), 'ascii')
        return to_native(rv)
| mit |
Thor77/youtube-dl | youtube_dl/extractor/esri.py | 35 | 2627 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
int_or_none,
parse_filesize,
unified_strdate,
)
class EsriVideoIE(InfoExtractor):
    """Extractor for videos hosted on video.esri.com."""
    _VALID_URL = r'https?://video\.esri\.com/watch/(?P<id>[0-9]+)'
    _TEST = {
        'url': 'https://video.esri.com/watch/1124/arcgis-online-_dash_-developing-applications',
        'md5': 'd4aaf1408b221f1b38227a9bbaeb95bc',
        'info_dict': {
            'id': '1124',
            'ext': 'mp4',
            'title': 'ArcGIS Online - Developing Applications',
            'description': 'Jeremy Bartley demonstrates how to develop applications with ArcGIS Online.',
            'thumbnail': 're:^https?://.*\.jpg$',
            'duration': 185,
            'upload_date': '20120419',
        }
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        formats = []
        # The page lists downloads grouped by resolution ("WxH:" headers),
        # each with one or more <a> links carrying extension and file size.
        for width, height, content in re.findall(
                r'(?s)<li><strong>(\d+)x(\d+):</strong>(.+?)</li>', webpage):
            for video_url, ext, filesize in re.findall(
                    r'<a[^>]+href="([^"]+)">([^<]+) \(([^<]+)\)</a>', content):
                formats.append({
                    'url': compat_urlparse.urljoin(url, video_url),
                    'ext': ext.lower(),
                    'format_id': '%s-%s' % (ext.lower(), height),
                    'width': int(width),
                    'height': int(height),
                    'filesize_approx': parse_filesize(filesize),
                })
        self._sort_formats(formats)
        title = self._html_search_meta('title', webpage, 'title')
        description = self._html_search_meta(
            'description', webpage, 'description', fatal=False)
        thumbnail = self._html_search_meta('thumbnail', webpage, 'thumbnail', fatal=False)
        if thumbnail:
            # Swap the small/thumb suffix for the extra-large variant.
            thumbnail = re.sub(r'_[st]\.jpg$', '_x.jpg', thumbnail)
        duration = int_or_none(self._search_regex(
            [r'var\s+videoSeconds\s*=\s*(\d+)', r"'duration'\s*:\s*(\d+)"],
            webpage, 'duration', fatal=False))
        upload_date = unified_strdate(self._html_search_meta(
            'last-modified', webpage, 'upload date', fatal=False))
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'duration': duration,
            'upload_date': upload_date,
            'formats': formats
        }
| unlicense |
blink1073/scikit-image | skimage/segmentation/tests/test_felzenszwalb.py | 27 | 2021 | import numpy as np
from numpy.testing import assert_equal, assert_array_equal
from skimage._shared.testing import assert_greater, test_parallel
from skimage.segmentation import felzenszwalb
from skimage import data
@test_parallel()
def test_grey():
    """Felzenszwalb on a 2x2-quadrant grayscale image yields 4 segments."""
    # very weak tests. This algorithm is pretty unstable.
    img = np.zeros((20, 21))
    img[:10, 10:] = 0.2
    img[10:, :10] = 0.4
    img[10:, 10:] = 0.6
    seg = felzenszwalb(img, sigma=0)
    # we expect 4 segments:
    assert_equal(len(np.unique(seg)), 4)
    # that mostly respect the 4 regions:
    for i in range(4):
        # Histogram bins are chosen to isolate the four intensity levels.
        hist = np.histogram(img[seg == i], bins=[0, 0.1, 0.3, 0.5, 1])[0]
        assert_greater(hist[i], 40)
def test_minsize():
    """The `min_size` parameter bounds segment sizes from below."""
    # single-channel:
    img = data.coins()[20:168,0:128]
    for min_size in np.arange(10, 100, 10):
        segments = felzenszwalb(img, min_size=min_size, sigma=3)
        counts = np.bincount(segments.ravel())
        # actually want to test greater or equal.
        assert_greater(counts.min() + 1, min_size)
    # multi-channel:
    coffee = data.coffee()[::4, ::4]
    for min_size in np.arange(10, 100, 10):
        segments = felzenszwalb(coffee, min_size=min_size, sigma=3)
        counts = np.bincount(segments.ravel())
        # actually want to test greater or equal.
        # the construction doesn't guarantee min_size is respected
        # after intersecting the segmentations for the colors
        assert_greater(np.mean(counts) + 1, min_size)
def test_color():
    """Felzenszwalb on an RGB image with 4 distinct quadrants."""
    # very weak tests. This algorithm is pretty unstable.
    img = np.zeros((20, 21, 3))
    img[:10, :10, 0] = 1
    img[10:, :10, 1] = 1
    img[10:, 10:, 2] = 1
    seg = felzenszwalb(img, sigma=0)
    # we expect 4 segments:
    assert_equal(len(np.unique(seg)), 4)
    # Each quadrant should map to one stable label.
    assert_array_equal(seg[:10, :10], 0)
    assert_array_equal(seg[10:, :10], 2)
    assert_array_equal(seg[:10, 10:], 1)
    assert_array_equal(seg[10:, 10:], 3)
if __name__ == '__main__':
    # Allow running this test module directly as a script.
    from numpy import testing
    testing.run_module_suite()
| bsd-3-clause |
thomasquintana/jobber | jobber/core/actor/mailbox.py | 1 | 1271 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Thomas Quintana <quintana.thomas@gmail.com>
from collections import deque
class Mailbox(object):
    """First-in-first-out message queue backing an actor.

    Messages are delivered in the exact order they were appended.
    """

    def __init__(self):
        # A deque gives O(1) appends on the right and pops on the left.
        self._queue = deque()

    def append(self, message):
        """Enqueue *message* at the back of the mailbox."""
        self._queue.append(message)

    def first(self):
        """Return the oldest message without removing it."""
        return self._queue[0]

    def pop(self):
        """Remove and return the oldest message."""
        return self._queue.popleft()

    def flush(self):
        """Discard every queued message."""
        self._queue.clear()

    def __len__(self):
        """Number of messages currently queued."""
        return len(self._queue)
| apache-2.0 |
Hoekz/hackness-monster | venv/lib/python2.7/site-packages/pip/vcs/bazaar.py | 514 | 3803 | from __future__ import absolute_import
import logging
import os
import tempfile
# TODO: Get this into six.moves.urllib.parse
try:
from urllib import parse as urllib_parse
except ImportError:
import urlparse as urllib_parse
from pip.utils import rmtree, display_path
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
logger = logging.getLogger(__name__)
class Bazaar(VersionControl):
    """Bazaar (bzr) version-control backend for pip."""
    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    schemes = (
        'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
        'bzr+lp',
    )

    def __init__(self, url=None, *args, **kwargs):
        super(Bazaar, self).__init__(url, *args, **kwargs)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical
        # Register lp but do not expose as a scheme to support bzr+lp.
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(['lp'])
            urllib_parse.non_hierarchical.extend(['lp'])

    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        # Check out into a temp dir first, then `bzr export` from there.
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        if os.path.exists(location):
            # Remove the location to make sure Bazaar can export it correctly
            rmtree(location)
        try:
            self.run_command(['export', location], cwd=temp_dir,
                             show_stdout=False)
        finally:
            rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        """Point the existing checkout at *dest* to a new branch URL."""
        self.run_command(['switch', url], cwd=dest)

    def update(self, dest, rev_options):
        """Pull the latest revisions into the checkout at *dest*."""
        self.run_command(['pull', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        """Create a branch of the remote repository at *dest*."""
        url, rev = self.get_url_rev()
        if rev:
            rev_options = ['-r', rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['branch', '-q'] + rev_options + [url, dest])

    def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev

    def get_url(self, location):
        """Return the remote branch URL of the checkout at *location*."""
        urls = self.run_command(['info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    repo = line.split(x)[1]
                    if self._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        return None

    def get_revision(self, location):
        """Return the current revision number of the checkout."""
        revision = self.run_command(
            ['revno'], show_stdout=False, cwd=location)
        return revision.splitlines()[-1]

    def get_src_requirement(self, dist, location):
        """Build a pinned ``bzr+URL@rev#egg=name`` requirement string."""
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False
| mit |
russelmahmud/mess-account | django/contrib/sessions/backends/db.py | 232 | 2756 | import datetime
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
from django.db import IntegrityError, transaction, router
from django.utils.encoding import force_unicode
class SessionStore(SessionBase):
    """
    Implements database session store.
    """
    def __init__(self, session_key=None):
        super(SessionStore, self).__init__(session_key)
    def load(self):
        # Fetch the non-expired session row and decode its payload.  Any
        # missing/expired key or tampered payload yields a brand-new empty
        # session rather than an error.
        try:
            s = Session.objects.get(
                session_key = self.session_key,
                expire_date__gt=datetime.datetime.now()
            )
            return self.decode(force_unicode(s.session_data))
        except (Session.DoesNotExist, SuspiciousOperation):
            self.create()
            return {}
    def exists(self, session_key):
        # True if a row with this key exists (expiry is not checked here).
        try:
            Session.objects.get(session_key=session_key)
        except Session.DoesNotExist:
            return False
        return True
    def create(self):
        # Keep generating fresh keys until an INSERT succeeds; CreateError
        # signals a key collision, so loop and try another key.
        while True:
            self.session_key = self._get_new_session_key()
            try:
                # Save immediately to ensure we have a unique entry in the
                # database.
                self.save(must_create=True)
            except CreateError:
                # Key wasn't unique. Try again.
                continue
            self.modified = True
            self._session_cache = {}
            return
    def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        obj = Session(
            session_key = self.session_key,
            session_data = self.encode(self._get_session(no_load=must_create)),
            expire_date = self.get_expiry_date()
        )
        using = router.db_for_write(Session, instance=obj)
        # Use a savepoint so a duplicate-key failure can be rolled back
        # without aborting any enclosing transaction.
        sid = transaction.savepoint(using=using)
        try:
            obj.save(force_insert=must_create, using=using)
        except IntegrityError:
            if must_create:
                transaction.savepoint_rollback(sid, using=using)
                raise CreateError
            raise
    def delete(self, session_key=None):
        # Default to this store's own key; a store that was never saved
        # (no key yet) has nothing to delete.
        if session_key is None:
            if self._session_key is None:
                return
            session_key = self._session_key
        try:
            Session.objects.get(session_key=session_key).delete()
        except Session.DoesNotExist:
            pass
# At bottom to avoid circular import
from django.contrib.sessions.models import Session
| bsd-3-clause |
huggingface/pytorch-transformers | src/transformers/modeling_flax_pytorch_utils.py | 1 | 9873 | # coding=utf-8
# Copyright 2021 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch - Flax general utilities."""
import os
from pickle import UnpicklingError
import numpy as np
import jax.numpy as jnp
import transformers
from flax.serialization import from_bytes
from flax.traverse_util import flatten_dict, unflatten_dict
from .utils import logging
logger = logging.get_logger(__name__)
#####################
# PyTorch => Flax #
#####################
def load_pytorch_checkpoint_in_flax_state_dict(flax_model, pytorch_checkpoint_path, allow_missing_keys=False):
    """Load pytorch checkpoints in a flax model"""
    # PyTorch is an optional dependency here; fail with a clear hint when
    # it is not importable.
    try:
        import torch  # noqa: F401
    except ImportError:
        logger.error(
            "Loading a PyTorch model in Flax, requires both PyTorch and Flax to be installed. Please see "
            "https://pytorch.org/ and https://flax.readthedocs.io/en/latest/installation.html for installation instructions."
        )
        raise
    resolved_path = os.path.abspath(pytorch_checkpoint_path)
    logger.info(f"Loading PyTorch weights from {resolved_path}")
    # Always load onto CPU: the weights are converted to numpy right after.
    state_dict = torch.load(resolved_path, map_location="cpu")
    total_params = sum(t.numel() for t in state_dict.values())
    logger.info(f"PyTorch checkpoint contains {total_params:,} parameters.")
    return convert_pytorch_state_dict_to_flax(state_dict, flax_model)
def convert_pytorch_state_dict_to_flax(pt_state_dict, flax_model):
    """Convert a flat PyTorch ``state_dict`` into a nested Flax params dict.

    Handles the naming differences between the frameworks:
      * strips or adds the model's ``base_model_prefix`` as needed,
      * renames ``weight``/``gamma``/``beta`` to ``scale``/``embedding``/
        ``kernel``/``weight``/``bias`` depending on the target Flax layer,
      * transposes dense kernels (stored transposed in PyTorch).

    :param pt_state_dict: mapping of dotted PyTorch parameter names to tensors.
    :param flax_model: target Flax model whose ``params`` define the expected
        key structure and shapes.
    :raises ValueError: if a converted tensor's shape does not match the shape
        expected by the Flax model.
    """
    # convert pytorch tensor to numpy
    pt_state_dict = {k: v.numpy() for k, v in pt_state_dict.items()}
    random_flax_state_dict = flatten_dict(flax_model.params)
    flax_state_dict = {}
    # Decide whether the base-model prefix must be stripped (PT keys carry it
    # but the Flax params do not) or added (the reverse situation).
    remove_base_model_prefix = (flax_model.base_model_prefix not in flax_model.params) and (
        flax_model.base_model_prefix in set([k.split(".")[0] for k in pt_state_dict.keys()])
    )
    add_base_model_prefix = (flax_model.base_model_prefix in flax_model.params) and (
        flax_model.base_model_prefix not in set([k.split(".")[0] for k in pt_state_dict.keys()])
    )
    # Need to change some parameters name to match Flax names so that we don't have to fork any layer
    for pt_key, pt_tensor in pt_state_dict.items():
        pt_tuple_key = tuple(pt_key.split("."))
        has_base_model_prefix = pt_tuple_key[0] == flax_model.base_model_prefix
        require_base_model_prefix = (flax_model.base_model_prefix,) + pt_tuple_key in random_flax_state_dict
        if remove_base_model_prefix and has_base_model_prefix:
            pt_tuple_key = pt_tuple_key[1:]
        elif add_base_model_prefix and require_base_model_prefix:
            pt_tuple_key = (flax_model.base_model_prefix,) + pt_tuple_key
        # Correctly rename weight parameters
        if pt_tuple_key[-1] in ["weight", "gamma"] and pt_tuple_key[:-1] + ("scale",) in random_flax_state_dict:
            pt_tuple_key = pt_tuple_key[:-1] + ("scale",)
        if pt_tuple_key[-1] == "weight" and pt_tuple_key[:-1] + ("embedding",) in random_flax_state_dict:
            pt_tuple_key = pt_tuple_key[:-1] + ("embedding",)
        elif pt_tuple_key[-1] == "weight" and pt_tuple_key not in random_flax_state_dict:
            # Dense kernels: Flax stores the transpose of the PyTorch weight.
            pt_tuple_key = pt_tuple_key[:-1] + ("kernel",)
            pt_tensor = pt_tensor.T
        elif pt_tuple_key[-1] == "gamma":
            pt_tuple_key = pt_tuple_key[:-1] + ("weight",)
        elif pt_tuple_key[-1] == "beta":
            pt_tuple_key = pt_tuple_key[:-1] + ("bias",)
        if pt_tuple_key in random_flax_state_dict:
            if pt_tensor.shape != random_flax_state_dict[pt_tuple_key].shape:
                # BUG FIX: this message was a plain string, so the {...}
                # placeholders were never interpolated; it must be an f-string.
                raise ValueError(
                    f"PyTorch checkpoint seems to be incorrect. Weight {pt_key} was expected to be of shape "
                    f"{random_flax_state_dict[pt_tuple_key].shape}, but is {pt_tensor.shape}."
                )
        # also add unexpected weight so that warning is thrown
        flax_state_dict[pt_tuple_key] = jnp.asarray(pt_tensor)
    return unflatten_dict(flax_state_dict)
#####################
# Flax => PyTorch #
#####################
def load_flax_checkpoint_in_pytorch_model(model, flax_checkpoint_path):
    """Load flax checkpoints in a PyTorch model"""
    checkpoint_file = os.path.abspath(flax_checkpoint_path)
    logger.info(f"Loading Flax weights from {checkpoint_file}")
    # import correct flax class
    flax_cls = getattr(transformers, "Flax" + model.__class__.__name__)
    # load flax weight dict
    with open(checkpoint_file, "rb") as state_f:
        try:
            flax_state_dict = from_bytes(flax_cls, state_f.read())
        except UnpicklingError:
            raise EnvironmentError(f"Unable to convert {checkpoint_file} to Flax deserializable object. ")
    return load_flax_weights_in_pytorch_model(model, flax_state_dict)
def load_flax_weights_in_pytorch_model(pt_model, flax_state):
    """Load flax checkpoints in a PyTorch model.

    Mirrors ``convert_pytorch_state_dict_to_flax`` in the other direction:
    the base-model prefix is stripped/added as needed, Flax ``kernel`` /
    ``scale`` / ``embedding`` leaf names are mapped back to PyTorch
    ``weight`` (kernels are transposed), and warnings are logged for
    unexpected or missing keys.

    :param pt_model: target PyTorch model; mutated in place and returned.
    :param flax_state: nested Flax parameter dict to load.
    :raises ValueError: if a converted tensor's shape does not match the
        corresponding PyTorch parameter's shape.
    """
    try:
        import torch  # noqa: F401
    except ImportError:
        logger.error(
            "Loading a Flax weights in PyTorch, requires both PyTorch and Flax to be installed. Please see "
            "https://pytorch.org/ and https://flax.readthedocs.io/en/latest/installation.html for installation instructions."
        )
        raise
    flax_state_dict = flatten_dict(flax_state)
    pt_model_dict = pt_model.state_dict()
    # Decide whether the base-model prefix must be stripped or added so that
    # flattened Flax keys line up with the PyTorch state-dict keys.
    remove_base_model_prefix = (pt_model.base_model_prefix in flax_state) and (
        pt_model.base_model_prefix not in set([k.split(".")[0] for k in pt_model_dict.keys()])
    )
    add_base_model_prefix = (pt_model.base_model_prefix not in flax_state) and (
        pt_model.base_model_prefix in set([k.split(".")[0] for k in pt_model_dict.keys()])
    )
    # keep track of unexpected & missing keys
    unexpected_keys = []
    missing_keys = set(pt_model_dict.keys())
    for flax_key_tuple, flax_tensor in flax_state_dict.items():
        has_base_model_prefix = flax_key_tuple[0] == pt_model.base_model_prefix
        require_base_model_prefix = ".".join((pt_model.base_model_prefix,) + flax_key_tuple) in pt_model_dict
        # adapt flax_key to prepare for loading from/to base model only
        if remove_base_model_prefix and has_base_model_prefix:
            flax_key_tuple = flax_key_tuple[1:]
        elif add_base_model_prefix and require_base_model_prefix:
            flax_key_tuple = (pt_model.base_model_prefix,) + flax_key_tuple
        # rename flax weights to PyTorch format
        if flax_key_tuple[-1] == "kernel" and ".".join(flax_key_tuple) not in pt_model_dict:
            # Dense kernels were transposed when exported from PyTorch.
            flax_key_tuple = flax_key_tuple[:-1] + ("weight",)
            flax_tensor = flax_tensor.T
        elif flax_key_tuple[-1] in ["scale", "embedding"]:
            flax_key_tuple = flax_key_tuple[:-1] + ("weight",)
        flax_key = ".".join(flax_key_tuple)
        if flax_key in pt_model_dict:
            if flax_tensor.shape != pt_model_dict[flax_key].shape:
                # BUG FIX: the two message fragments were concatenated with
                # no separating space, producing "expectedto be".
                raise ValueError(
                    f"Flax checkpoint seems to be incorrect. Weight {flax_key_tuple} was expected "
                    f"to be of shape {pt_model_dict[flax_key].shape}, but is {flax_tensor.shape}."
                )
            else:
                # add weight to pytorch dict
                flax_tensor = np.asarray(flax_tensor) if not isinstance(flax_tensor, np.ndarray) else flax_tensor
                pt_model_dict[flax_key] = torch.from_numpy(flax_tensor)
                # remove from missing keys
                missing_keys.remove(flax_key)
        else:
            # weight is not expected by PyTorch model
            unexpected_keys.append(flax_key)
    pt_model.load_state_dict(pt_model_dict)
    # re-transform missing_keys to list
    missing_keys = list(missing_keys)
    if len(unexpected_keys) > 0:
        logger.warning(
            "Some weights of the Flax model were not used when "
            f"initializing the PyTorch model {pt_model.__class__.__name__}: {unexpected_keys}\n"
            f"- This IS expected if you are initializing {pt_model.__class__.__name__} from a Flax model trained on another task "
            "or with another architecture (e.g. initializing a BertForSequenceClassification model from a FlaxBertForPreTraining model).\n"
            f"- This IS NOT expected if you are initializing {pt_model.__class__.__name__} from a Flax model that you expect "
            "to be exactly identical (e.g. initializing a BertForSequenceClassification model from a FlaxBertForSequenceClassification model)."
        )
    else:
        logger.warning(f"All Flax model weights were used when initializing {pt_model.__class__.__name__}.\n")
    if len(missing_keys) > 0:
        logger.warning(
            f"Some weights of {pt_model.__class__.__name__} were not initialized from the Flax model "
            f"and are newly initialized: {missing_keys}\n"
            "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference."
        )
    else:
        logger.warning(
            f"All the weights of {pt_model.__class__.__name__} were initialized from the Flax model.\n"
            "If your task is similar to the task the model of the checkpoint was trained on, "
            f"you can already use {pt_model.__class__.__name__} for predictions without further training."
        )
    return pt_model
| apache-2.0 |
Erethon/synnefo | snf-cyclades-app/synnefo/volume/util.py | 2 | 3867 | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from synnefo.db import models
from snf_django.lib.api import faults
from synnefo.api.util import get_image_dict, get_vm
from synnefo.plankton import backend
from synnefo.cyclades_settings import cyclades_services, BASE_HOST
from synnefo.lib import join_urls
from synnefo.lib.services import get_service_path
def get_volume(user_id, volume_id, for_update=False,
               non_deleted=False,
               exception=faults.ItemNotFound):
    """Fetch a volume owned by *user_id*, optionally locked for update.

    Raises BadRequest for a malformed id or (when non_deleted is set) a
    deleted volume, and *exception* when no matching volume exists.
    """
    queryset = models.Volume.objects
    if for_update:
        queryset = queryset.select_for_update()
    try:
        volume_id = int(volume_id)
    except (TypeError, ValueError):
        raise faults.BadRequest("Invalid volume id: %s" % volume_id)
    try:
        volume = queryset.get(id=volume_id, userid=user_id)
    except models.Volume.DoesNotExist:
        raise exception("Volume %s not found" % volume_id)
    if non_deleted and volume.deleted:
        raise faults.BadRequest("Volume '%s' has been deleted." % volume_id)
    return volume
def get_volume_type(volume_type_id, for_update=False, include_deleted=False,
                    exception=faults.ItemNotFound):
    """Fetch a volume type by id.

    :param volume_type_id: id of the volume type (coerced to int).
    :param for_update: lock the row with SELECT ... FOR UPDATE.
    :param include_deleted: also consider soft-deleted types.
    :param exception: exception class raised when the type is missing.
    :raises faults.BadRequest: if the id is not an integer.
    """
    vtypes = models.VolumeType.objects
    if not include_deleted:
        vtypes = vtypes.filter(deleted=False)
    if for_update:
        vtypes = vtypes.select_for_update()
    try:
        vtype_id = int(volume_type_id)
    except (TypeError, ValueError):
        # BUG FIX: the message previously said "Invalid volume id", which
        # misreported a bad volume *type* id (copy-paste from get_volume).
        raise faults.BadRequest("Invalid volume type id: %s" % volume_type_id)
    try:
        return vtypes.get(id=vtype_id)
    except models.VolumeType.DoesNotExist:
        raise exception("Volume type %s not found" % vtype_id)
def get_snapshot(user_id, snapshot_id, exception=faults.ItemNotFound):
    """Look up a snapshot through the Plankton backend, re-raising a
    backend ItemNotFound as *exception* with a uniform message."""
    try:
        with backend.PlanktonBackend(user_id) as plankton:
            return plankton.get_snapshot(snapshot_id)
    except faults.ItemNotFound:
        raise exception("Snapshot %s not found" % snapshot_id)
def get_image(user_id, image_id, exception=faults.ItemNotFound):
    """Fetch the image dict for *image_id*, mapping a missing image to
    *exception* with a uniform message."""
    try:
        return get_image_dict(image_id, user_id)
    except faults.ItemNotFound:
        raise exception("Image %s not found" % image_id)
def get_server(user_id, server_id, for_update=False, non_deleted=False,
               exception=faults.ItemNotFound):
    """Fetch a non-suspended server (VM) owned by *user_id*.

    Raises BadRequest for a malformed id and *exception* when the server
    does not exist.
    """
    try:
        vm_id = int(server_id)
    except (TypeError, ValueError):
        raise faults.BadRequest("Invalid server id: %s" % server_id)
    try:
        return get_vm(vm_id, user_id, for_update=for_update,
                      non_deleted=non_deleted, non_suspended=True)
    except faults.ItemNotFound:
        raise exception("Server %s not found" % vm_id)
# Base URL of the Cyclades volume (block storage) API, version v2.0.
VOLUME_URL = \
    join_urls(BASE_HOST,
              get_service_path(cyclades_services, "volume", version="v2.0"))
# Collection endpoints used below to build "self"/"bookmark" links.
VOLUMES_URL = join_urls(VOLUME_URL, "volumes/")
SNAPSHOTS_URL = join_urls(VOLUME_URL, "snapshots/")
def volume_to_links(volume_id):
    """Return OpenStack-style "self" and "bookmark" links for a volume."""
    target = join_urls(VOLUMES_URL, str(volume_id))
    links = []
    for rel in ("self", "bookmark"):
        links.append({"rel": rel, "href": target})
    return links
def snapshot_to_links(snapshot_id):
    """Return OpenStack-style "self" and "bookmark" links for a snapshot."""
    target = join_urls(SNAPSHOTS_URL, str(snapshot_id))
    links = []
    for rel in ("self", "bookmark"):
        links.append({"rel": rel, "href": target})
    return links
| gpl-3.0 |
yglazko/socorro | socorro/unittest/testlib/testLoggerForTest.py | 11 | 5192 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from socorro.unittest.testlib.loggerForTest import TestingLogger
import logging
class BogusLogger:
    """Minimal stand-in logger that records only the most recent log call."""

    def __init__(self):
        # (level, message, args) of the latest call, or None before any call.
        self.item = None

    def log(self, level, message, *args):
        self.item = (level, message, args)
def testConstructor():
    """A bare TestingLogger has logger=None and the six standard level
    names; one wrapping a delegate stores the delegate untouched."""
    tl = TestingLogger()
    bl = BogusLogger()
    assert None == tl.logger
    assert 6 == len(tl.levelcode)
    expected = {0: 'NOTSET', 10: 'DEBUG', 20: 'INFO', 30: 'WARNING',
                40: 'ERROR', 50: 'FATAL'}
    for c in expected.keys():
        # BUG FIX: the failure message referenced the undefined name 'lc',
        # which would raise NameError instead of a useful assertion message.
        assert expected[c] == tl.levelcode[c], \
            'But at %s expected %s got %s' % (c, expected[c], tl.levelcode[c])
    tl = TestingLogger(bl)
    assert bl is tl.logger
    assert None == bl.item
def testLog():
    """log() records the level and %-formatted message locally and forwards
    the raw (level, message, args) triple to the wrapped logger."""
    recorder = BogusLogger()
    plain = TestingLogger()
    wrapped = TestingLogger(recorder)
    for lvl in (0, 10, 20, 30, 40, 50):
        plain.log(lvl, 'message')
        wrapped.log(lvl, 'message')
        assert plain.buffer[-1] == 'message'
        assert plain.levels[-1] == lvl
        assert recorder.item == (lvl, 'message', ())
    plain = TestingLogger()
    wrapped = TestingLogger(recorder)
    for lvl in (0, 10, 20, 30, 40, 50):
        plain.log(lvl, 'message %s %s', 'one', 'two')
        wrapped.log(lvl, 'message %s %s', 'one', 'two')
        assert plain.buffer[-1] == 'message one two'
        assert recorder.item == (lvl, 'message %s %s', ('one', 'two'))
def testDebug():
    """debug() records a DEBUG entry locally and forwards it verbatim."""
    recorder = BogusLogger()
    plain = TestingLogger()
    wrapped = TestingLogger(recorder)
    plain.debug("bug")
    wrapped.debug("bug")
    assert recorder.item == (logging.DEBUG, 'bug', ())
    assert plain.levels[0] == logging.DEBUG
    assert wrapped.levels[0] == logging.DEBUG
    assert plain.buffer[0] == 'bug'
    assert wrapped.buffer[0] == 'bug'
def testInfo():
    """info() records an INFO entry locally and forwards it verbatim."""
    recorder = BogusLogger()
    plain = TestingLogger()
    wrapped = TestingLogger(recorder)
    plain.info("info")
    wrapped.info("info")
    assert recorder.item == (logging.INFO, 'info', ())
    assert plain.levels[0] == logging.INFO
    assert wrapped.levels[0] == logging.INFO
    assert plain.buffer[0] == 'info'
    assert wrapped.buffer[0] == 'info'
def testWarning():
    """warning() records a WARNING entry locally and forwards it verbatim."""
    recorder = BogusLogger()
    plain = TestingLogger()
    wrapped = TestingLogger(recorder)
    plain.warning("warning")
    wrapped.warning("warning")
    assert recorder.item == (logging.WARNING, 'warning', ())
    assert plain.levels[0] == logging.WARNING
    assert wrapped.levels[0] == logging.WARNING
    assert plain.buffer[0] == 'warning'
    assert wrapped.buffer[0] == 'warning'
def testWarn():
    """warn() records a WARN (== WARNING) entry and forwards it verbatim."""
    recorder = BogusLogger()
    plain = TestingLogger()
    wrapped = TestingLogger(recorder)
    plain.warn("warn")
    wrapped.warn("warn")
    assert recorder.item == (logging.WARN, 'warn', ())
    assert plain.levels[0] == logging.WARN
    assert wrapped.levels[0] == logging.WARN
    assert plain.buffer[0] == 'warn'
    assert wrapped.buffer[0] == 'warn'
def testError():
    """error() records an ERROR entry locally and forwards it verbatim."""
    recorder = BogusLogger()
    plain = TestingLogger()
    wrapped = TestingLogger(recorder)
    plain.error("error")
    wrapped.error("error")
    assert recorder.item == (logging.ERROR, 'error', ())
    assert plain.levels[0] == logging.ERROR
    assert wrapped.levels[0] == logging.ERROR
    assert plain.buffer[0] == 'error'
    assert wrapped.buffer[0] == 'error'
def testCritical():
    """critical() records a CRITICAL entry locally and forwards it verbatim."""
    recorder = BogusLogger()
    plain = TestingLogger()
    wrapped = TestingLogger(recorder)
    plain.critical("critical")
    wrapped.critical("critical")
    assert recorder.item == (logging.CRITICAL, 'critical', ())
    assert plain.levels[0] == logging.CRITICAL
    assert wrapped.levels[0] == logging.CRITICAL
    assert plain.buffer[0] == 'critical'
    assert wrapped.buffer[0] == 'critical'
def testFatal():
    """fatal() records a FATAL (== CRITICAL) entry and forwards it verbatim."""
    recorder = BogusLogger()
    plain = TestingLogger()
    wrapped = TestingLogger(recorder)
    plain.fatal("fatal")
    wrapped.fatal("fatal")
    assert recorder.item == (logging.FATAL, 'fatal', ())
    assert plain.levels[0] == logging.FATAL
    assert wrapped.levels[0] == logging.FATAL
    assert plain.buffer[0] == 'fatal'
    assert wrapped.buffer[0] == 'fatal'
def testStrFunction():
    """str(TestingLogger) renders one 'LEVELNAME (num): message' line per
    recorded entry, joined by newlines; an empty logger renders as ''."""
    tl = TestingLogger()
    assert str(tl) == ''
    calls = [
        (tl.debug, 'debug', 'DEBUG (10): debug'),
        (tl.info, 'info', 'INFO (20): info'),
        (tl.warn, 'warn', 'WARNING (30): warn'),
        (tl.warning, 'warning', 'WARNING (30): warning'),
        (tl.error, 'error', 'ERROR (40): error'),
        (tl.critical, 'critical', 'FATAL (50): critical'),
        (tl.fatal, 'fatal', 'FATAL (50): fatal'),
    ]
    expLines = []
    for method, msg, rendered in calls:
        method(msg)
        expLines.append(rendered)
    assert str(tl) == "\n".join(expLines)
def testLenFunction():
    """len(TestingLogger) counts one entry per logged message, regardless
    of level."""
    tl = TestingLogger()
    assert len(tl) == 0
    methods = (tl.debug, tl.info, tl.warning, tl.warn,
               tl.error, tl.critical, tl.fatal)
    for count, method in enumerate(methods, start=1):
        method('woo')
        assert len(tl) == count
def testClear():
    """clear() empties both the message buffer and the level list, and is
    a no-op on an already-empty logger."""
    tl = TestingLogger()
    tl.clear()
    assert (len(tl), len(tl.levels), len(tl.buffer)) == (0, 0, 0)
    for method in (tl.debug, tl.info, tl.warning, tl.warn,
                   tl.error, tl.critical, tl.fatal):
        method('woo')
    assert (len(tl), len(tl.levels), len(tl.buffer)) == (7, 7, 7)
    tl.clear()
    assert (len(tl), len(tl.levels), len(tl.buffer)) == (0, 0, 0)
#def testFormatOne(): handled by testStrFunction()
| mpl-2.0 |
jumpstarter-io/cinder | cinder/quota.py | 6 | 37954 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Quotas for volumes."""
import datetime
from oslo.config import cfg
from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder.openstack.common import importutils
from cinder.openstack.common import log as logging
from cinder.openstack.common import timeutils
LOG = logging.getLogger(__name__)
# Quota limits and reservation behaviour, registered on the global CONF
# object below.  Note (per get_defaults) that the per-resource defaults
# here are superseded at runtime by the default quota class when
# use_default_quota_class is enabled.
quota_opts = [
    cfg.IntOpt('quota_volumes',
               default=10,
               help='Number of volumes allowed per project'),
    cfg.IntOpt('quota_snapshots',
               default=10,
               help='Number of volume snapshots allowed per project'),
    cfg.IntOpt('quota_consistencygroups',
               default=10,
               help='Number of consistencygroups allowed per project'),
    cfg.IntOpt('quota_gigabytes',
               default=1000,
               help='Total amount of storage, in gigabytes, allowed '
                    'for volumes and snapshots per project'),
    cfg.IntOpt('quota_backups',
               default=10,
               help='Number of volume backups allowed per project'),
    cfg.IntOpt('quota_backup_gigabytes',
               default=1000,
               help='Total amount of storage, in gigabytes, allowed '
                    'for backups per project'),
    cfg.IntOpt('reservation_expire',
               default=86400,
               help='Number of seconds until a reservation expires'),
    cfg.IntOpt('until_refresh',
               default=0,
               help='Count of reservations until usage is refreshed'),
    cfg.IntOpt('max_age',
               default=0,
               help='Number of seconds between subsequent usage refreshes'),
    cfg.StrOpt('quota_driver',
               default='cinder.quota.DbQuotaDriver',
               help='Default driver to use for quota checks'),
    cfg.BoolOpt('use_default_quota_class',
                default=True,
                help='Enables or disables use of default quota class '
                     'with default quota.'), ]
CONF = cfg.CONF
CONF.register_opts(quota_opts)
class DbQuotaDriver(object):
"""Driver to perform check to enforcement of quotas.
Also allows to obtain quota information.
The default driver utilizes the local database.
"""
    def get_by_project(self, context, project_id, resource_name):
        """Get a specific quota by project.

        :param context: The request context, for access checks.
        :param project_id: The ID of the project owning the quota.
        :param resource_name: The name of the quota-tracked resource.
        """
        return db.quota_get(context, project_id, resource_name)
    def get_by_class(self, context, quota_class, resource_name):
        """Get a specific quota by quota class.

        :param context: The request context, for access checks.
        :param quota_class: The name of the quota class.
        :param resource_name: The name of the quota-tracked resource.
        """
        return db.quota_class_get(context, quota_class, resource_name)
def get_default(self, context, resource):
"""Get a specific default quota for a resource."""
default_quotas = db.quota_class_get_default(context)
return default_quotas.get(resource.name, resource.default)
def get_defaults(self, context, resources):
"""Given a list of resources, retrieve the default quotas.
Use the class quotas named `_DEFAULT_QUOTA_NAME` as default quotas,
if it exists.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
"""
quotas = {}
default_quotas = {}
if CONF.use_default_quota_class:
default_quotas = db.quota_class_get_default(context)
for resource in resources.values():
if resource.name not in default_quotas:
LOG.deprecated(_("Default quota for resource: %(res)s is set "
"by the default quota flag: quota_%(res)s, "
"it is now deprecated. Please use the "
"default quota class for default "
"quota.") % {'res': resource.name})
quotas[resource.name] = default_quotas.get(resource.name,
resource.default)
return quotas
def get_class_quotas(self, context, resources, quota_class,
defaults=True):
"""Given list of resources, retrieve the quotas for given quota class.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param quota_class: The name of the quota class to return
quotas for.
:param defaults: If True, the default value will be reported
if there is no specific value for the
resource.
"""
quotas = {}
default_quotas = {}
class_quotas = db.quota_class_get_all_by_name(context, quota_class)
if defaults:
default_quotas = db.quota_class_get_default(context)
for resource in resources.values():
if resource.name in class_quotas:
quotas[resource.name] = class_quotas[resource.name]
continue
if defaults:
quotas[resource.name] = default_quotas.get(resource.name,
resource.default)
return quotas
    def get_project_quotas(self, context, resources, project_id,
                           quota_class=None, defaults=True,
                           usages=True):
        """Given a list of resources, retrieve the quotas for the given
        project.

        Per-project overrides take precedence over quota-class values,
        which take precedence over the defaults.

        :param context: The request context, for access checks.
        :param project_id: The ID of the project to return quotas for.
        :param quota_class: If project_id != context.project_id, the
                            quota class cannot be determined.  This
                            parameter allows it to be specified.  It
                            will be ignored if project_id ==
                            context.project_id.
        :param defaults: If True, the quota class value (or the
                         default value, if there is no value from the
                         quota class) will be reported if there is no
                         specific value for the resource.
        :param usages: If True, the current in_use and reserved counts
                       will also be returned.
        """
        quotas = {}
        project_quotas = db.quota_get_all_by_project(context, project_id)
        if usages:
            project_usages = db.quota_usage_get_all_by_project(context,
                                                               project_id)
        # Get the quotas for the appropriate class. If the project ID
        # matches the one in the context, we use the quota_class from
        # the context, otherwise, we use the provided quota_class (if
        # any)
        if project_id == context.project_id:
            quota_class = context.quota_class
        if quota_class:
            class_quotas = db.quota_class_get_all_by_name(context, quota_class)
        else:
            class_quotas = {}
        default_quotas = self.get_defaults(context, resources)
        for resource in resources.values():
            # Omit default/quota class values
            if not defaults and resource.name not in project_quotas:
                continue
            quotas[resource.name] = dict(
                limit=project_quotas.get(
                    resource.name,
                    class_quotas.get(resource.name,
                                     default_quotas[resource.name])),
            )
            # Include usages if desired. This is optional because one
            # internal consumer of this interface wants to access the
            # usages directly from inside a transaction.
            if usages:
                usage = project_usages.get(resource.name, {})
                quotas[resource.name].update(
                    in_use=usage.get('in_use', 0),
                    reserved=usage.get('reserved', 0), )
        return quotas
def _get_quotas(self, context, resources, keys, has_sync, project_id=None):
"""A helper method which retrieves the quotas for specific resources.
This specific resource is identified by keys, and which apply to the
current context.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param keys: A list of the desired quotas to retrieve.
:param has_sync: If True, indicates that the resource must
have a sync attribute; if False, indicates
that the resource must NOT have a sync
attribute.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
# Filter resources
if has_sync:
sync_filt = lambda x: hasattr(x, 'sync')
else:
sync_filt = lambda x: not hasattr(x, 'sync')
desired = set(keys)
sub_resources = dict((k, v) for k, v in resources.items()
if k in desired and sync_filt(v))
# Make sure we accounted for all of them...
if len(keys) != len(sub_resources):
unknown = desired - set(sub_resources.keys())
raise exception.QuotaResourceUnknown(unknown=sorted(unknown))
# Grab and return the quotas (without usages)
quotas = self.get_project_quotas(context, sub_resources,
project_id,
context.quota_class, usages=False)
return dict((k, v['limit']) for k, v in quotas.items())
def limit_check(self, context, resources, values, project_id=None):
"""Check simple quota limits.
For limits--those quotas for which there is no usage
synchronization function--this method checks that a set of
proposed values are permitted by the limit restriction.
This method will raise a QuotaResourceUnknown exception if a
given resource is unknown or if it is not a simple limit
resource.
If any of the proposed values is over the defined quota, an
OverQuota exception will be raised with the sorted list of the
resources which are too high. Otherwise, the method returns
nothing.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param values: A dictionary of the values to check against the
quota.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
# Ensure no value is less than zero
unders = [key for key, val in values.items() if val < 0]
if unders:
raise exception.InvalidQuotaValue(unders=sorted(unders))
# If project_id is None, then we use the project_id in context
if project_id is None:
project_id = context.project_id
# Get the applicable quotas
quotas = self._get_quotas(context, resources, values.keys(),
has_sync=False, project_id=project_id)
# Check the quotas and construct a list of the resources that
# would be put over limit by the desired values
overs = [key for key, val in values.items()
if quotas[key] >= 0 and quotas[key] < val]
if overs:
raise exception.OverQuota(overs=sorted(overs), quotas=quotas,
usages={})
def reserve(self, context, resources, deltas, expire=None,
            project_id=None):
    """Check quotas and reserve resources.

    For counting quotas--those quotas for which there is a usage
    synchronization function--this method checks quotas against
    current usage and the desired deltas.

    Raises QuotaResourceUnknown if a given resource is unknown or
    has no usage synchronization function.  Raises OverQuota (with
    the sorted list of offending resources) if any proposed value
    exceeds the defined quota; otherwise returns a list of
    reservation UUIDs which were created.

    :param context: The request context, for access checks.
    :param resources: A dictionary of the registered resources.
    :param deltas: A dictionary of the proposed delta changes.
    :param expire: An optional parameter specifying an expiration
                   time for the reservations.  A plain number is
                   taken as seconds from now; a datetime.timedelta
                   is added to the current time; a datetime.datetime
                   is the absolute expiration time.  None selects the
                   configured default (in seconds).
    :param project_id: Specify the project_id if current context
                       is admin and admin wants to impact on
                       common user's tenant.
    """
    def _absolute_expiration(value):
        # Normalize None / seconds / timedelta / datetime into one
        # absolute datetime, rejecting anything else.
        if value is None:
            value = CONF.reservation_expire
        if isinstance(value, (int, long)):
            value = datetime.timedelta(seconds=value)
        if isinstance(value, datetime.timedelta):
            value = timeutils.utcnow() + value
        if not isinstance(value, datetime.datetime):
            raise exception.InvalidReservationExpiration(expire=value)
        return value

    expire = _absolute_expiration(expire)

    # Default to the tenant of the caller when none was given.
    if project_id is None:
        project_id = context.project_id

    # Get the applicable quotas.
    # NOTE(Vek): We're not worried about races at this point.
    # Yes, the admin may be in the process of reducing
    # quotas, but that's a pretty rare thing.
    quotas = self._get_quotas(context, resources, deltas.keys(),
                              has_sync=True, project_id=project_id)

    # NOTE(Vek): Most of the work here has to be done in the DB
    # API, because we have to do it in a transaction,
    # which means access to the session.  Since the
    # session isn't available outside the DBAPI, we
    # have to do the work there.
    return db.quota_reserve(context, resources, quotas, deltas, expire,
                            CONF.until_refresh, CONF.max_age,
                            project_id=project_id)
def commit(self, context, reservations, project_id=None):
    """Commit reservations.

    :param context: The request context, for access checks.
    :param reservations: A list of the reservation UUIDs, as
                         returned by the reserve() method.
    :param project_id: Specify the project_id if current context
                       is admin and admin wants to impact on
                       common user's tenant.
    """
    # Fall back to the tenant of the caller when none was given.
    effective_project = (context.project_id if project_id is None
                         else project_id)
    db.reservation_commit(context, reservations,
                          project_id=effective_project)
def rollback(self, context, reservations, project_id=None):
    """Roll back reservations.

    :param context: The request context, for access checks.
    :param reservations: A list of the reservation UUIDs, as
                         returned by the reserve() method.
    :param project_id: Specify the project_id if current context
                       is admin and admin wants to impact on
                       common user's tenant.
    """
    # Fall back to the tenant of the caller when none was given.
    effective_project = (context.project_id if project_id is None
                         else project_id)
    db.reservation_rollback(context, reservations,
                            project_id=effective_project)
def destroy_all_by_project(self, context, project_id):
    """Destroy all that is associated with a project.

    This includes quotas, usages and reservations.

    :param context: The request context, for access checks.
    :param project_id: The ID of the project being deleted.
    """
    # Delegates entirely to the DB API layer.
    db.quota_destroy_all_by_project(context, project_id)
def expire(self, context):
    """Expire reservations.

    Explores all currently existing reservations and rolls back
    any that have expired.

    :param context: The request context, for access checks.
    """
    # Delegates entirely to the DB API layer.
    db.reservation_expire(context)
class BaseResource(object):
    """Describe a single resource for quota checking."""

    def __init__(self, name, flag=None):
        """Initializes a Resource.

        :param name: The name of the resource, i.e., "volumes".
        :param flag: The name of the flag or configuration option
                     which specifies the default value of the quota
                     for this resource.
        """
        self.name = name
        self.flag = flag

    def quota(self, driver, context, **kwargs):
        """Given a driver and context, obtain the quota for this resource.

        Lookup order: project-specific quota, then quota-class quota,
        then the driver's default.

        :param driver: A quota driver.
        :param context: The request context.
        :param project_id: The project to obtain the quota value for.
                           If not provided, it is taken from the
                           context.  If it is given as None, no
                           project-specific quota will be searched for.
        :param quota_class: The quota class corresponding to the
                            project, or for which the quota is to be
                            looked up.  If not provided, it is taken
                            from the context.  If it is given as None,
                            no quota class-specific quota will be
                            searched for.  Note that the quota class
                            defaults to the value in the context, which
                            may not correspond to the project if
                            project_id is not the same as the one in
                            the context.
        """
        project_id = kwargs.get('project_id', context.project_id)
        quota_class = kwargs.get('quota_class', context.quota_class)

        # Most-specific first: per-project quota...
        if project_id:
            try:
                return driver.get_by_project(context, project_id, self.name)
            except exception.ProjectQuotaNotFound:
                pass

        # ...then quota-class quota...
        if quota_class:
            try:
                return driver.get_by_class(context, quota_class, self.name)
            except exception.QuotaClassNotFound:
                pass

        # ...finally the configured default.
        return driver.get_default(context, self)

    @property
    def default(self):
        """Return the default value of the quota (-1 if no flag is set)."""
        if self.flag:
            return CONF[self.flag]
        return -1
class ReservableResource(BaseResource):
    """Describe a reservable resource."""

    def __init__(self, name, sync, flag=None):
        """Initializes a ReservableResource.

        Reservable resources are those resources which directly
        correspond to objects in the database, i.e., volumes, gigabytes,
        etc.  A ReservableResource must be constructed with a usage
        synchronization function, which will be called to determine the
        current counts of one or more resources.

        The usage synchronization function will be passed three
        arguments: an admin context, the project ID, and an opaque
        session object, which should in turn be passed to the
        underlying database function.  Synchronization functions
        should return a dictionary mapping resource names to the
        current in_use count for those resources; more than one
        resource and resource count may be returned.  Note that
        synchronization functions may be associated with more than one
        ReservableResource.

        :param name: The name of the resource, i.e., "volumes".
        :param sync: A dbapi methods name which returns a dictionary
                     to resynchronize the in_use count for one or more
                     resources, as described above.
        :param flag: The name of the flag or configuration option
                     which specifies the default value of the quota
                     for this resource.
        """
        super(ReservableResource, self).__init__(name, flag=flag)
        # Stored as a method *name* (string), resolved by the DB layer.
        self.sync = sync
class AbsoluteResource(BaseResource):
    """Describe a non-reservable resource."""
    # Marker subclass: identical to BaseResource, but distinguishes
    # limit-only resources from ReservableResource in isinstance checks.
    pass
class CountableResource(AbsoluteResource):
    """Describe a resource where counts aren't based only on the project ID."""

    def __init__(self, name, count, flag=None):
        """Initializes a CountableResource.

        Countable resources are those resources which directly
        correspond to objects in the database, i.e., volumes, gigabytes,
        etc., but for which a count by project ID is inappropriate.  A
        CountableResource must be constructed with a counting
        function, which will be called to determine the current counts
        of the resource.

        The counting function will be passed the context, along with
        the extra positional and keyword arguments that are passed to
        Quota.count().  It should return an integer specifying the
        count.

        Note that this counting is not performed in a transaction-safe
        manner.  This resource class is a temporary measure to provide
        required functionality, until a better approach to solving
        this problem can be evolved.

        :param name: The name of the resource, i.e., "volumes".
        :param count: A callable which returns the count of the
                      resource.  The arguments passed are as described
                      above.
        :param flag: The name of the flag or configuration option
                     which specifies the default value of the quota
                     for this resource.
        """
        super(CountableResource, self).__init__(name, flag=flag)
        self.count = count
class VolumeTypeResource(ReservableResource):
    """ReservableResource for a specific volume type."""

    def __init__(self, part_name, volume_type):
        """Initializes a VolumeTypeResource.

        The resource name becomes "<part_name>_<volume type name>" and
        its sync method name "_sync_<part_name>".

        :param part_name: The kind of resource, i.e., "volumes".
        :param volume_type: The volume type for this resource.
        """
        self.volume_type_name = volume_type['name']
        self.volume_type_id = volume_type['id']
        name = "%s_%s" % (part_name, self.volume_type_name)
        # No flag argument: per-type quotas have no config default.
        super(VolumeTypeResource, self).__init__(name, "_sync_%s" % part_name)
class QuotaEngine(object):
    """Represent the set of recognized quotas.

    Thin facade over a quota driver plus a registry of Resource
    objects; most methods delegate straight to the driver.
    """

    def __init__(self, quota_driver_class=None):
        """Initialize a Quota object."""
        if not quota_driver_class:
            quota_driver_class = CONF.quota_driver

        if isinstance(quota_driver_class, basestring):
            # Accept a dotted path and instantiate it lazily.
            quota_driver_class = importutils.import_object(quota_driver_class)

        self._resources = {}
        self._driver = quota_driver_class

    def __contains__(self, resource):
        # Goes through the (possibly overridden) resources property.
        return resource in self.resources

    def register_resource(self, resource):
        """Register a resource."""
        self._resources[resource.name] = resource

    def register_resources(self, resources):
        """Register a list of resources."""
        for resource in resources:
            self.register_resource(resource)

    def get_by_project(self, context, project_id, resource_name):
        """Get a specific quota by project."""
        return self._driver.get_by_project(context, project_id, resource_name)

    def get_by_class(self, context, quota_class, resource_name):
        """Get a specific quota by quota class."""
        return self._driver.get_by_class(context, quota_class, resource_name)

    def get_default(self, context, resource):
        """Get a specific default quota for a resource."""
        return self._driver.get_default(context, resource)

    def get_defaults(self, context):
        """Retrieve the default quotas.

        :param context: The request context, for access checks.
        """
        return self._driver.get_defaults(context, self.resources)

    def get_class_quotas(self, context, quota_class, defaults=True):
        """Retrieve the quotas for the given quota class.

        :param context: The request context, for access checks.
        :param quota_class: The name of the quota class to return
                            quotas for.
        :param defaults: If True, the default value will be reported
                         if there is no specific value for the
                         resource.
        """
        return self._driver.get_class_quotas(context, self.resources,
                                             quota_class, defaults=defaults)

    def get_project_quotas(self, context, project_id, quota_class=None,
                           defaults=True, usages=True):
        """Retrieve the quotas for the given project.

        :param context: The request context, for access checks.
        :param project_id: The ID of the project to return quotas for.
        :param quota_class: If project_id != context.project_id, the
                            quota class cannot be determined.  This
                            parameter allows it to be specified.
        :param defaults: If True, the quota class value (or the
                         default value, if there is no value from the
                         quota class) will be reported if there is no
                         specific value for the resource.
        :param usages: If True, the current in_use and reserved counts
                       will also be returned.
        """
        return self._driver.get_project_quotas(context, self.resources,
                                               project_id,
                                               quota_class=quota_class,
                                               defaults=defaults,
                                               usages=usages)

    def count(self, context, resource, *args, **kwargs):
        """Count a resource.

        For countable resources, invokes the count() function and
        returns its result.  Arguments following the context and
        resource are passed directly to the count function declared by
        the resource.

        :param context: The request context, for access checks.
        :param resource: The name of the resource, as a string.
        """
        # Only resources carrying a count() callable are countable.
        res = self.resources.get(resource)
        if not res or not hasattr(res, 'count'):
            raise exception.QuotaResourceUnknown(unknown=[resource])

        return res.count(context, *args, **kwargs)

    def limit_check(self, context, project_id=None, **values):
        """Check simple quota limits.

        For limits--those quotas for which there is no usage
        synchronization function--this method checks that a set of
        proposed values are permitted by the limit restriction.  The
        values to check are given as keyword arguments, where the key
        identifies the specific quota limit to check, and the value is
        the proposed value.

        This method will raise a QuotaResourceUnknown exception if a
        given resource is unknown or if it is not a simple limit
        resource.

        If any of the proposed values is over the defined quota, an
        OverQuota exception will be raised with the sorted list of the
        resources which are too high.  Otherwise, the method returns
        nothing.

        :param context: The request context, for access checks.
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        """
        return self._driver.limit_check(context, self.resources, values,
                                        project_id=project_id)

    def reserve(self, context, expire=None, project_id=None, **deltas):
        """Check quotas and reserve resources.

        For counting quotas--those quotas for which there is a usage
        synchronization function--this method checks quotas against
        current usage and the desired deltas.  The deltas are given as
        keyword arguments, and current usage and other reservations
        are factored into the quota check.

        This method will raise a QuotaResourceUnknown exception if a
        given resource is unknown or if it does not have a usage
        synchronization function.

        If any of the proposed values is over the defined quota, an
        OverQuota exception will be raised with the sorted list of the
        resources which are too high.  Otherwise, the method returns a
        list of reservation UUIDs which were created.

        :param context: The request context, for access checks.
        :param expire: An optional parameter specifying an expiration
                       time for the reservations.  If it is a simple
                       number, it is interpreted as a number of
                       seconds and added to the current time; if it is
                       a datetime.timedelta object, it will also be
                       added to the current time.  A datetime.datetime
                       object will be interpreted as the absolute
                       expiration time.  If None is specified, the
                       default expiration time set by
                       --default-reservation-expire will be used (this
                       value will be treated as a number of seconds).
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        """
        reservations = self._driver.reserve(context, self.resources, deltas,
                                            expire=expire,
                                            project_id=project_id)

        LOG.debug("Created reservations %s" % reservations)

        return reservations

    def commit(self, context, reservations, project_id=None):
        """Commit reservations.

        :param context: The request context, for access checks.
        :param reservations: A list of the reservation UUIDs, as
                             returned by the reserve() method.
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        """
        try:
            self._driver.commit(context, reservations, project_id=project_id)
        except Exception:
            # NOTE(Vek): Ignoring exceptions here is safe, because the
            # usage resynchronization and the reservation expiration
            # mechanisms will resolve the issue.  The exception is
            # logged, however, because this is less than optimal.
            LOG.exception(_("Failed to commit reservations %s") % reservations)

    def rollback(self, context, reservations, project_id=None):
        """Roll back reservations.

        :param context: The request context, for access checks.
        :param reservations: A list of the reservation UUIDs, as
                             returned by the reserve() method.
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        """
        try:
            self._driver.rollback(context, reservations, project_id=project_id)
        except Exception:
            # NOTE(Vek): Ignoring exceptions here is safe, because the
            # usage resynchronization and the reservation expiration
            # mechanisms will resolve the issue.  The exception is
            # logged, however, because this is less than optimal.
            LOG.exception(_("Failed to roll back reservations "
                            "%s") % reservations)

    def destroy_all_by_project(self, context, project_id):
        """Destroy all quotas, usages, and reservations associated with a
        project.

        :param context: The request context, for access checks.
        :param project_id: The ID of the project being deleted.
        """
        self._driver.destroy_all_by_project(context, project_id)

    def expire(self, context):
        """Expire reservations.

        Explores all currently existing reservations and rolls back
        any that have expired.

        :param context: The request context, for access checks.
        """
        self._driver.expire(context)

    def add_volume_type_opts(self, context, opts, volume_type_id):
        """Add volume type resource options.

        Adds elements to the opts hash for volume type quotas.
        If a resource is being reserved ('gigabytes', etc) and the volume
        type is set up for its own quotas, these reservations are copied
        into keys for 'gigabytes_<volume type name>', etc.

        :param context: The request context, for access checks.
        :param opts: The reservations options hash.
        :param volume_type_id: The volume type id for this reservation.
        """
        if not volume_type_id:
            return

        # NOTE(jdg): set inactive to True in volume_type_get, as we
        # may be operating on a volume that was created with a type
        # that has since been deleted.
        volume_type = db.volume_type_get(context, volume_type_id, True)

        for quota in ('volumes', 'gigabytes', 'snapshots'):
            if quota in opts:
                vtype_quota = "%s_%s" % (quota, volume_type['name'])
                opts[vtype_quota] = opts[quota]

    @property
    def resource_names(self):
        # Sorted for stable display/iteration order.
        return sorted(self.resources.keys())

    @property
    def resources(self):
        return self._resources
class VolumeTypeQuotaEngine(QuotaEngine):
    """Represent the set of all quotas."""

    @property
    def resources(self):
        """Fetches all possible quota resources."""
        result = {}
        # Global quotas.
        argses = [('volumes', '_sync_volumes', 'quota_volumes'),
                  ('snapshots', '_sync_snapshots', 'quota_snapshots'),
                  ('gigabytes', '_sync_gigabytes', 'quota_gigabytes'),
                  ('backups', '_sync_backups', 'quota_backups'),
                  ('backup_gigabytes', '_sync_backup_gigabytes',
                   'quota_backup_gigabytes')]
        for args in argses:
            resource = ReservableResource(*args)
            result[resource.name] = resource

        # Volume type quotas.
        # NOTE(review): this queries the volume-type table on *every*
        # property access -- presumably so newly created types appear
        # immediately; confirm before adding any caching.
        volume_types = db.volume_type_get_all(context.get_admin_context(),
                                              False)
        for volume_type in volume_types.values():
            for part_name in ('volumes', 'gigabytes', 'snapshots'):
                resource = VolumeTypeResource(part_name, volume_type)
                result[resource.name] = resource
        return result

    def register_resource(self, resource):
        # The resource set is derived dynamically; manual registration
        # is deliberately unsupported.
        raise NotImplementedError(_("Cannot register resource"))

    def register_resources(self, resources):
        raise NotImplementedError(_("Cannot register resources"))
class CGQuotaEngine(QuotaEngine):
    """Represent the consistencygroup quotas."""

    @property
    def resources(self):
        """Fetches all possible quota resources."""
        result = {}
        # Global quotas: only the consistency-group count.
        argses = [('consistencygroups', '_sync_consistencygroups',
                   'quota_consistencygroups'), ]
        for args in argses:
            resource = ReservableResource(*args)
            result[resource.name] = resource

        return result

    def register_resource(self, resource):
        # The resource set is fixed; manual registration is
        # deliberately unsupported.
        raise NotImplementedError(_("Cannot register resource"))

    def register_resources(self, resources):
        raise NotImplementedError(_("Cannot register resources"))
# Module-level singletons shared by the rest of the codebase.
QUOTAS = VolumeTypeQuotaEngine()
CGQUOTAS = CGQuotaEngine()
| apache-2.0 |
timm/sandbox | py/cocomo/poly.py | 1 | 3693 | import random
def tunings(_ = None):
Within(txt="loc", lo=2, hi=2000)
prep([
# vlow low nom high vhigh xhigh
# scale factors:
'Flex', 5.07, 4.05, 3.04, 2.03, 1.01, _],[
'Pmat', 7.80, 6.24, 4.68, 3.12, 1.56, _],[
'Prec', 6.20, 4.96, 3.72, 2.48, 1.24, _],[
'Resl', 7.07, 5.65, 4.24, 2.83, 1.41, _],[
'Team', 5.48, 4.38, 3.29, 2.19, 1.01, _],[
# effort multipliers:
'acap', 1.42, 1.19, 1.00, 0.85, 0.71, _],[
'aexp', 1.22, 1.10, 1.00, 0.88, 0.81, _],[
'cplx', 0.73, 0.87, 1.00, 1.17, 1.34, 1.74],[
'data', _, 0.90, 1.00, 1.14, 1.28, _],[
'docu', 0.81, 0.91, 1.00, 1.11, 1.23, _],[
'ltex', 1.20, 1.09, 1.00, 0.91, 0.84, _],[
'pcap', 1.34, 1.15, 1.00, 0.88, 0.76, _],[
'pcon', 1.29, 1.12, 1.00, 0.90, 0.81, _],[
'plex', 1.19, 1.09, 1.00, 0.91, 0.85, _],[
'pvol', _, 0.87, 1.00, 1.15, 1.30, _],[
'rely', 0.82, 0.92, 1.00, 1.10, 1.26, _],[
'ruse', _, 0.95, 1.00, 1.07, 1.15, 1.24],[
'sced', 1.43, 1.14, 1.00, 1.00, 1.00, _],[
'site', 1.22, 1.09, 1.00, 0.93, 0.86, 0.80],[
'stor', _, _, 1.00, 1.05, 1.17, 1.46],[
'time', _, _, 1.00, 1.11, 1.29, 1.63],[
'tool', 1.17, 1.09, 1.00, 0.90, 0.78, _])
def COCOMO2(project, t, a=2.94, b=0.91, e=2.7182818285):
    """Estimate effort with the COCOMO-II post-architecture model:
    effort = a * EM * KLOC ** (b + 0.01 * SF).

    :param project: unused; kept for interface compatibility.
    :param t: namespace of model variables (e.g. ``Var.all``); each
        attribute is a callable returning a sampled setting.
    :param a: multiplicative calibration constant.
    :param b: exponential calibration constant.
    :param e: unused; kept for interface compatibility.
    """
    # Product of the 17 effort multipliers.
    em = t.acap() * t.aexp() * t.cplx() * t.data() * t.docu() * \
        t.ltex() * t.pcap() * t.pcon() * t.plex() * t.pvol() * \
        t.rely() * t.ruse() * t.sced() * t.site() * t.stor() * \
        t.time() * t.tool()
    # Sum of the 5 scale factors.  Bug fix: the registry created by
    # tunings() uses capitalized names ('Flex', 'Pmat', ...), so the
    # original lowercase attribute accesses raised AttributeError.
    sf = t.Flex() + t.Pmat() + t.Prec() + t.Resl() + t.Team()
    # Bug fix: 'loc' was a bare undefined name (NameError); the
    # lines-of-code variable lives on the same namespace as the rest.
    return a * em * t.loc() ** (b + 0.01 * sf)
#####################################################
class o:
    """Tiny anonymous-record helper: ``o(a=1).a == 1``."""
    def __init__(i, **d):
        # Store every keyword argument as an instance attribute.
        for key, value in d.items():
            setattr(i, key, value)
def X(y):
    """Force a value: call ``y`` if it is callable, else return it as-is."""
    if callable(y):
        return y()
    return y
def prep(*rows):
    """Register one OneOf variable per row.

    Each row is a list whose first cell is the variable name and whose
    remaining cells are its rating values (None for undefined).

    Fixed idiom: the original built (and discarded) a list
    comprehension purely for the side effect of constructing OneOf
    instances; a plain loop states the intent.
    """
    for row in rows:
        OneOf(row[0], row[1:])
class Var:
    """Base class for a named, sampleable model variable.

    Creating a Var self-registers it on the shared namespace
    ``Var.all`` under its name.  Calling the instance returns one
    memoized sample (see Cache); reset() discards the memo.  The
    operator overloads evaluate operands through X(), so Vars compose
    with plain numbers, callables and other Vars.
    """
    all = o()  # shared registry: Var.all.<name> -> Var instance

    def __init__(i, txt) :
        Var.all.__dict__[txt] = i  # register under the variable's name
        i.txt = txt
        i.local = None  # optional restricted pool of settings
        i.reset()
    def reset(i): i.cache = Cache(i.any)
    # Draw from the restricted pool if set, else defer to the subclass.
    def any(i): return random.choice(i.local) if i.local else i.any1()
    def __call__(i) : return i.cache()
    # NOTE(review): __eq__ is defined without __hash__, which makes
    # instances unhashable on Python 3; harmless while Vars are only
    # stored as registry values, never as dict keys.
    def __neg__(i) : return -1*X(i)
    def __pos__(i) : return +1*X(i)
    def __abs__(i) : return abs(X(i))
    def __lt__(i,j) : return X(i) < X(j)
    def __gt__(i,j) : return X(i) > X(j)
    def __le__(i,j) : return X(i) <= X(j)
    def __ge__(i,j) : return X(i) >= X(j)
    def __ne__(i,j) : return X(i) != X(j)
    def __eq__(i,j) : return X(i) == X(j)
    def __add__(i,j) : return X(i) + X(j)
    def __sub__(i,j) : return X(i) - X(j)
    def __mul__(i,j) : return X(i) * X(j)
    def __mod__(i,j) : return X(i) % X(j)
    def __pow__(i,j) : return X(i) ** X(j)
    def __truediv__(i,j) : return X(i) / X(j)
    def __floordiv__(i,j): return X(i) // X(j)
class Within(Var):
    """A numeric variable sampled uniformly from [lo, hi]."""

    def __init__(i, txt, lo=0, hi=1):
        super().__init__(txt)
        i.lo, i.hi = lo, hi

    def xplain(i, x):
        # Numeric samples are already self-explanatory.
        return x

    def any1(i):
        return random.uniform(i.lo, i.hi)

    def __repr__(i):
        return '<%s %s to %s>' % (i.txt, i.lo, i.hi)
class OneOf(Var):
    """A discrete variable sampled from an enumerated set of settings."""

    def __init__(i, txt, lst):
        super().__init__(txt)
        # Index -> setting, skipping undefined (falsy) cells.
        i.d = {n: x for n, x in enumerate(lst) if x}
        # Reverse map (setting -> index), used by xplain().
        i.show = {i.d[x]: x for x in i.d}

    def xplain(i, x):
        return i.show[x]

    def any1(i):
        # Choose a defined index, then return its setting.
        return i.d[random.choice(list(i.d.keys()))]

    def __repr__(i):
        return '<%s %s>' % (i.txt, list(i.d.keys()))
class Cache():
    """Memoize the first evaluation of a thunk; return it thereafter."""

    def __init__(i, fun):
        i.kept, i.fun = None, fun

    def __call__(i):
        # NOTE: a computed value of None would be re-evaluated; the
        # sampled model values here are never None, so that is safe.
        if i.kept is None:
            i.kept = X(i.fun)
        return i.kept
# Build the variable registry, then demo: print ten draws of 'acap',
# twenty times.  Note: because the sample is memoized per Var (see
# Cache), without the commented-out reset() every draw after the first
# repeats the same value.
tunings()
random.seed(1)
for _ in range(0, 20):
    #Var.all.acap.reset()
    print([Var.all.acap.xplain(Var.all.acap()) for _ in range(0, 10)])
| bsd-3-clause |
ForensicTools/GRREAT-475_2141-Chaigon-Failey-Siebert | gui/plugins/hunt_view.py | 2 | 45904 | #!/usr/bin/env python
# -*- mode: python; encoding: utf-8 -*-
#
"""This is the interface for managing hunts."""
import collections as py_collections
import operator
import StringIO
import urllib
import logging
from grr.gui import plot_lib
from grr.gui import renderers
from grr.gui.plugins import crash_view
from grr.gui.plugins import fileview
from grr.gui.plugins import foreman
from grr.gui.plugins import forms
from grr.gui.plugins import searchclient
from grr.gui.plugins import semantic
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import data_store
from grr.lib import flow
from grr.lib import hunts
from grr.lib import rdfvalue
from grr.lib import utils
class ManageHunts(renderers.Splitter2Way):
  """Manages Hunts GUI Screen."""
  description = "Hunt Manager"
  behaviours = frozenset(["General"])
  # Top pane lists hunts; bottom pane shows tabs for the selection.
  top_renderer = "HuntTable"
  bottom_renderer = "HuntViewTabs"
  context_help_url = "user_manual.html#_creating_a_hunt"

  layout_template = (renderers.Splitter2Way.layout_template +
                     renderers.TemplateRenderer.help_template)

  def Layout(self, request, response):
    # Render the splitter, then attach the client-side behaviour.
    response = super(ManageHunts, self).Layout(request, response)
    return self.CallJavascript(response, "ManageHunts.Layout")
class HuntStateIcon(semantic.RDFValueRenderer):
  """Render the hunt state by using an icon.

  This class is similar to FlowStateIcon, but it also adds STATE_STOPPED
  state for hunts that were created but not yet started (because of lack of
  approval, for example).
  """

  layout_template = renderers.Template("""
<div class="centered hunt-state-icon" state="{{this.state_str|escape}}">
<img class='grr-icon grr-flow-icon'
src='/static/images/{{this.icon|escape}}' />
</div>
""")

  # Maps the flow states to icons we can show
  state_map = {"STOPPED": "stock_yes.png",
               "STARTED": "clock.png",
               "PAUSED": "pause.png"}

  def Layout(self, request, response):
    self.state_str = str(self.proxy)
    # Unknown states fall back to a question-mark icon.
    self.icon = self.state_map.get(self.proxy, "question-red.png")
    return super(HuntStateIcon, self).Layout(request, response)
class RunHuntConfirmationDialog(renderers.ConfirmationDialogRenderer):
  """Dialog that asks confirmation to run a hunt and actually runs it."""
  post_parameters = ["hunt_id"]

  header = "Run a hunt?"

  content_template = renderers.Template("""
<p>Are you sure you want to <strong>run</strong> this hunt?</p>
""")

  ajax_template = renderers.Template("""
<p class="text-info">Hunt started successfully!</p>
""")

  def Layout(self, request, response):
    # Access to the hunt URN is checked before the dialog is shown.
    self.check_access_subject = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
    return super(RunHuntConfirmationDialog, self).Layout(request, response)

  def RenderAjax(self, request, response):
    # Confirmation triggers the server-side StartHuntFlow.
    flow.GRRFlow.StartFlow(flow_name="StartHuntFlow", token=request.token,
                           hunt_urn=rdfvalue.RDFURN(request.REQ.get("hunt_id")))
    return self.RenderFromTemplate(self.ajax_template, response,
                                   unique=self.unique)
class PauseHuntConfirmationDialog(renderers.ConfirmationDialogRenderer):
  """Dialog that asks confirmation to pause a hunt and actually runs it."""
  post_parameters = ["hunt_id"]

  header = "Pause a hunt?"

  content_template = renderers.Template("""
<p>Are you sure you want to <strong>pause</strong> this hunt?</p>
""")

  ajax_template = renderers.Template("""
<p class="text-info">Hunt paused successfully!</p>
""")

  def Layout(self, request, response):
    # Access to the hunt URN is checked before the dialog is shown.
    self.check_access_subject = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
    return super(PauseHuntConfirmationDialog, self).Layout(request, response)

  def RenderAjax(self, request, response):
    # Confirmation triggers the server-side PauseHuntFlow.
    flow.GRRFlow.StartFlow(flow_name="PauseHuntFlow", token=request.token,
                           hunt_urn=rdfvalue.RDFURN(request.REQ.get("hunt_id")))
    return self.RenderFromTemplate(self.ajax_template, response,
                                   unique=self.unique)
class ModifyHuntDialog(renderers.ConfirmationDialogRenderer):
  """Dialog that allows user to modify certain hunt parameters."""
  post_parameters = ["hunt_id"]

  header = "Modify a hunt"
  proceed_button_title = "Modify!"

  # (seconds, suffix) pairs for rendering expiry durations.
  expiry_time_dividers = ((60*60*24, "d"), (60*60, "h"), (60, "m"), (1, "s"))

  content_template = renderers.Template("""
{{this.hunt_params_form|safe}}
""")

  ajax_template = renderers.Template("""
<p class="text-info">Hunt modified successfully!</p>
""")

  def Layout(self, request, response):
    """Layout handler."""
    hunt_urn = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
    # Pre-populate the form with the hunt's current limit and expiry.
    with aff4.FACTORY.Open(hunt_urn, aff4_type="GRRHunt",
                           token=request.token) as hunt:
      runner = hunt.GetRunner()

      hunt_args = rdfvalue.ModifyHuntFlowArgs(
          client_limit=runner.args.client_limit,
          expiry_time=runner.context.expires,
          )

      self.hunt_params_form = forms.SemanticProtoFormRenderer(
          hunt_args, supressions=["hunt_urn"]).RawHTML(request)

      self.check_access_subject = hunt_urn

      return super(ModifyHuntDialog, self).Layout(request, response)

  def RenderAjax(self, request, response):
    """Starts ModifyHuntFlow that actually modifies a hunt."""
    hunt_urn = rdfvalue.RDFURN(request.REQ.get("hunt_id"))

    args = forms.SemanticProtoFormRenderer(
        rdfvalue.ModifyHuntFlowArgs()).ParseArgs(request)

    flow.GRRFlow.StartFlow(flow_name="ModifyHuntFlow", token=request.token,
                           hunt_urn=hunt_urn, args=args)

    return self.RenderFromTemplate(self.ajax_template, response,
                                   unique=self.unique)
class HuntTable(fileview.AbstractFileTable):
  """Show all hunts."""
  selection_publish_queue = "hunt_select"
  custom_class = "HuntTable"

  layout_template = """
<div id="new_hunt_dialog_{{unique|escape}}"
class="modal wide-modal high-modal" update_on_show="true"
tabindex="-1" role="dialog" aria-hidden="true">
</div>
<div id="run_hunt_dialog_{{unique|escape}}"
class="modal" tabindex="-1" role="dialog" aria-hidden="true">
</div>
<div id="pause_hunt_dialog_{{unique|escape}}"
class="modal" tabindex="-1" role="dialog" aria-hidden="true">
</div>
<div id="modify_hunt_dialog_{{unique|escape}}"
class="modal" tabindex="-1" role="dialog" aria-hidden="true">
</div>
<ul class="breadcrumb">
<li>
<button id='new_hunt_{{unique|escape}}' title='New Hunt'
class="btn btn-default" name="NewHunt" data-toggle="modal"
data-target="#new_hunt_dialog_{{unique|escape}}">
<img src='/static/images/new.png' class='toolbar_icon'>
</button>
<div class="btn-group">
<button id='run_hunt_{{unique|escape}}' title='Run Hunt'
class="btn btn-default" disabled="yes" name="RunHunt" data-toggle="modal"
data-target="#run_hunt_dialog_{{unique|escape}}">
<img src='/static/images/play_button.png' class='toolbar_icon'>
</button>
<button id='pause_hunt_{{unique|escape}}' title='Pause Hunt'
class="btn btn-default" disabled="yes" name="PauseHunt" data-toggle="modal"
data-target="#pause_hunt_dialog_{{unique|escape}}">
<img src='/static/images/pause_button.png' class='toolbar_icon'>
</button>
<button id='modify_hunt_{{unique|escape}}' title='Modify Hunt'
class="btn btn-default" disabled="yes" name="ModifyHunt" data-toggle="modal"
data-target="#modify_hunt_dialog_{{unique|escape}}">
<img src='/static/images/modify.png' class='toolbar_icon'>
</button>
<button id='toggle_robot_hunt_display_{{unique|escape}}'
title='Show/Hide Automated hunts'
class="btn btn-default" name="ToggleRobotHuntDisplay">
<img src='/static/images/robot.png' class='toolbar_icon'>
</button>
</div>
<div class="new_hunt_dialog" id="new_hunt_dialog_{{unique|escape}}"
class="hide" />
</li>
</ul>
""" + fileview.AbstractFileTable.layout_template

  root_path = "aff4:/hunts"

  def __init__(self, **kwargs):
    super(HuntTable, self).__init__(**kwargs)
    self.AddColumn(semantic.RDFValueColumn(
        "Status", renderer=HuntStateIcon, width="40px"))

    # The hunt id is the AFF4 URN for the hunt object.
    self.AddColumn(semantic.RDFValueColumn(
        "Hunt ID", renderer=semantic.SubjectRenderer))
    self.AddColumn(semantic.RDFValueColumn("Name"))
    self.AddColumn(semantic.RDFValueColumn("Start Time", width="16em"))
    self.AddColumn(semantic.RDFValueColumn("Expires", width="16em"))
    self.AddColumn(semantic.RDFValueColumn("Client Limit"))
    self.AddColumn(semantic.RDFValueColumn("Creator"))
    self.AddColumn(semantic.RDFValueColumn("Description", width="100%"))

  def Layout(self, request, response):
    response = super(HuntTable, self).Layout(request, response)
    return self.CallJavascript(response, "HuntTable.Layout")

  def BuildTable(self, start_row, end_row, request):
    """Fill table rows [start_row, end_row) from hunts under aff4:/hunts."""
    # Bug fix: nr_hunts must exist even when the try block fails early;
    # the original assigned it only inside the try, so an IOError made
    # the final return raise NameError instead of reporting the page.
    nr_hunts = 0
    fd = aff4.FACTORY.Open("aff4:/hunts", mode="r", token=request.token)
    try:
      children = list(fd.ListChildren())
      nr_hunts = len(children)

      children.sort(key=operator.attrgetter("age"), reverse=True)
      children = children[start_row:end_row]

      hunt_list = []
      for hunt in fd.OpenChildren(children=children):
        # Skip hunts that could not be unpickled.
        if not isinstance(hunt, hunts.GRRHunt) or not hunt.state:
          continue

        hunt.create_time = hunt.GetRunner().context.create_time
        hunt_list.append(hunt)

      hunt_list.sort(key=lambda x: x.create_time, reverse=True)

      could_not_display = []
      row_index = start_row
      for hunt_obj in hunt_list:
        if not isinstance(hunt_obj, hunts.GRRHunt):
          could_not_display.append((hunt_obj, "Object is not a valid hunt."))
          continue

        if hunt_obj.state.Empty():
          logging.error("Hunt without a valid state found: %s", hunt_obj)
          could_not_display.append((hunt_obj,
                                    "Hunt doesn't have a valid state."))
          continue

        runner = hunt_obj.GetRunner()
        description = (runner.args.description or
                       hunt_obj.__class__.__doc__.split("\n", 1)[0])

        self.AddRow({"Hunt ID": hunt_obj.urn,
                     "Name": hunt_obj.__class__.__name__,
                     "Status": hunt_obj.Get(hunt_obj.Schema.STATE),
                     "Start Time": runner.context.start_time,
                     "Expires": runner.context.expires,
                     "Client Limit": runner.args.client_limit,
                     "Creator": runner.context.creator,
                     "Description": description},
                    row_index=row_index)

        # Hide automated hunts by default
        if runner.context.creator == "GRRWorker":
          self.SetRowClass(row_index, "robot-hunt hide")

        row_index += 1

      for hunt_obj, reason in could_not_display:
        self.AddRow({"Hunt ID": hunt_obj.urn,
                     "Description": reason},
                    row_index=row_index)
        row_index += 1

    except IOError as e:
      logging.error("Bad hunt %s", e)

    return nr_hunts >= end_row
class HuntViewTabs(renderers.TabLayout):
"""Show a tabset to inspect the selected hunt.
Listening Javascript Events:
- file_select(aff4_path, age) - A selection event on the hunt table
informing us of a new hunt to show. We redraw the entire bottom right
side using a new renderer.
"""
names = ["Overview", "Log", "Errors", "Graph", "Results", "Stats",
"Crashes", "Outstanding", "Context Detail"]
delegated_renderers = ["HuntOverviewRenderer", "HuntLogRenderer",
"HuntErrorRenderer",
"HuntClientGraphRenderer", "HuntResultsRenderer",
"HuntStatsRenderer", "HuntCrashesRenderer",
"HuntOutstandingRenderer", "HuntContextView"]
empty_template = renderers.Template("""
<div class="padded" id="{{unique|escape}}">
<p>Please select a hunt to see its details here.</p>
</div>
""")
post_parameters = ["hunt_id"]
def Layout(self, request, response):
hunt_id = request.REQ.get("hunt_id")
if hunt_id:
response = super(HuntViewTabs, self).Layout(request, response)
else:
response = super(HuntViewTabs, self).Layout(
request, response, apply_template=self.empty_template)
return self.CallJavascript(response, "HuntViewTabs.Layout")
class ManageHuntsClientView(renderers.Splitter2Way):
"""Manages the clients involved in a hunt."""
description = "Hunt Client View"
top_renderer = "HuntClientTableRenderer"
bottom_renderer = "HuntClientViewTabs"
class ResourceRenderer(semantic.RDFValueRenderer):
"""Renders resource usage as meters."""
cls = "vertical_aligned"
layout_template = renderers.Template(
"<div>"
"<meter value=\"{{this.proxy|escape}}\"></meter>"
"</div>")
class FloatRenderer(semantic.RDFValueRenderer):
layout_template = renderers.Template("{{this.value|escape}}")
def Layout(self, request, response):
if self.proxy is None:
self.value = "0.0"
else:
self.value = "%.2f" % self.proxy
super(FloatRenderer, self).Layout(request, response)
class HuntClientTableRenderer(fileview.AbstractFileTable):
"""Displays the clients."""
selection_publish_queue = "hunt_client_select"
layout_template = """
{{this.title|escape}}
<a id="backlink_{{unique|escape}}" href='#{{this.hash|escape}}'>
back to hunt view</a>
<span class='pull-right'> Filter by State
<select id='{{unique|escape}}_select'>
<option>ALL</option>
<option>OUTSTANDING</option>
<option>COMPLETED</option>
<option>BAD</option>
</select>
</span>
""" + fileview.AbstractFileTable.layout_template
post_parameters = ["hunt_id"]
def __init__(self, **kwargs):
super(HuntClientTableRenderer, self).__init__(**kwargs)
self.AddColumn(semantic.RDFValueColumn(
"Client ID", width="20%", renderer=semantic.SubjectRenderer))
self.AddColumn(semantic.RDFValueColumn("Hostname", width="10%"))
self.AddColumn(semantic.RDFValueColumn("Status", width="10%"))
self.AddColumn(semantic.RDFValueColumn("User CPU seconds", width="10%",
renderer=FloatRenderer))
self.AddColumn(semantic.RDFValueColumn("System CPU seconds", width="10%",
renderer=FloatRenderer))
self.AddColumn(semantic.RDFValueColumn("CPU",
renderer=ResourceRenderer,
width="10%"))
self.AddColumn(semantic.RDFValueColumn("Network bytes sent", width="10%"))
self.AddColumn(semantic.RDFValueColumn("Network",
renderer=ResourceRenderer,
width="10%"))
self.AddColumn(semantic.RDFValueColumn("Last Checkin", width="10%"))
def Layout(self, request, response):
"""Ensure our hunt is in our state for HTML layout."""
hunt_id = request.REQ.get("hunt_id")
self.title = "Viewing Hunt %s" % hunt_id
h = dict(main="ManageHunts", hunt_id=hunt_id)
self.hunt_hash = urllib.urlencode(sorted(h.items()))
response = super(HuntClientTableRenderer, self).Layout(request, response)
return self.CallJavascript(response, "HuntClientTableRenderer.Layout",
hunt_hash=self.hunt_hash)
def BuildTable(self, start_row, end_row, request):
"""Called to fill in the data in the table."""
hunt_id = request.REQ.get("hunt_id")
completion_status_filter = request.REQ.get("completion_status", "ALL")
if hunt_id is None:
return
try:
self.hunt = aff4.FACTORY.Open(hunt_id, token=request.token,
aff4_type="GRRHunt")
except IOError:
logging.error("Invalid hunt %s", hunt_id)
return
# TODO(user): enable per-client resource usage display.
resource_usage = {}
resource_max = [0, 0, 0]
for resource in resource_usage.values():
for i in range(3):
if resource_max[i] < resource[i]:
resource_max[i] = resource[i]
results = {}
for status, client_list in self.hunt.GetClientsByStatus().items():
if (completion_status_filter == "ALL" or
status == completion_status_filter):
for client in client_list:
results[client] = status
# Get the list of clients and sort so that we can page accurately.
client_list = results.keys()
client_list.sort()
client_list = client_list[start_row:end_row]
row_index = start_row
for c_urn, cdict in self.hunt.GetClientStates(client_list):
row = {"Client ID": c_urn,
"Hostname": cdict.get("hostname"),
"Status": results[c_urn],
"Last Checkin": searchclient.FormatLastSeenTime(
cdict.get("age") or 0),
}
client_id = c_urn.Basename()
if client_id in resource_usage:
usage = resource_usage[client_id]
row["User CPU seconds"] = usage[0]
row["System CPU seconds"] = usage[1]
row["Network bytes sent"] = usage[2]
usage_percent = []
for i in range(3):
if resource_max[i]:
usage_percent.append(round(usage[i], 2) / resource_max[i])
else:
usage_percent.append(0.0)
row["CPU"] = usage_percent[0]
row["Network"] = usage_percent[2]
else:
row["User CPU seconds"] = 0
row["System CPU seconds"] = 0
row["Network bytes sent"] = 0
row["CPU"] = 0
row["Network"] = 0
self.AddRow(row, row_index)
row_index += 1
self.size = len(results)
class AbstractLogRenderer(renderers.TemplateRenderer):
"""Render a page for view a Log file.
Implements a very simple view. That will be extended with filtering
capabilities.
Implementations should implement the GetLog function.
"""
show_total_count = False
layout_template = renderers.Template("""
<table class="proto_table">
{% if this.log|length > 0 %}
{% if this.show_total_count %}
<h5>{{this.log|length}} Entries</h5>
{% endif %}
{% endif %}
{% for line in this.log %}
<tr>
{% for val in line %}
<td class="proto_key">{{ val|safe }}</td>
{% endfor %}
</tr>
{% empty %}
<tr><td>No entries</tr></td>
{% endfor %}
<table>
""")
def GetLog(self, request):
"""Take a request and return a list of tuples for a log."""
_ = request
return []
def Layout(self, request, response):
"""Fill in the form with the specific fields for the flow requested."""
self.log = []
for row in self.GetLog(request):
rendered_row = []
for item in row:
item_renderer = semantic.FindRendererForObject(item)
rendered_row.append(item_renderer.RawHTML(request))
self.log.append(rendered_row)
return super(AbstractLogRenderer, self).Layout(request, response)
class HuntOverviewRenderer(AbstractLogRenderer):
"""Renders the overview tab."""
# Will be retrieved from request.REQ if not set.
hunt_id = None
layout_template = renderers.Template("""
<a id="ViewHuntDetails_{{unique}}" href='#{{this.hash|escape}}'
onclick='grr.loadFromHash("{{this.hash|escape}}");'
class="btn btn-info">
View hunt details
</a>
<br/>
<dl class="dl-horizontal dl-hunt">
<dt>Name</dt><dd>{{ this.hunt_name|escape }}</dd>
<dt>Hunt ID</dt>
<dd>{{ this.hunt.urn.Basename|escape }}</dd>
<dt>Hunt URN</dt>
<dd>{{ this.hunt.urn|escape }}</dd>
<dt>Creator</dt>
<dd>{{ this.hunt_creator|escape }}</dd>
<dt>Client Limit</dt>
{% if this.client_limit == 0 %}
<dd>None</dd>
{% else %}
<dd>{{ this.client_limit|escape }}</dd>
{% endif %}
<dt>Client Rate (clients/min)</dt>
{% if this.client_rate == 0.0 %}
<dd>No rate limit</dd>
{% else %}
<dd>{{ this.client_rate|escape }}</dd>
{% endif %}
<dt>Clients Scheduled</dt>
<dd>{{ this.all_clients_count|escape }}</dd>
<dt>Outstanding</dt>
<dd>{{ this.outstanding_clients_count|escape }}</dd>
<dt>Completed</dt>
<dd>{{ this.completed_clients_count|escape }}</dd>
<dt>Total CPU seconds used</dt>
<dd>{{ this.cpu_sum|escape }}</dd>
<dt>Total network traffic</dt>
<dd>{{ this.net_sum|filesizeformat }}</dd>
<dt>Regex Rules</dt>
<dd>{{ this.regex_rules|safe }}</dd>
<dt>Integer Rules</dt>
<dd>{{ this.integer_rules|safe }}</dd>
<dt>Arguments</dt><dd>{{ this.args_str|safe }}</dd>
{% for key, val in this.data.items %}
<dt>{{ key|escape }}</dt><dd>{{ val|escape }}</dd>
{% endfor %}
</dl>
""")
error_template = renderers.Template(
"No information available for this Hunt.")
ajax_template = renderers.Template("""
<div id="RunHuntResult_{{unique|escape}}"></div>
""")
def RenderAjax(self, request, response):
self.hunt_id = request.REQ.get("hunt_id")
self.subject = rdfvalue.RDFURN(self.hunt_id)
response = renderers.TemplateRenderer.Layout(
self, request, response, apply_template=self.ajax_template)
return self.CallJavascript(response, "HuntOverviewRenderer.RenderAjax",
subject=self.subject, hunt_id=self.hunt_id)
def Layout(self, request, response):
"""Display the overview."""
if not self.hunt_id:
self.hunt_id = request.REQ.get("hunt_id")
h = dict(main="ManageHuntsClientView", hunt_id=self.hunt_id)
self.hash = urllib.urlencode(sorted(h.items()))
self.data = {}
self.args_str = ""
if self.hunt_id:
try:
self.hunt = aff4.FACTORY.Open(self.hunt_id, aff4_type="GRRHunt",
token=request.token)
if self.hunt.state.Empty():
raise IOError("No valid state could be found.")
hunt_stats = self.hunt.state.context.usage_stats
self.cpu_sum = "%.2f" % hunt_stats.user_cpu_stats.sum
self.net_sum = hunt_stats.network_bytes_sent_stats.sum
(self.all_clients_count,
self.completed_clients_count, _) = self.hunt.GetClientsCounts()
self.outstanding_clients_count = (self.all_clients_count -
self.completed_clients_count)
runner = self.hunt.GetRunner()
self.hunt_name = runner.args.hunt_name
self.hunt_creator = runner.context.creator
self.data = py_collections.OrderedDict()
self.data["Start Time"] = runner.context.start_time
self.data["Expiry Time"] = runner.context.expires
self.data["Status"] = self.hunt.Get(self.hunt.Schema.STATE)
self.client_limit = runner.args.client_limit
self.client_rate = runner.args.client_rate
self.args_str = renderers.DictRenderer(
self.hunt.state, filter_keys=["context"]).RawHTML(request)
if runner.args.regex_rules:
self.regex_rules = foreman.RegexRuleArray(
runner.args.regex_rules).RawHTML(request)
else:
self.regex_rules = "None"
if runner.args.integer_rules:
self.integer_rules = foreman.IntegerRuleArray(
runner.args.integer_rules).RawHTML(request)
else:
self.integer_rules = "None"
except IOError:
self.layout_template = self.error_template
return super(AbstractLogRenderer, self).Layout(request, response)
class HuntContextView(renderers.TemplateRenderer):
"""Render a the hunt context."""
layout_template = renderers.Template("""
{{this.args_str|safe}}
""")
def Layout(self, request, response):
"""Display hunt's context presented as dict."""
if not hasattr(self, "hunt_id"):
self.hunt_id = request.REQ.get("hunt_id")
self.hunt = aff4.FACTORY.Open(self.hunt_id, aff4_type="GRRHunt",
token=request.token)
if self.hunt.state.Empty():
raise IOError("No valid state could be found.")
self.args_str = renderers.DictRenderer(
self.hunt.state.context).RawHTML(request)
return super(HuntContextView, self).Layout(request, response)
class HuntLogRenderer(renderers.AngularDirectiveRenderer):
directive = "grr-hunt-log"
def Layout(self, request, response):
self.directive_args = {}
self.directive_args["hunt-urn"] = request.REQ.get("hunt_id")
return super(HuntLogRenderer, self).Layout(request, response)
class HuntErrorRenderer(renderers.AngularDirectiveRenderer):
directive = "grr-hunt-errors"
def Layout(self, request, response):
self.directive_args = {}
self.directive_args["hunt-urn"] = request.REQ.get("hunt_id")
return super(HuntErrorRenderer, self).Layout(request, response)
class HuntClientViewTabs(renderers.TabLayout):
"""Show a tabset to inspect the selected client of the selected hunt."""
names = ["Status", "Hunt Log", "Hunt Errors", "Client Detail"]
delegated_renderers = ["HuntClientOverviewRenderer", "HuntLogRenderer",
"HuntErrorRenderer", "HuntHostInformationRenderer"]
post_parameters = ["hunt_id", "hunt_client"]
def Layout(self, request, response):
response = super(HuntClientViewTabs, self).Layout(request, response)
return self.CallJavascript(response, "HuntClientViewTabs.Layout",
hunt_id=self.state["hunt_id"])
class HuntClientOverviewRenderer(renderers.TemplateRenderer):
"""Renders the Client Hunt Overview tab."""
layout_template = renderers.Template("""
<a href='#{{this.hash|escape}}' onclick='grr.loadFromHash(
"{{this.hash|escape}}");' ">
Go to client {{ this.client.urn|escape }}
</a>
<table class="proto_table">
<tr><td class="proto_key">Last Checkin</td>
<td>{{ this.last_checkin|escape }}</td>
</table>
""")
def Layout(self, request, response):
"""Display the overview."""
hunt_id = request.REQ.get("hunt_id")
hunt_client = request.REQ.get("hunt_client")
if hunt_id is not None and hunt_client is not None:
try:
self.client = aff4.FACTORY.Open(hunt_client, token=request.token,
aff4_type="VFSGRRClient")
self.last_checkin = rdfvalue.RDFDatetime(
self.client.Get(self.client.Schema.PING))
h = dict(main="HostInformation", c=self.client.client_id)
self.hash = urllib.urlencode(sorted(h.items()))
except IOError as e:
logging.error("Attempt to open client %s. Err %s", hunt_client, e)
return super(HuntClientOverviewRenderer, self).Layout(request, response)
class HuntClientGraphRenderer(renderers.TemplateRenderer):
"""Renders the button to download a hunt graph."""
layout_template = renderers.Template("""
{% if this.clients %}
<button id="{{ unique|escape }}">
Generate
</button>
{% else %}
No data to graph yet.
{% endif %}
""")
def Layout(self, request, response):
self.hunt_id = request.REQ.get("hunt_id")
hunt = aff4.FACTORY.Open(self.hunt_id, token=request.token)
all_count, _, _ = hunt.GetClientsCounts()
self.clients = bool(all_count)
response = super(HuntClientGraphRenderer, self).Layout(request, response)
return self.CallJavascript(response, "HuntClientGraphRenderer.Layout",
hunt_id=self.hunt_id)
class HuntClientCompletionGraphRenderer(renderers.ImageDownloadRenderer):
def Content(self, request, _):
"""Generates the actual image to display."""
hunt_id = request.REQ.get("hunt_id")
hunt = aff4.FACTORY.Open(hunt_id, aff4_type="GRRHunt", token=request.token)
clients_by_status = hunt.GetClientsByStatus()
cl = clients_by_status["STARTED"]
fi = clients_by_status["COMPLETED"]
cdict = {}
for c in cl:
cdict.setdefault(c, []).append(c.age)
fdict = {}
for c in fi:
fdict.setdefault(c, []).append(c.age)
cl_age = [int(min(x)/1e6) for x in cdict.values()]
fi_age = [int(min(x)/1e6) for x in fdict.values()]
cl_hist = {}
fi_hist = {}
for age in cl_age:
cl_hist.setdefault(age, 0)
cl_hist[age] += 1
for age in fi_age:
fi_hist.setdefault(age, 0)
fi_hist[age] += 1
t0 = min(cl_age) - 1
times = [t0]
cl = [0]
fi = [0]
all_times = set(cl_age) | set(fi_age)
cl_count = 0
fi_count = 0
for time in sorted(all_times):
# Check if there is a datapoint one second earlier, add one if not.
if times[-1] != time-1:
times.append(time)
cl.append(cl_count)
fi.append(fi_count)
cl_count += cl_hist.get(time, 0)
fi_count += fi_hist.get(time, 0)
times.append(time)
cl.append(cl_count)
fi.append(fi_count)
# Convert to hours, starting from 0.
times = [(t-t0)/3600.0 for t in times]
params = {"backend": "png"}
plot_lib.plt.rcParams.update(params)
plot_lib.plt.figure(1)
plot_lib.plt.clf()
plot_lib.plt.plot(times, cl, label="Agents issued.")
plot_lib.plt.plot(times, fi, label="Agents completed.")
plot_lib.plt.title("Agent Coverage")
plot_lib.plt.xlabel("Time (h)")
plot_lib.plt.ylabel(r"Agents")
plot_lib.plt.grid(True)
plot_lib.plt.legend(loc=4)
buf = StringIO.StringIO()
plot_lib.plt.savefig(buf)
buf.seek(0)
return buf.read()
class HuntHostInformationRenderer(fileview.AFF4Stats):
"""Modified HostInformation that reads from hunt_client variable."""
description = "Hunt Client Host Information"
css_class = "TableBody"
attributes_to_show = ["USERNAMES", "HOSTNAME", "MAC_ADDRESS", "INSTALL_DATE",
"SYSTEM", "CLOCK", "CLIENT_INFO"]
def Layout(self, request, response):
"""Produce a summary of the client information."""
client_id = request.REQ.get("hunt_client")
if client_id:
super(HuntHostInformationRenderer, self).Layout(
request, response, client_id=client_id,
aff4_path=rdfvalue.ClientURN(client_id),
age=aff4.ALL_TIMES)
class OutputPluginNoteRenderer(renderers.TemplateRenderer):
"""Baseclass for renderers who render output-plugin-specific notes."""
# Name of the output plugin class that this class should deal with.
for_output_plugin = None
def __init__(self, plugin_def=None, plugin_state=None, **kwargs):
super(OutputPluginNoteRenderer, self).__init__(**kwargs)
if plugin_def is None:
raise ValueError("plugin_def can't be None")
if plugin_state is None:
raise ValueError("plugin_state can't be None")
self.plugin_def = plugin_def
self.plugin_state = plugin_state
class CSVOutputPluginNoteRenderer(OutputPluginNoteRenderer):
"""Note renderer for CSV output plugin."""
for_output_plugin = "CSVOutputPlugin"
layout_template = renderers.Template("""
{% if this.output_urns %}
<div id="{{unique|escape}}" class="well well-small csv-output-note">
<p>CSV output plugin writes to following files
(last update on {{this.plugin_state.last_updated|escape}}):<br/>
{% for output_urn in this.output_urns %}
<a href="#" aff4_path="{{output_urn}}">{{output_urn|escape}}</a><br/>
{% endfor %}
</p>
</div>
{% endif %}
""")
def Layout(self, request, response):
self.output_urns = []
for output_file in self.plugin_state.files_by_type.values():
self.output_urns.append(output_file.urn)
response = super(CSVOutputPluginNoteRenderer, self).Layout(request,
response)
return self.CallJavascript(response, "CSVOutputPluginNoteRenderer.Layout")
class HuntResultsRenderer(semantic.RDFValueCollectionRenderer):
"""Displays a collection of hunt's results."""
layout_template = renderers.Template("""
{% for output_plugin_note in this.output_plugins_notes %}
{{output_plugin_note|safe}}
{% endfor %}
{% if this.exportable_results %}
<div id="generate_archive_{{unique|escape}}" class="well well-small">
<div class="export_tar pull-left">
Results of this hunt can be downloaded as an archive:
<div class="btn-group">
<button name="generate_tar" class="btn btn-default DownloadButton">
Generate TAR.GZ
</button>
<button class="btn btn-default dropdown-toggle" data-toggle="dropdown">
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li><a name="generate_zip" href="#">Generate ZIP</a></li>
</ul>
</div>
</div>
<div class="export_zip pull-left">
Results of this hunt can be downloaded as an archive:
<div class="btn-group">
<button class="btn btn-default DownloadButton" name="generate_zip">
Generate ZIP
</button>
<button class="btn btn-default dropdown-toggle" data-toggle="dropdown">
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li><a name="generate_tar" href="#">Generate TAR.GZ</a></li>
</ul>
</div>
</div>
<div class="pull-right">
<em>NOTE: generated archive will contain <strong>symlinks</strong>.<br/>
Unsure whether your archive utility supports them?<br/>
Just unpack the archive before browsing its contents.</em>
</div>
<div class="clearfix"></div>
</div>
<div id='generate_action_{{unique|escape}}'></div>
{% endif %}
""") + semantic.RDFValueCollectionRenderer.layout_template
error_template = renderers.Template("""
<p>This hunt hasn't stored any results yet.</p>
""")
context_help_url = "user_manual.html#_exporting_a_collection"
def Layout(self, request, response):
"""Layout the hunt results."""
hunt_id = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
hunt = aff4.FACTORY.Open(hunt_id, token=request.token)
metadata_urn = hunt.urn.Add("ResultsMetadata")
metadata = aff4.FACTORY.Create(
metadata_urn, aff4_type="HuntResultsMetadata", mode="r",
token=request.token)
output_plugins = metadata.Get(metadata.Schema.OUTPUT_PLUGINS)
self.output_plugins_notes = []
for _, (plugin_def, plugin_state) in output_plugins.iteritems():
plugin_name = plugin_def.plugin_name
for renderer_class in renderers.Renderer.classes.values():
if getattr(renderer_class, "for_output_plugin", None) == plugin_name:
renderer = renderer_class(plugin_def=plugin_def,
plugin_state=plugin_state)
self.output_plugins_notes.append(renderer.RawHTML(request))
export_view = renderers.CollectionExportView
self.exportable_results = export_view.IsCollectionExportable(
hunt.state.context.results_collection_urn,
token=request.token)
# In this renderer we show hunt results stored in the results collection.
response = super(HuntResultsRenderer, self).Layout(
request, response,
aff4_path=hunt.GetRunner().context.results_collection_urn)
return self.CallJavascript(response, "HuntResultsRenderer.Layout",
exportable_results=self.exportable_results,
hunt_id=hunt_id)
class HuntGenerateResultsArchive(renderers.TemplateRenderer):
layout_template = renderers.Template("""
<div class="alert alert-success">
<em>Generation has started. An email will be sent upon completion.</em>
</div>
""")
def Layout(self, request, response):
"""Start the flow to generate zip file."""
hunt_id = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
archive_format = utils.SmartStr(request.REQ.get("format"))
if (archive_format not in
rdfvalue.ExportHuntResultsFilesAsArchiveArgs.ArchiveFormat.enum_dict):
raise ValueError("Invalid format: %s.", format)
urn = flow.GRRFlow.StartFlow(flow_name="ExportHuntResultFilesAsArchive",
hunt_urn=hunt_id, format=archive_format,
token=request.token)
logging.info("Generating %s results for %s with flow %s.", format,
hunt_id, urn)
return super(HuntGenerateResultsArchive, self).Layout(request, response)
class HuntStatsRenderer(renderers.TemplateRenderer):
"""Display hunt's resources usage stats."""
layout_template = renderers.Template("""
<h3>Total number of clients: {{this.stats.user_cpu_stats.num|escape}}</h3>
<h3>User CPU</h3>
<dl class="dl-horizontal">
<dt>User CPU mean</dt>
<dd>{{this.stats.user_cpu_stats.mean|floatformat}}</dd>
<dt>User CPU stdev</dt>
<dd>{{this.stats.user_cpu_stats.std|floatformat}}</dd>
<dt>Clients Histogram</dt>
<dd class="histogram">
<div id="user_cpu_{{unique|escape}}"></div>
</dd>
</dl>
<h3>System CPU</h3>
<dl class="dl-horizontal">
<dt>System CPU mean</dt>
<dd>{{this.stats.system_cpu_stats.mean|floatformat}}</dd>
<dt>System CPU stdev</dt>
<dd>{{this.stats.system_cpu_stats.std|floatformat}}</dd>
<dt>Clients Histogram</dt>
<dd class="histogram">
<div id="system_cpu_{{unique|escape}}"></div>
</dd>
</dl>
<h3>Network bytes sent</h3>
<dl class="dl-horizontal">
<dt>Network bytes sent mean</dt>
<dd>{{this.stats.network_bytes_sent_stats.mean|floatformat}}</dd>
<dt>Network bytes sent stdev</dt>
<dd>{{this.stats.network_bytes_sent_stats.std|floatformat}}</dd>
<dt>Clients Hisogram</dt>
<dd class="histogram">
<div id="network_bytes_sent_{{unique|escape}}"></div>
</dd>
</dl>
<h3>Worst performers</h3>
<div class="row">
<div class="col-md-8">
<table id="performers_{{unique|escape}}"
class="table table-condensed table-striped table-bordered">
<thead>
<th>Client Id</th>
<th>User CPU</th>
<th>System CPU</th>
<th>Network bytes sent</th>
</thead>
<tbody>
{% for r in this.stats.worst_performers %}
<tr>
<td>{{r.client_html|safe}}</td>
<td>{{r.cpu_usage.user_cpu_time|floatformat}}</td>
<td>{{r.cpu_usage.system_cpu_time|floatformat}}</td>
<td>{{r.network_bytes_sent|escape}}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
""")
error_template = renderers.Template(
"No information available for this Hunt.")
def _HistogramToJSON(self, histogram):
hist_data = [(b.range_max_value, b.num) for b in histogram.bins]
return renderers.JsonDumpForScriptContext(hist_data)
def Layout(self, request, response):
"""Layout the HuntStatsRenderer data."""
hunt_id = request.REQ.get("hunt_id")
if hunt_id:
try:
hunt = aff4.FACTORY.Open(hunt_id,
aff4_type="GRRHunt",
token=request.token)
if hunt.state.Empty():
raise IOError("No valid state could be found.")
self.stats = hunt.state.context.usage_stats
for item in self.stats.worst_performers:
renderer = semantic.FindRendererForObject(item.client_id)
item.client_html = renderer.RawHTML()
self.user_cpu_json_data = self._HistogramToJSON(
self.stats.user_cpu_stats.histogram)
self.system_cpu_json_data = self._HistogramToJSON(
self.stats.user_cpu_stats.histogram)
self.network_bytes_sent_json_data = self._HistogramToJSON(
self.stats.network_bytes_sent_stats.histogram)
response = super(HuntStatsRenderer, self).Layout(request, response)
return self.CallJavascript(
response, "HuntStatsRenderer.Layout",
user_cpu_json_data=self.user_cpu_json_data,
system_cpu_json_data=self.system_cpu_json_data,
network_bytes_sent_json_data=self.network_bytes_sent_json_data)
except IOError:
self.layout_template = self.error_template
return super(HuntStatsRenderer, self).Layout(request, response)
class HuntCrashesRenderer(crash_view.ClientCrashCollectionRenderer):
"""View launched flows in a tree."""
def Layout(self, request, response):
hunt_id = request.REQ.get("hunt_id")
self.crashes_urn = rdfvalue.RDFURN(hunt_id).Add("crashes")
super(HuntCrashesRenderer, self).Layout(request, response)
class HuntOutstandingRenderer(renderers.TableRenderer):
"""A renderer that shows debug information for outstanding clients."""
post_parameters = ["hunt_id"]
def __init__(self, **kwargs):
super(HuntOutstandingRenderer, self).__init__(**kwargs)
self.AddColumn(semantic.RDFValueColumn("Client"))
self.AddColumn(semantic.RDFValueColumn("Flow"))
self.AddColumn(semantic.RDFValueColumn("Incomplete Request #"))
self.AddColumn(semantic.RDFValueColumn("State"))
self.AddColumn(semantic.RDFValueColumn("Args Expected"))
self.AddColumn(semantic.RDFValueColumn("Available Responses"))
self.AddColumn(semantic.RDFValueColumn("Status"))
self.AddColumn(semantic.RDFValueColumn("Expected Responses"))
self.AddColumn(semantic.RDFValueColumn("Client Requests Pending"))
def GetClientRequests(self, client_urns, token):
"""Returns all client requests for the given client urns."""
task_urns = [urn.Add("tasks") for urn in client_urns]
client_requests_raw = data_store.DB.MultiResolveRegex(task_urns, "task:.*",
token=token)
client_requests = {}
for client_urn, requests in client_requests_raw:
client_id = str(client_urn)[6:6+18]
client_requests.setdefault(client_id, [])
for _, serialized, _ in requests:
client_requests[client_id].append(rdfvalue.GrrMessage(serialized))
return client_requests
def GetAllSubflows(self, hunt_urn, client_urns, token):
"""Lists all subflows for a given hunt for all clients in client_urns."""
client_ids = [urn.Split()[0] for urn in client_urns]
client_bases = [hunt_urn.Add(client_id) for client_id in client_ids]
all_flows = []
act_flows = client_bases
while act_flows:
next_flows = []
for _, children in aff4.FACTORY.MultiListChildren(act_flows, token=token):
for flow_urn in children:
next_flows.append(flow_urn)
all_flows.extend(next_flows)
act_flows = next_flows
return all_flows
def GetFlowRequests(self, flow_urns, token):
"""Returns all outstanding requests for the flows in flow_urns."""
flow_requests = {}
flow_request_urns = [flow_urn.Add("state") for flow_urn in flow_urns]
for flow_urn, values in data_store.DB.MultiResolveRegex(
flow_request_urns, "flow:.*", token=token):
for subject, serialized, _ in values:
try:
if "status" in subject:
msg = rdfvalue.GrrMessage(serialized)
else:
msg = rdfvalue.RequestState(serialized)
except Exception as e: # pylint: disable=broad-except
logging.warn("Error while parsing: %s", e)
continue
flow_requests.setdefault(flow_urn, []).append(msg)
return flow_requests
def BuildTable(self, start_row, end_row, request):
"""Renders the table."""
hunt_id = request.REQ.get("hunt_id")
token = request.token
if hunt_id is None:
return
hunt_id = rdfvalue.RDFURN(hunt_id)
hunt = aff4.FACTORY.Open(hunt_id, aff4_type="GRRHunt", age=aff4.ALL_TIMES,
token=token)
clients_by_status = hunt.GetClientsByStatus()
outstanding = clients_by_status["OUTSTANDING"]
self.size = len(outstanding)
outstanding = sorted(outstanding)[start_row:end_row]
all_flow_urns = self.GetAllSubflows(hunt_id, outstanding, token)
flow_requests = self.GetFlowRequests(all_flow_urns, token)
try:
client_requests = self.GetClientRequests(outstanding, token)
except access_control.UnauthorizedAccess:
client_requests = None
waitingfor = {}
status_by_request = {}
for flow_urn in flow_requests:
for obj in flow_requests[flow_urn]:
if isinstance(obj, rdfvalue.RequestState):
waitingfor.setdefault(flow_urn, obj)
if waitingfor[flow_urn].id > obj.id:
waitingfor[flow_urn] = obj
elif isinstance(obj, rdfvalue.GrrMessage):
status_by_request.setdefault(flow_urn, {})[obj.request_id] = obj
response_urns = []
for request_base_urn, request in waitingfor.iteritems():
response_urns.append(rdfvalue.RDFURN(request_base_urn).Add(
"request:%08X" % request.id))
response_dict = dict(data_store.DB.MultiResolveRegex(
response_urns, "flow:.*", token=token))
row_index = start_row
for flow_urn in sorted(all_flow_urns):
request_urn = flow_urn.Add("state")
client_id = flow_urn.Split()[2]
try:
request_obj = waitingfor[request_urn]
response_urn = rdfvalue.RDFURN(request_urn).Add(
"request:%08X" % request_obj.id)
responses_available = len(response_dict.setdefault(response_urn, []))
status_available = "No"
responses_expected = "Unknown"
if request_obj.id in status_by_request.setdefault(request_urn, {}):
status_available = "Yes"
status = status_by_request[request_urn][request_obj.id]
responses_expected = status.response_id
if client_requests is None:
client_requests_available = "Must use raw access."
else:
client_requests_available = 0
for client_req in client_requests.setdefault(client_id, []):
if request_obj.request.session_id == client_req.session_id:
client_requests_available += 1
row_data = {
"Client": client_id,
"Flow": flow_urn,
"Incomplete Request #": request_obj.id,
"State": request_obj.next_state,
"Args Expected": request_obj.request.args_rdf_name,
"Available Responses": responses_available,
"Status": status_available,
"Expected Responses": responses_expected,
"Client Requests Pending": client_requests_available}
except KeyError:
row_data = {
"Client": client_id,
"Flow": flow_urn,
"Incomplete Request #": "No request found"}
self.AddRow(row_data, row_index=row_index)
row_index += 1
| apache-2.0 |
jblackburne/scikit-learn | sklearn/manifold/setup.py | 24 | 1279 | import os
from os.path import join
import numpy
from numpy.distutils.misc_util import Configuration
from sklearn._build_utils import get_blas_info
def configuration(parent_package="", top_path=None):
    """Build the numpy.distutils configuration for sklearn.manifold.

    Declares the two C extensions of this subpackage (``_utils`` and
    ``_barnes_hut_tsne``) plus the ``tests`` subpackage.
    """
    cfg = Configuration("manifold", parent_package, top_path)

    # Link against libm on POSIX platforms.
    libs = ['m'] if os.name == 'posix' else []

    cfg.add_extension(
        "_utils",
        sources=["_utils.c"],
        include_dirs=[numpy.get_include()],
        libraries=libs,
        extra_compile_args=["-O3"],
    )

    cblas_libs, blas_info = get_blas_info()
    # Pop the compile args out of blas_info so they are not passed twice
    # through the **blas_info expansion below.
    compile_args = blas_info.pop('extra_compile_args', [])
    compile_args.append("-O4")
    cfg.add_extension(
        "_barnes_hut_tsne",
        libraries=cblas_libs,
        sources=["_barnes_hut_tsne.c"],
        include_dirs=[join('..', 'src', 'cblas'),
                      numpy.get_include(),
                      blas_info.pop('include_dirs', [])],
        extra_compile_args=compile_args,
        **blas_info
    )

    cfg.add_subpackage('tests')

    return cfg
if __name__ == "__main__":
    # Allow building this subpackage standalone with numpy.distutils.
    from numpy.distutils.core import setup
    setup(**configuration().todict())
| bsd-3-clause |
JioCloud/tempest | tempest/services/volume/json/snapshots_client.py | 6 | 8121 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import time
from oslo_log import log as logging
from six.moves.urllib import parse as urllib
from tempest_lib import exceptions as lib_exc
from tempest.common import service_client
from tempest import exceptions
LOG = logging.getLogger(__name__)
class BaseSnapshotsClient(service_client.ServiceClient):
    """Base Client class to send CRUD Volume API requests."""

    # HTTP status expected from snapshot creation; the v2 client
    # overrides this with 202.
    create_resp = 200

    def list_snapshots(self, detail=False, params=None):
        """List all the snapshot.

        :param detail: when True, request the detailed listing.
        :param params: optional dict of query-string parameters.
        """
        url = 'snapshots'
        if detail:
            url += '/detail'
        if params:
            url += '?%s' % urllib.urlencode(params)

        resp, body = self.get(url)
        body = json.loads(body)
        self.expected_success(200, resp.status)
        return service_client.ResponseBodyList(resp, body['snapshots'])

    def show_snapshot(self, snapshot_id):
        """Returns the details of a single snapshot."""
        url = "snapshots/%s" % str(snapshot_id)
        resp, body = self.get(url)
        body = json.loads(body)
        self.expected_success(200, resp.status)
        return service_client.ResponseBody(resp, body['snapshot'])

    def create_snapshot(self, volume_id, **kwargs):
        """
        Creates a new snapshot.
        volume_id(Required): id of the volume.
        force: Create a snapshot even if the volume attached (Default=False)
        display_name: Optional snapshot Name.
        display_description: User friendly snapshot description.
        """
        post_body = {'volume_id': volume_id}
        post_body.update(kwargs)
        post_body = json.dumps({'snapshot': post_body})
        resp, body = self.post('snapshots', post_body)
        body = json.loads(body)
        self.expected_success(self.create_resp, resp.status)
        return service_client.ResponseBody(resp, body['snapshot'])

    def update_snapshot(self, snapshot_id, **kwargs):
        """Updates a snapshot."""
        put_body = json.dumps({'snapshot': kwargs})
        resp, body = self.put('snapshots/%s' % snapshot_id, put_body)
        body = json.loads(body)
        self.expected_success(200, resp.status)
        return service_client.ResponseBody(resp, body['snapshot'])

    # NOTE(afazekas): just for the wait function
    def _get_snapshot_status(self, snapshot_id):
        """Return the current status string of a snapshot.

        Raises SnapshotBuildErrorException when the snapshot is in the
        'error' state, since waiting any longer would be pointless.
        """
        body = self.show_snapshot(snapshot_id)
        status = body['status']
        # NOTE(afazekas): snapshot can reach an "error"
        # state in a "normal" lifecycle
        if status == 'error':
            raise exceptions.SnapshotBuildErrorException(
                snapshot_id=snapshot_id)

        return status

    # NOTE(afazkas): Wait reinvented again. It is not in the correct layer
    def wait_for_snapshot_status(self, snapshot_id, status):
        """Waits for a Snapshot to reach a given status.

        Polls every ``self.build_interval`` seconds and raises
        TimeoutException after ``self.build_timeout`` seconds.
        """
        start_time = time.time()
        old_value = value = self._get_snapshot_status(snapshot_id)
        while True:
            dtime = time.time() - start_time
            if value != old_value:
                # BUG FIX: the implicitly-concatenated literals were missing
                # a separating space ('..."%s"in %d second(s)').
                LOG.info('Value transition from "%s" to "%s" '
                         'in %d second(s).', old_value,
                         value, dtime)
            # Check the status before sleeping so a snapshot that is already
            # in the requested state returns immediately.
            if value == status:
                return value

            if dtime > self.build_timeout:
                # BUG FIX: missing spaces in the implicitly-concatenated
                # message ('(%ds)while waiting').
                message = ('Time Limit Exceeded! (%ds) '
                           'while waiting for %s, '
                           'but we got %s.' %
                           (self.build_timeout, status, value))
                raise exceptions.TimeoutException(message)
            # BUG FIX: the original slept twice per iteration (once at the
            # top of the loop and once here), doubling the effective poll
            # interval; sleep exactly once per iteration.
            time.sleep(self.build_interval)
            old_value = value
            value = self._get_snapshot_status(snapshot_id)

    def delete_snapshot(self, snapshot_id):
        """Delete Snapshot."""
        resp, body = self.delete("snapshots/%s" % str(snapshot_id))
        self.expected_success(202, resp.status)
        return service_client.ResponseBody(resp, body)

    def is_resource_deleted(self, id):
        # Used by the generic cleanup machinery: a NotFound means the
        # snapshot is gone.
        try:
            self.show_snapshot(id)
        except lib_exc.NotFound:
            return True
        return False

    @property
    def resource_type(self):
        """Returns the primary type of resource this client works with."""
        return 'volume-snapshot'

    def reset_snapshot_status(self, snapshot_id, status):
        """Reset the specified snapshot's status."""
        post_body = json.dumps({'os-reset_status': {"status": status}})
        resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
        self.expected_success(202, resp.status)
        return service_client.ResponseBody(resp, body)

    def update_snapshot_status(self, snapshot_id, status, progress):
        """Update the specified snapshot's status."""
        post_body = {
            'status': status,
            'progress': progress
        }
        post_body = json.dumps({'os-update_snapshot_status': post_body})
        url = 'snapshots/%s/action' % str(snapshot_id)
        resp, body = self.post(url, post_body)
        self.expected_success(202, resp.status)
        return service_client.ResponseBody(resp, body)

    def create_snapshot_metadata(self, snapshot_id, metadata):
        """Create metadata for the snapshot."""
        put_body = json.dumps({'metadata': metadata})
        url = "snapshots/%s/metadata" % str(snapshot_id)
        resp, body = self.post(url, put_body)
        body = json.loads(body)
        self.expected_success(200, resp.status)
        return service_client.ResponseBody(resp, body['metadata'])

    def show_snapshot_metadata(self, snapshot_id):
        """Get metadata of the snapshot."""
        url = "snapshots/%s/metadata" % str(snapshot_id)
        resp, body = self.get(url)
        body = json.loads(body)
        self.expected_success(200, resp.status)
        return service_client.ResponseBody(resp, body['metadata'])

    def update_snapshot_metadata(self, snapshot_id, metadata):
        """Update metadata for the snapshot."""
        put_body = json.dumps({'metadata': metadata})
        url = "snapshots/%s/metadata" % str(snapshot_id)
        resp, body = self.put(url, put_body)
        body = json.loads(body)
        self.expected_success(200, resp.status)
        return service_client.ResponseBody(resp, body['metadata'])

    def update_snapshot_metadata_item(self, snapshot_id, id, meta_item):
        """Update metadata item for the snapshot."""
        put_body = json.dumps({'meta': meta_item})
        url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
        resp, body = self.put(url, put_body)
        body = json.loads(body)
        self.expected_success(200, resp.status)
        return service_client.ResponseBody(resp, body['meta'])

    def delete_snapshot_metadata_item(self, snapshot_id, id):
        """Delete metadata item for the snapshot."""
        url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
        resp, body = self.delete(url)
        self.expected_success(200, resp.status)
        return service_client.ResponseBody(resp, body)

    def force_delete_snapshot(self, snapshot_id):
        """Force Delete Snapshot."""
        post_body = json.dumps({'os-force_delete': {}})
        resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
        self.expected_success(202, resp.status)
        return service_client.ResponseBody(resp, body)
class SnapshotsClient(BaseSnapshotsClient):
    """Client class to send CRUD Volume V1 API requests."""
    # Inherits everything from BaseSnapshotsClient unchanged; v1 uses the
    # default create_resp of 200.
| apache-2.0 |
2013Commons/hue | desktop/core/ext-py/Django-1.4.5/tests/regressiontests/utils/functional.py | 93 | 1084 | from django.utils import unittest
from django.utils.functional import lazy, lazy_property
class FunctionalTestCase(unittest.TestCase):
    """Tests for the lazy helpers in django.utils.functional."""

    def test_lazy(self):
        # A lazy callable should evaluate to the wrapped value on call.
        lazy_tuple = lazy(lambda: tuple(range(3)), list, tuple)
        for got, expected in zip(lazy_tuple(), range(3)):
            self.assertEqual(got, expected)

    def test_lazy_base_class(self):
        """Test that lazy also finds base class methods in the proxy object"""
        class Base(object):
            def base_method(self):
                pass

        class Klazz(Base):
            pass

        proxy = lazy(lambda: Klazz(), Klazz)()
        self.assertTrue('base_method' in dir(proxy))

    def test_lazy_property(self):
        class A(object):
            def _get_do(self):
                raise NotImplementedError

            def _set_do(self, value):
                raise NotImplementedError

            do = lazy_property(_get_do, _set_do)

        class B(A):
            def _get_do(self):
                return "DO IT"

        # The base getter raises; the subclass override is picked up.
        self.assertRaises(NotImplementedError, lambda: A().do)
        self.assertEqual(B().do, 'DO IT')
| apache-2.0 |
yongshengwang/hue | build/env/lib/python2.7/site-packages/setuptools/tests/test_test.py | 148 | 2329 | # -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import os
import site
import pytest
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS("""
from setuptools import setup
setup(name='foo',
packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
)
""")
NS_INIT = DALS("""
# -*- coding: Latin-1 -*-
# Söme Arbiträry Ünicode to test Distribute Issüé 310
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
""")
TEST_PY = DALS("""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print "Foo" # Should fail under Python 3 unless 2to3 is used
test_suite = unittest.makeSuite(TestTest)
""")
@pytest.fixture
def sample_test(tmpdir_cwd):
    """Lay out the sample namespace-package project in the current dir."""
    os.makedirs('name/space/tests')

    # Plain-text project files.
    text_files = {
        'setup.py': SETUP_PY,
        'name/space/__init__.py': '#empty\n',
        'name/space/tests/__init__.py': TEST_PY,
    }
    for path, content in text_files.items():
        with open(path, 'wt') as stream:
            stream.write(content)

    # name/__init__.py declares Latin-1 encoding, so write raw bytes.
    with open('name/__init__.py', 'wb') as stream:
        stream.write(NS_INIT.encode('Latin-1'))
# Skip inside a virtualenv (sys.real_prefix set) where --user installs fail.
@pytest.mark.skipif('hasattr(sys, "real_prefix")')
@pytest.mark.usefixtures('user_override')
@pytest.mark.usefixtures('sample_test')
class TestTestTest:
    def test_test(self):
        # Build a Distribution mirroring the sample project created by the
        # 'sample_test' fixture, with 2to3 enabled so the Python 2 'print'
        # in TEST_PY is converted before the suite runs.
        params = dict(
            name='foo',
            packages=['name', 'name.space', 'name.space.tests'],
            namespace_packages=['name'],
            test_suite='name.space.tests.test_suite',
            use_2to3=True,
        )
        dist = Distribution(params)
        dist.script_name = 'setup.py'
        cmd = test(dist)
        # 'user' must be set before finalization so the user scheme applies.
        cmd.user = 1
        cmd.ensure_finalized()
        cmd.install_dir = site.USER_SITE
        # NOTE(review): 'cmd.user' is assigned 1 a second time here; this
        # looks redundant with the assignment above -- confirm and remove.
        cmd.user = 1
        with contexts.quiet():
            # The test runner calls sys.exit
            with contexts.suppress_exceptions(SystemExit):
                cmd.run()
| apache-2.0 |
MTDEV-KERNEL/MOTO-KERNEL | scripts/rt-tester/rt-tester.py | 904 | 5366 | #!/usr/bin/env python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0        # -q: suppress progress output
test = 0         # -t: syntax-check only; print filenames, touch no sysfs
comments = 0     # -c: echo comment lines after the first command
# sysfs interface of the in-kernel rt-mutex tester: one directory per
# test thread, each exposing a status and a command file.
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
# Maps script command names to the numeric opcode strings understood by
# the in-kernel tester's command file.
cmd_opcodes = {
    "schedother" : "1",
    "schedfifo" : "2",
    "lock" : "3",
    "locknowait" : "4",
    "lockint" : "5",
    "lockintnowait" : "6",
    "lockcont" : "7",
    "unlock" : "8",
    "lockbkl" : "9",
    "unlockbkl" : "10",
    "signal" : "11",
    "resetevent" : "98",
    "reset" : "99",
    }

# Test opcodes: [status-field letter, comparison, fixed argument].
# The letter selects a field of the status line ('P' prio, 'N' normal
# prio, 'M' mutex state, 'O' opcode, 'E' event); the comparison is one
# of eq/lt/gt; a non-None third element overrides the script argument.
test_opcodes = {
    "prioeq" : ["P" , "eq" , None],
    "priolt" : ["P" , "lt" , None],
    "priogt" : ["P" , "gt" , None],
    "nprioeq" : ["N" , "eq" , None],
    "npriolt" : ["N" , "lt" , None],
    "npriogt" : ["N" , "gt" , None],
    "unlocked" : ["M" , "eq" , 0],
    "trylock" : ["M" , "eq" , 1],
    "blocked" : ["M" , "eq" , 2],
    "blockedwake" : ["M" , "eq" , 3],
    "locked" : ["M" , "eq" , 4],
    "opcodeeq" : ["O" , "eq" , None],
    "opcodelt" : ["O" , "lt" , None],
    "opcodegt" : ["O" , "gt" , None],
    "eventeq" : ["E" , "eq" , None],
    "eventlt" : ["E" , "lt" , None],
    "eventgt" : ["E" , "gt" , None],
    }
# Print usage information
def usage():
    # Python 2 script: bare 'print' statements are intentional.
    print "rt-tester.py <-c -h -q -t> <testfile>"
    print " -c display comments after first command"
    print " -h help"
    print " -q quiet mode"
    print " -t test mode (syntax check)"
    print " testfile: read test specification from testfile"
    print " otherwise from stdin"
    return
# Print progress when not in quiet mode
def progress(str):
    # NOTE: the parameter shadows the builtin 'str'; kept as-is for
    # compatibility with the original script.
    if not quiet:
        print str
# Analyse a status value
def analyse(val, top, arg):
    """Compare one status value against a test opcode triple.

    val: raw value string read from the status file.
    top: [field-letter, comparison, fixed-arg] triple from test_opcodes.
    arg: argument column of the test script line.
    Returns 1 when the comparison holds, 0 otherwise.
    """
    intval = int(val)
    if top[0] == "M":
        # Mutex state: pick the decimal digit for lock number 'arg'
        # (Python 2 integer division).
        intval = intval / (10 ** int(arg))
        intval = intval % 10
        argval = top[2]
    elif top[0] == "O":
        # Opcode: the argument may be a symbolic command name.
        argval = int(cmd_opcodes.get(arg, arg))
    else:
        argval = int(arg)
    # progress("%d %s %d" %(intval, top[1], argval))
    if top[1] == "eq" and intval == argval:
        return 1
    if top[1] == "lt" and intval < argval:
        return 1
    if top[1] == "gt" and intval > argval:
        return 1
    return 0
# Parse the commandline
# (Python 2 'except X, name' syntax throughout this script.)
try:
    (options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
    usage()
    sys.exit(1)

# Parse commandline options
for option, value in options:
    if option == "-c":
        comments = 1
    elif option == "-q":
        quiet = 1
    elif option == "-t":
        test = 1
    elif option == '-h':
        usage()
        sys.exit(0)

# Select the input source: first positional argument, else stdin.
if arguments:
    try:
        fd = open(arguments[0])
    except Exception,ex:
        sys.stderr.write("File not found %s\n" %(arguments[0]))
        sys.exit(1)
else:
    fd = sys.stdin
linenr = 0

# Read the test patterns
# Each non-comment line has the form "cmd:opcode:thread-id:data".
while 1:
    linenr = linenr + 1
    line = fd.readline()
    if not len(line):
        break

    line = line.strip()
    parts = line.split(":")

    if not parts or len(parts) < 1:
        continue

    if len(parts[0]) == 0:
        continue

    if parts[0].startswith("#"):
        if comments > 1:
            progress(line)
        continue

    if comments == 1:
        comments = 2
        progress(line)

    cmd = parts[0].strip().lower()
    opc = parts[1].strip().lower()
    tid = parts[2].strip()
    dat = parts[3].strip()

    try:
        # Test or wait for a status value
        # 't' checks once; 'w' re-reads the status file until the
        # condition holds.
        if cmd == "t" or cmd == "w":
            testop = test_opcodes[opc]

            fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
            if test:
                print fname
                continue

            while 1:
                query = 1
                fsta = open(fname, 'r')
                status = fsta.readline().strip()
                fsta.close()

                stat = status.split(",")
                for s in stat:
                    s = s.strip()
                    if s.startswith(testop[0]):
                        # Separate status value
                        val = s[2:].strip()
                        query = analyse(val, testop, dat)
                        break
                if query or cmd == "t":
                    break

                progress(" " + status)

            if not query:
                sys.stderr.write("Test failed in line %d\n" %(linenr))
                sys.exit(1)

        # Issue a command to the tester
        elif cmd == "c":
            cmdnr = cmd_opcodes[opc]
            # Build command string and sys filename
            cmdstr = "%s:%s" %(cmdnr, dat)
            fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
            if test:
                print fname
                continue

            fcmd = open(fname, 'w')
            fcmd.write(cmdstr)
            fcmd.close()

    except Exception,ex:
        sys.stderr.write(str(ex))
        sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
        if not test:
            fd.close()
            sys.exit(1)

# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
p4datasystems/CarnotKE | jyhton/Lib/xml/dom/__init__.py | 112 | 7194 | ########################################################################
#
# File Name: __init__.py
#
#
"""
WWW: http://4suite.org/4DOM e-mail: support@4suite.org
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.org/COPYRIGHT for license and copyright information
"""
class Node:
    """Class giving the nodeType and tree-position constants."""
    # DOM implementations may use this as a base class for their own
    # Node implementations. If they don't, the constants defined here
    # should still be used as the canonical definitions as they match
    # the values given in the W3C recommendation. Client code can
    # safely refer to these values in all tests of Node.nodeType
    # values.

    # nodeType values (DOM Level 1)
    ELEMENT_NODE = 1
    ATTRIBUTE_NODE = 2
    TEXT_NODE = 3
    CDATA_SECTION_NODE = 4
    ENTITY_REFERENCE_NODE = 5
    ENTITY_NODE = 6
    PROCESSING_INSTRUCTION_NODE = 7
    COMMENT_NODE = 8
    DOCUMENT_NODE = 9
    DOCUMENT_TYPE_NODE = 10
    DOCUMENT_FRAGMENT_NODE = 11
    NOTATION_NODE = 12

    # Tree-position bit flags, based on DOM Level 3 (WD 9 April 2002)
    TREE_POSITION_PRECEDING = 0x01
    TREE_POSITION_FOLLOWING = 0x02
    TREE_POSITION_ANCESTOR = 0x04
    TREE_POSITION_DESCENDENT = 0x08
    TREE_POSITION_EQUIVALENT = 0x10
    TREE_POSITION_SAME_NODE = 0x20
    TREE_POSITION_DISCONNECTED = 0x00
class UserDataHandler:
    """Class giving the operation constants for UserDataHandler.handle()."""
    # Passed as the 'operation' argument to handle() to say why the
    # callback fired.  Based on DOM Level 3 (WD 9 April 2002).
    NODE_CLONED = 1
    NODE_IMPORTED = 2
    NODE_DELETED = 3
    NODE_RENAMED = 4
class DOMError:
    """Class giving constants for error severity."""
    # Based on DOM Level 3 (WD 9 April 2002)
    SEVERITY_WARNING = 0
    SEVERITY_ERROR = 1
    SEVERITY_FATAL_ERROR = 2
# DOMException codes (DOM Level 1 unless noted)
INDEX_SIZE_ERR = 1
DOMSTRING_SIZE_ERR = 2
HIERARCHY_REQUEST_ERR = 3
WRONG_DOCUMENT_ERR = 4
INVALID_CHARACTER_ERR = 5
NO_DATA_ALLOWED_ERR = 6
NO_MODIFICATION_ALLOWED_ERR = 7
NOT_FOUND_ERR = 8
NOT_SUPPORTED_ERR = 9
INUSE_ATTRIBUTE_ERR = 10
# DOM Level 2
INVALID_STATE_ERR = 11
SYNTAX_ERR = 12
INVALID_MODIFICATION_ERR = 13
NAMESPACE_ERR = 14
INVALID_ACCESS_ERR = 15
# DOM Level 3
VALIDATION_ERR = 16

# EventException codes
UNSPECIFIED_EVENT_TYPE_ERR = 0

# Fourthought specific codes (offset to avoid clashing with DOM codes)
FT_EXCEPTION_BASE = 1000
XML_PARSE_ERR = FT_EXCEPTION_BASE + 1

# RangeException codes
BAD_BOUNDARYPOINTS_ERR = 1
INVALID_NODE_TYPE_ERR = 2
class DOMException(Exception):
    """Base DOM exception pairing a numeric code with a message.

    When no message is supplied, a default is looked up in
    DOMExceptionStrings by code.
    """
    def __init__(self, code, msg=''):
        self.code = code
        if msg:
            self.msg = msg
        else:
            self.msg = DOMExceptionStrings[code]

    def __str__(self):
        return self.msg
class EventException(Exception):
    """DOM Events exception carrying a numeric code and message.

    Falls back to EventExceptionStrings[code] when no message is given.
    """
    def __init__(self, code, msg=''):
        self.code = code
        if msg:
            self.msg = msg
        else:
            self.msg = EventExceptionStrings[code]

    def __str__(self):
        return self.msg
class RangeException(Exception):
    """DOM Range exception; also initializes the Exception args tuple."""
    def __init__(self, code, msg):
        self.code = code
        if msg:
            self.msg = msg
        else:
            self.msg = RangeExceptionStrings[code]
        Exception.__init__(self, self.msg)
class FtException(Exception):
    """Fourthought-specific exception.

    The registered message template for *code* is %-formatted with the
    remaining positional arguments.
    """
    def __init__(self, code, *args):
        self.code = code
        self.msg = FtExceptionStrings[code] % args

    def __str__(self):
        return self.msg
class IndexSizeErr(DOMException):
    """Raised for DOM INDEX_SIZE_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, INDEX_SIZE_ERR, msg)
class DomstringSizeErr(DOMException):
    """Raised for DOM DOMSTRING_SIZE_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, DOMSTRING_SIZE_ERR, msg)

# DOMStringSizeErr was accidentally introduced in rev 1.14 of this
# file, and was released as part of PyXML 0.6.4, 0.6.5, 0.6.6, 0.7,
# and 0.7.1. It has never been part of the Python DOM API, although
# it better matches the W3C recommendation. It should remain for
# compatibility, unfortunately.
#
DOMStringSizeErr = DomstringSizeErr
class HierarchyRequestErr(DOMException):
    """Raised for DOM HIERARCHY_REQUEST_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, HIERARCHY_REQUEST_ERR, msg)
class WrongDocumentErr(DOMException):
    """Raised for DOM WRONG_DOCUMENT_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, WRONG_DOCUMENT_ERR, msg)
class InvalidCharacterErr(DOMException):
    """Raised for DOM INVALID_CHARACTER_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, INVALID_CHARACTER_ERR, msg)
class NoDataAllowedErr(DOMException):
    """Raised for DOM NO_DATA_ALLOWED_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, NO_DATA_ALLOWED_ERR, msg)
class NoModificationAllowedErr(DOMException):
    """Raised for DOM NO_MODIFICATION_ALLOWED_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, NO_MODIFICATION_ALLOWED_ERR, msg)
class NotFoundErr(DOMException):
    """Raised for DOM NOT_FOUND_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, NOT_FOUND_ERR, msg)
class NotSupportedErr(DOMException):
    """Raised for DOM NOT_SUPPORTED_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, NOT_SUPPORTED_ERR, msg)
class InuseAttributeErr(DOMException):
    """Raised for DOM INUSE_ATTRIBUTE_ERR."""
    def __init__(self, msg=''):
        DOMException.__init__(self, INUSE_ATTRIBUTE_ERR, msg)
class InvalidStateErr(DOMException):
    """Raised for DOM INVALID_STATE_ERR (DOM Level 2)."""
    def __init__(self, msg=''):
        DOMException.__init__(self, INVALID_STATE_ERR, msg)
class SyntaxErr(DOMException):
    """Raised for DOM SYNTAX_ERR (DOM Level 2)."""
    def __init__(self, msg=''):
        DOMException.__init__(self, SYNTAX_ERR, msg)
class InvalidModificationErr(DOMException):
    """Raised for DOM INVALID_MODIFICATION_ERR (DOM Level 2)."""
    def __init__(self, msg=''):
        DOMException.__init__(self, INVALID_MODIFICATION_ERR, msg)
class NamespaceErr(DOMException):
    """Raised for DOM NAMESPACE_ERR (DOM Level 2)."""
    def __init__(self, msg=''):
        DOMException.__init__(self, NAMESPACE_ERR, msg)
class InvalidAccessErr(DOMException):
    """Raised for DOM INVALID_ACCESS_ERR (DOM Level 2)."""
    def __init__(self, msg=''):
        DOMException.__init__(self, INVALID_ACCESS_ERR, msg)
class ValidationErr(DOMException):
    """Raised for DOM VALIDATION_ERR (DOM Level 3)."""
    def __init__(self, msg=''):
        DOMException.__init__(self, VALIDATION_ERR, msg)
class UnspecifiedEventTypeErr(EventException):
    """Raised for UNSPECIFIED_EVENT_TYPE_ERR (DOM Events)."""
    def __init__(self, msg=''):
        EventException.__init__(self, UNSPECIFIED_EVENT_TYPE_ERR, msg)
class XmlParseErr(FtException):
    """Raised for the Fourthought-specific XML_PARSE_ERR code."""
    def __init__(self, msg=''):
        FtException.__init__(self, XML_PARSE_ERR, msg)
# Specific Range Exceptions
class BadBoundaryPointsErr(RangeException):
    """Raised for Range BAD_BOUNDARYPOINTS_ERR."""
    def __init__(self, msg=''):
        RangeException.__init__(self, BAD_BOUNDARYPOINTS_ERR, msg)
class InvalidNodeTypeErr(RangeException):
    """Raised for Range INVALID_NODE_TYPE_ERR."""
    def __init__(self, msg=''):
        RangeException.__init__(self, INVALID_NODE_TYPE_ERR, msg)
# Well-known namespace URIs used by DOM Level 2/3 implementations.
XML_NAMESPACE = "http://www.w3.org/XML/1998/namespace"
XMLNS_NAMESPACE = "http://www.w3.org/2000/xmlns/"
XHTML_NAMESPACE = "http://www.w3.org/1999/xhtml"
EMPTY_NAMESPACE = None
EMPTY_PREFIX = None
import MessageSource
# Pull the localized message tables out of MessageSource; the exception
# classes above look up their default messages in these dicts.
DOMExceptionStrings = MessageSource.__dict__['DOMExceptionStrings']
EventExceptionStrings = MessageSource.__dict__['EventExceptionStrings']
FtExceptionStrings = MessageSource.__dict__['FtExceptionStrings']
RangeExceptionStrings = MessageSource.__dict__['RangeExceptionStrings']

from domreg import getDOMImplementation,registerDOMImplementation
analyseuc3m/ANALYSE-v1 | common/test/acceptance/fixtures/xqueue.py | 206 | 1402 | """
Fixture to configure XQueue response.
"""
import requests
import json
from . import XQUEUE_STUB_URL
class XQueueResponseFixtureError(Exception):
    """
    Error occurred while configuring the stub XQueue.
    """
    # No extra behavior; exists so callers can catch XQueue-fixture
    # configuration failures specifically.
    pass
class XQueueResponseFixture(object):
    """
    Configure the XQueue stub's response to submissions.
    """

    def __init__(self, pattern, response_dict):
        """
        Configure XQueue stub to POST `response_dict` (a dictionary)
        back to the LMS when it receives a submission that contains the string
        `pattern`.

        Remember that there is one XQueue stub shared by all the tests;
        if possible, you should have tests use unique queue names
        to avoid conflict between tests running in parallel.
        """
        self._pattern = pattern
        self._response_dict = response_dict

    def install(self):
        """
        Configure the stub via HTTP.

        Raises XQueueResponseFixtureError when the stub rejects the
        configuration request.
        """
        url = XQUEUE_STUB_URL + "/set_config"

        # Configure the stub to respond to submissions to our queue
        payload = {self._pattern: json.dumps(self._response_dict)}
        response = requests.put(url, data=payload)

        if not response.ok:
            # BUG FIX: the original format string used indexes {1}/{2} with
            # only two arguments, so building the message raised IndexError
            # instead of the intended error; it also passed the response
            # dict where the status code belonged.
            raise XQueueResponseFixtureError(
                "Could not configure XQueue stub for queue '{0}'. "
                "Status code: {1}".format(self._pattern, response.status_code))
| agpl-3.0 |
falau/pogom | pogom/pgoapi/protos/POGOProtos/Networking/Requests/Messages/DownloadSettingsMessage_pb2.py | 16 | 2402 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Networking/Requests/Messages/DownloadSettingsMessage.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Networking/Requests/Messages/DownloadSettingsMessage.proto',
package='POGOProtos.Networking.Requests.Messages',
syntax='proto3',
serialized_pb=_b('\nEPOGOProtos/Networking/Requests/Messages/DownloadSettingsMessage.proto\x12\'POGOProtos.Networking.Requests.Messages\"\'\n\x17\x44ownloadSettingsMessage\x12\x0c\n\x04hash\x18\x01 \x01(\tb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_DOWNLOADSETTINGSMESSAGE = _descriptor.Descriptor(
name='DownloadSettingsMessage',
full_name='POGOProtos.Networking.Requests.Messages.DownloadSettingsMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='hash', full_name='POGOProtos.Networking.Requests.Messages.DownloadSettingsMessage.hash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=114,
serialized_end=153,
)
DESCRIPTOR.message_types_by_name['DownloadSettingsMessage'] = _DOWNLOADSETTINGSMESSAGE
DownloadSettingsMessage = _reflection.GeneratedProtocolMessageType('DownloadSettingsMessage', (_message.Message,), dict(
DESCRIPTOR = _DOWNLOADSETTINGSMESSAGE,
__module__ = 'POGOProtos.Networking.Requests.Messages.DownloadSettingsMessage_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Networking.Requests.Messages.DownloadSettingsMessage)
))
_sym_db.RegisterMessage(DownloadSettingsMessage)
# @@protoc_insertion_point(module_scope)
| mit |