commit
stringlengths 40
40
| subject
stringlengths 1
1.49k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| new_contents
stringlengths 1
29.8k
| old_contents
stringlengths 0
9.9k
| lang
stringclasses 3
values | proba
float64 0
1
|
---|---|---|---|---|---|---|---|
e145ef6ca54c9615f038601da17daf16550196d6 | Use environment variables to locate Windows GStreamer includes | binding.gyp | binding.gyp | {
"targets": [
{
"target_name": "gstreamer-superficial",
"sources": [ "gstreamer.cpp", "GLibHelpers.cpp", "GObjectWrap.cpp", "Pipeline.cpp" ],
"include_dirs": [
"<!(node -e \"require('nan')\")"
],
"cflags": [
"-Wno-cast-function-type"
],
"conditions" : [
["OS=='linux'", {
"include_dirs": [
'<!@(pkg-config gstreamer-1.0 --cflags-only-I | sed s/-I//g)',
'<!@(pkg-config gstreamer-app-1.0 --cflags-only-I | sed s/-I//g)',
'<!@(pkg-config gstreamer-app-1.0 --cflags-only-I | sed s/-I//g)'
],
"libraries": [
'<!@(pkg-config gstreamer-1.0 --libs)',
'<!@(pkg-config gstreamer-app-1.0 --libs)',
'<!@(pkg-config gstreamer-video-1.0 --libs)'
]
}],
["OS=='mac'", {
"include_dirs": [
'<!@(pkg-config gstreamer-1.0 --cflags-only-I | sed s/-I//g)',
'<!@(pkg-config gstreamer-app-1.0 --cflags-only-I | sed s/-I//g)',
'<!@(pkg-config gstreamer-app-1.0 --cflags-only-I | sed s/-I//g)'
],
"libraries": [
'<!@(pkg-config gstreamer-1.0 --libs)',
'<!@(pkg-config gstreamer-app-1.0 --libs)',
'<!@(pkg-config gstreamer-video-1.0 --libs)'
]
}],
["OS=='win'", {
"include_dirs": [
"<!(echo %GSTREAMER_1_0_ROOT_X86_64%)include\gstreamer-1.0",
"<!(echo %GSTREAMER_1_0_ROOT_X86_64%)lib\glib-2.0\include",
"<!(echo %GSTREAMER_1_0_ROOT_X86_64%)include\glib-2.0",
"<!(echo %GSTREAMER_1_0_ROOT_X86_64%)include\libxml2"
],
"libraries": [
"<!(echo %GSTREAMER_1_0_ROOT_X86_64%)lib\gstreamer-1.0.lib",
"<!(echo %GSTREAMER_1_0_ROOT_X86_64%)lib\gstapp-1.0.lib",
"<!(echo %GSTREAMER_1_0_ROOT_X86_64%)lib\gstvideo-1.0.lib",
"<!(echo %GSTREAMER_1_0_ROOT_X86_64%)lib\gobject-2.0.lib",
"<!(echo %GSTREAMER_1_0_ROOT_X86_64%)lib\glib-2.0.lib"
]
}]
]
}
]
}
| {
"targets": [
{
"target_name": "gstreamer-superficial",
"sources": [ "gstreamer.cpp", "GLibHelpers.cpp", "GObjectWrap.cpp", "Pipeline.cpp" ],
"include_dirs": [
"<!(node -e \"require('nan')\")"
],
"cflags": [
"-Wno-cast-function-type"
],
"conditions" : [
["OS=='linux'", {
"include_dirs": [
'<!@(pkg-config gstreamer-1.0 --cflags-only-I | sed s/-I//g)',
'<!@(pkg-config gstreamer-app-1.0 --cflags-only-I | sed s/-I//g)',
'<!@(pkg-config gstreamer-app-1.0 --cflags-only-I | sed s/-I//g)'
],
"libraries": [
'<!@(pkg-config gstreamer-1.0 --libs)',
'<!@(pkg-config gstreamer-app-1.0 --libs)',
'<!@(pkg-config gstreamer-video-1.0 --libs)'
]
}],
["OS=='mac'", {
"include_dirs": [
'<!@(pkg-config gstreamer-1.0 --cflags-only-I | sed s/-I//g)',
'<!@(pkg-config gstreamer-app-1.0 --cflags-only-I | sed s/-I//g)',
'<!@(pkg-config gstreamer-app-1.0 --cflags-only-I | sed s/-I//g)'
],
"libraries": [
'<!@(pkg-config gstreamer-1.0 --libs)',
'<!@(pkg-config gstreamer-app-1.0 --libs)',
'<!@(pkg-config gstreamer-video-1.0 --libs)'
]
}],
["OS=='win'", {
"include_dirs": [
"X:/gstreamer-sdk/1.0/x86_64/include/gstreamer-1.0",
"X:/gstreamer-sdk/1.0/x86_64/include/glib-2.0",
"X:/gstreamer-sdk/1.0/x86_64/include/libxml2"
],
"libraries": [
"X:/gstreamer-sdk/1.0/x86_64/lib/gstreamer-1.0.lib",
"X:/gstreamer-sdk/1.0/x86_64/lib/gstapp-1.0.lib",
"X:/gstreamer-sdk/1.0/x86_64/lib/gstvideo-1.0.lib",
"X:/gstreamer-sdk/1.0/x86_64/lib/gobject-2.0.lib",
"X:/gstreamer-sdk/1.0/x86_64/lib/glib-2.0.lib"
]
}]
]
}
]
}
| Python | 0 |
55dd6cb9dfb72fcbff89b10ccdd0d68c309d9aa9 | Enable RTTI on OS X to fix exception handling (gh issue #106) | binding.gyp | binding.gyp | {
"targets": [
{
"target_name": "oracle_bindings",
"sources": [ "src/connection.cpp",
"src/oracle_bindings.cpp",
"src/executeBaton.cpp",
"src/reader.cpp",
"src/statement.cpp",
"src/outParam.cpp" ],
"conditions": [
["OS=='mac'", {
"xcode_settings": {
"GCC_ENABLE_CPP_EXCEPTIONS": "YES",
"GCC_ENABLE_CPP_RTTI": "YES"
}
}],
["OS!='win'", {
"variables": {
"oci_include_dir%": "<!(if [ -z $OCI_INCLUDE_DIR ]; then echo \"/opt/instantclient/sdk/include/\"; else echo $OCI_INCLUDE_DIR; fi)",
"oci_lib_dir%": "<!(if [ -z $OCI_LIB_DIR ]; then echo \"/opt/instantclient/\"; else echo $OCI_LIB_DIR; fi)",
"oci_version%": "<!(if [ -z $OCI_VERSION ]; then echo 11; else echo $OCI_VERSION; fi)"
},
"libraries": [ "-locci", "-lclntsh", "-lnnz<(oci_version)" ],
"link_settings": {"libraries": [ '-L<(oci_lib_dir)'] }
}],
["OS=='win'", {
"configurations": {
"Release": {
"msvs_settings": {
"VCCLCompilerTool": {
"RuntimeLibrary": "2"
}
},
},
"Debug": {
"msvs_settings": {
"VCCLCompilerTool": {
"RuntimeLibrary": "3"
}
},
}
},
"variables": {
"oci_include_dir%": "<!(IF DEFINED OCI_INCLUDE_DIR (echo %OCI_INCLUDE_DIR%) ELSE (echo C:\oracle\instantclient\sdk\include))",
"oci_lib_dir%": "<!(IF DEFINED OCI_LIB_DIR (echo %OCI_LIB_DIR%) ELSE (echo C:\oracle\instantclient\sdk\lib\msvc))",
"oci_version%": "<!(IF DEFINED OCI_VERSION (echo %OCI_VERSION%) ELSE (echo 11))"
},
# "libraries": [ "-loci" ],
"link_settings": {"libraries": [ '<(oci_lib_dir)\oraocci<(oci_version).lib'] }
}]
],
"include_dirs": [ "<(oci_include_dir)" ],
"cflags!": [ "-fno-exceptions" ],
"cflags_cc!": [ "-fno-exceptions" ]
}
]
}
| {
"targets": [
{
"target_name": "oracle_bindings",
"sources": [ "src/connection.cpp",
"src/oracle_bindings.cpp",
"src/executeBaton.cpp",
"src/reader.cpp",
"src/statement.cpp",
"src/outParam.cpp" ],
"conditions": [
["OS=='mac'", {
"xcode_settings": {
"GCC_ENABLE_CPP_EXCEPTIONS": "YES"
}
}],
["OS!='win'", {
"variables": {
"oci_include_dir%": "<!(if [ -z $OCI_INCLUDE_DIR ]; then echo \"/opt/instantclient/sdk/include/\"; else echo $OCI_INCLUDE_DIR; fi)",
"oci_lib_dir%": "<!(if [ -z $OCI_LIB_DIR ]; then echo \"/opt/instantclient/\"; else echo $OCI_LIB_DIR; fi)",
"oci_version%": "<!(if [ -z $OCI_VERSION ]; then echo 11; else echo $OCI_VERSION; fi)"
},
"libraries": [ "-locci", "-lclntsh", "-lnnz<(oci_version)" ],
"link_settings": {"libraries": [ '-L<(oci_lib_dir)'] }
}],
["OS=='win'", {
"configurations": {
"Release": {
"msvs_settings": {
"VCCLCompilerTool": {
"RuntimeLibrary": "2"
}
},
},
"Debug": {
"msvs_settings": {
"VCCLCompilerTool": {
"RuntimeLibrary": "3"
}
},
}
},
"variables": {
"oci_include_dir%": "<!(IF DEFINED OCI_INCLUDE_DIR (echo %OCI_INCLUDE_DIR%) ELSE (echo C:\oracle\instantclient\sdk\include))",
"oci_lib_dir%": "<!(IF DEFINED OCI_LIB_DIR (echo %OCI_LIB_DIR%) ELSE (echo C:\oracle\instantclient\sdk\lib\msvc))",
"oci_version%": "<!(IF DEFINED OCI_VERSION (echo %OCI_VERSION%) ELSE (echo 11))"
},
# "libraries": [ "-loci" ],
"link_settings": {"libraries": [ '<(oci_lib_dir)\oraocci<(oci_version).lib'] }
}]
],
"include_dirs": [ "<(oci_include_dir)" ],
"cflags!": [ "-fno-exceptions" ],
"cflags_cc!": [ "-fno-exceptions" ]
}
]
}
| Python | 0 |
043a0ad774964d2608ee1c8bd8ba1abc5b2ed0b4 | Tweak binding.gyp so it doesn't error out on Windows | binding.gyp | binding.gyp | {
'targets': [{
'target_name': 'pty',
'conditions': [
['OS!="win"', {
'include_dirs' : [
'<!(node -e "require(\'nan\')")'
],
'sources': [
'src/unix/pty.cc'
],
'libraries': [
'-lutil',
'-L/usr/lib',
'-L/usr/local/lib'
],
'conditions': [
# http://www.gnu.org/software/gnulib/manual/html_node/forkpty.html
# One some systems (at least including Cygwin, Interix,
# OSF/1 4 and 5, and Mac OS X) linking with -lutil is not required.
['OS=="mac" or OS=="solaris"', {
'libraries!': [
'-lutil'
]
}]
]
}]
]
}],
}
| {
'conditions': [
['OS!="win"', {
'targets': [{
'target_name': 'pty',
'include_dirs' : [
'<!(node -e "require(\'nan\')")'
],
'sources': [
'src/unix/pty.cc'
],
'libraries': [
'-lutil',
'-L/usr/lib',
'-L/usr/local/lib'
],
'conditions': [
# http://www.gnu.org/software/gnulib/manual/html_node/forkpty.html
# One some systems (at least including Cygwin, Interix,
# OSF/1 4 and 5, and Mac OS X) linking with -lutil is not required.
['OS=="mac" or OS=="solaris"', {
'libraries!': [
'-lutil'
]
}]
]
}]
}]
]
}
| Python | 0 |
5a6f748981554cb4d4aa0b5500a9b86bd09eb1b5 | Add Linux static bindings | binding.gyp | binding.gyp | {
'targets': [
{
'target_name': 'zmq',
'sources': [ 'binding.cc' ],
'include_dirs' : [
"<!(node -e \"require('nan')\")"
],
'conditions': [
['OS=="win"', {
'win_delay_load_hook': 'true',
'include_dirs': ['windows/include'],
'link_settings': {
'libraries': [
'Delayimp.lib',
],
'conditions': [
['target_arch=="ia32"', {
'libraries': [
'<(PRODUCT_DIR)/../../windows/lib/x86/libzmq.lib',
]
},{
'libraries': [
'<(PRODUCT_DIR)/../../windows/lib/x64/libzmq.lib',
]
}]
],
},
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': ['libzmq.dll']
}
},
}, {
'libraries': [ '<(PRODUCT_DIR)/../../zmq/lib/libzmq.a' ],
'include_dirs': [ '<(PRODUCT_DIR)/../../zmq/include' ],
'cflags!': ['-fno-exceptions'],
'cflags_cc!': ['-fno-exceptions'],
}],
['OS=="mac" or OS=="solaris"', {
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'MACOSX_DEPLOYMENT_TARGET': '10.6',
},
'libraries': [ '<(PRODUCT_DIR)/../../zmq/lib/libzmq.a' ],
}],
['OS=="openbsd" or OS=="freebsd"', {
}],
['OS=="linux"', {
'libraries': [ '<(PRODUCT_DIR)/../../zmq/lib/libzmq.a' ],
}],
]
}
]
}
| {
'targets': [
{
'target_name': 'zmq',
'sources': [ 'binding.cc' ],
'include_dirs' : [
"<!(node -e \"require('nan')\")"
],
'conditions': [
['OS=="win"', {
'win_delay_load_hook': 'true',
'include_dirs': ['windows/include'],
'link_settings': {
'libraries': [
'Delayimp.lib',
],
'conditions': [
['target_arch=="ia32"', {
'libraries': [
'<(PRODUCT_DIR)/../../windows/lib/x86/libzmq.lib',
]
},{
'libraries': [
'<(PRODUCT_DIR)/../../windows/lib/x64/libzmq.lib',
]
}]
],
},
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': ['libzmq.dll']
}
},
}, {
'libraries': [ '<(PRODUCT_DIR)/../../zmq/lib/libzmq.a' ],
'include_dirs': [ '<(PRODUCT_DIR)/../../zmq/include' ],
'cflags!': ['-fno-exceptions'],
'cflags_cc!': ['-fno-exceptions'],
}],
['OS=="mac" or OS=="solaris"', {
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'MACOSX_DEPLOYMENT_TARGET': '10.6',
},
'libraries': [ '<(PRODUCT_DIR)/../../zmq/lib/libzmq.a' ],
}],
['OS=="openbsd" or OS=="freebsd"', {
}],
['OS=="linux"', {
}],
]
}
]
}
| Python | 0 |
777bb37f9ac4457dca79a07953356ce46b941a30 | change '-std=c++11' to '-std=c++0x' for linux | binding.gyp | binding.gyp | {
'targets': [
{
'target_name': 'eigen',
'sources': [
'src/EigenJS.cpp'
],
'include_dirs': [
'deps',
"<!(node -e \"require('nan')\")"
],
'conditions': [
['OS=="win"', {
'msvs_settings': {
'VCCLCompilerTool': {
'ExceptionHandling': 1,
'AdditionalOptions': [ '/GR', '/EHsc', '/wd4018', '/wd4506' ]
}
}
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'cflags': [ '-std=c++0x' ],
'cflags_cc!': [ '-fno-rtti', '-fno-exceptions']
}],
['OS=="mac"', {
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'GCC_ENABLE_CPP_RTTI': 'YES',
'OTHER_CPLUSPLUSFLAGS': [ '-std=c++11', '-stdlib=libc++' ],
'OTHER_LDFLAGS': [ '-stdlib=libc++' ],
'MACOSX_DEPLOYMENT_TARGET': '10.7'
}
}]
]
}
]
}
| {
'targets': [
{
'target_name': 'eigen',
'sources': [
'src/EigenJS.cpp'
],
'include_dirs': [
'deps',
"<!(node -e \"require('nan')\")"
],
'conditions': [
['OS=="win"', {
'msvs_settings': {
'VCCLCompilerTool': {
'ExceptionHandling': 1,
'AdditionalOptions': [ '/GR', '/EHsc', '/wd4018', '/wd4506' ]
}
}
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'cflags': [ '-std=c++11' ],
'cflags_cc!': [ '-fno-rtti', '-fno-exceptions']
}],
['OS=="mac"', {
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'GCC_ENABLE_CPP_RTTI': 'YES',
'OTHER_CPLUSPLUSFLAGS': [ '-std=c++11', '-stdlib=libc++' ],
'OTHER_LDFLAGS': [ '-stdlib=libc++' ],
'MACOSX_DEPLOYMENT_TARGET': '10.7'
}
}]
]
}
]
}
| Python | 0.000023 |
786e7d83672ad5ff2718c9a440dbd180f8e7b24a | make addon buildable as static library (#119) | binding.gyp | binding.gyp | {
'targets': [
{
'target_name': 'kerberos',
'type': 'loadable_module',
'include_dirs': [ '<!(node -e "require(\'nan\')")' ],
'sources': [
'src/kerberos.cc'
],
'xcode_settings': {
'MACOSX_DEPLOYMENT_TARGET': '10.12',
'OTHER_CFLAGS': [
"-std=c++11",
"-stdlib=libc++"
],
},
'conditions': [
['OS=="mac" or OS=="linux"', {
'sources': [
'src/unix/base64.cc',
'src/unix/kerberos_gss.cc',
'src/unix/kerberos_unix.cc'
],
'link_settings': {
'libraries': [
'-lkrb5',
'-lgssapi_krb5'
]
},
'conditions': [
['_type=="static_library"', {
'link_settings': {
'libraries': [
'-lcom_err'
]
}
}]
]
}],
['OS=="win"', {
'sources': [
'src/win32/kerberos_sspi.cc',
'src/win32/kerberos_win32.cc'
],
'link_settings': {
'libraries': [
'-lcrypt32',
'-lsecur32',
'-lShlwapi'
]
}
}]
]
}
]
} | {
'targets': [
{
'target_name': 'kerberos',
'include_dirs': [ '<!(node -e "require(\'nan\')")' ],
'sources': [
'src/kerberos.cc'
],
'xcode_settings': {
'MACOSX_DEPLOYMENT_TARGET': '10.12',
'OTHER_CFLAGS': [
"-std=c++11",
"-stdlib=libc++"
],
},
'conditions': [
['OS=="mac" or OS=="linux"', {
'sources': [
'src/unix/base64.cc',
'src/unix/kerberos_gss.cc',
'src/unix/kerberos_unix.cc'
],
'link_settings': {
'libraries': [
'-lkrb5',
'-lgssapi_krb5'
]
}
}],
['OS=="win"', {
'sources': [
'src/win32/kerberos_sspi.cc',
'src/win32/kerberos_win32.cc'
],
'link_settings': {
'libraries': [
'crypt32.lib',
'secur32.lib',
'Shlwapi.lib'
]
}
}]
]
}
]
} | Python | 0.000001 |
b6208c1f9b6f0afca1dff40a66d2c915594b1946 | Add exception hook to help diagnose server test errors in python3 gui mode | blaze/io/server/tests/start_simple_server.py | blaze/io/server/tests/start_simple_server.py | """
Starts a Blaze server for tests.
$ start_test_server.py /path/to/catalog_config.yaml <portnumber>
"""
import sys, os
if os.name == 'nt':
old_excepthook = sys.excepthook
# Exclude this from our autogenerated API docs.
undoc = lambda func: func
@undoc
def gui_excepthook(exctype, value, tb):
try:
import ctypes, traceback
MB_ICONERROR = 0x00000010
title = u'Error starting test Blaze server'
msg = u''.join(traceback.format_exception(exctype, value, tb))
ctypes.windll.user32.MessageBoxW(0, msg, title, MB_ICONERROR)
finally:
# Also call the old exception hook to let it do
# its thing too.
old_excepthook(exctype, value, tb)
sys.excepthook = gui_excepthook
import blaze
from blaze.io.server.app import app
blaze.catalog.load_config(sys.argv[1])
app.run(port=int(sys.argv[2]), use_reloader=False)
| """
Starts a Blaze server for tests.
$ start_test_server.py /path/to/catalog_config.yaml <portnumber>
"""
import sys, os
import blaze
from blaze.io.server.app import app
blaze.catalog.load_config(sys.argv[1])
app.run(port=int(sys.argv[2]), use_reloader=False)
| Python | 0 |
826698c9894ce94c625718eb041ce817eb6ab5ef | Update config.dist.py | boiler/boiler_template/config/config.dist.py | boiler/boiler_template/config/config.dist.py | from project.backend import config
class DefaultConfig(config.DefaultConfig):
""" Local development config """
# set this for offline mode
SERVER_NAME = None
SECRET_KEY = None
class DevConfig(config.DevConfig, DefaultConfig):
""" Local development config """
pass
class TestingConfig(config.TestingConfig, DefaultConfig):
""" Local testing config """
pass
| from project.backend import config
class DefaultConfig(config.DefaultConfig):
""" Local development config """
# set this for offline mode
SERVER_NAME = None
SECRET_KEY = None
class DevConfig(config.DevConfig, DefaultConfig):
""" Local development config """
pass
class TestingConfig(config.TestingConfig, DefaultConfig):
""" Local testing config """
| Python | 0.000002 |
4c4b1e6a4bde5edb9e11942245a21437e73fe6df | fix link creation | archivebox/index/sql.py | archivebox/index/sql.py | __package__ = 'archivebox.index'
from io import StringIO
from typing import List, Tuple, Iterator
from .schema import Link
from ..util import enforce_types
from ..config import setup_django, OUTPUT_DIR
### Main Links Index
@enforce_types
def parse_sql_main_index(out_dir: str=OUTPUT_DIR) -> Iterator[Link]:
setup_django(out_dir, check_db=True)
from core.models import Snapshot
return (
Link.from_json(page.as_json(*Snapshot.keys))
for page in Snapshot.objects.all()
)
@enforce_types
def write_sql_main_index(links: List[Link], out_dir: str=OUTPUT_DIR) -> None:
setup_django(out_dir, check_db=True)
from core.models import Snapshot
from django.db import transaction
with transaction.atomic():
for link in links:
info = {k: v for k, v in link._asdict().items() if k in Snapshot.keys}
Snapshot.objects.update_or_create(url=link.url, defaults=info)
@enforce_types
def write_sql_link_details(link: Link, out_dir: str=OUTPUT_DIR) -> None:
setup_django(out_dir, check_db=True)
from core.models import Snapshot
from django.db import transaction
with transaction.atomic():
snap = Snapshot.objects.get(url=link.url, timestamp=link.timestamp)
snap.title = link.title
snap.tags = link.tags
snap.save()
@enforce_types
def list_migrations(out_dir: str=OUTPUT_DIR) -> List[Tuple[bool, str]]:
setup_django(out_dir, check_db=False)
from django.core.management import call_command
out = StringIO()
call_command("showmigrations", list=True, stdout=out)
out.seek(0)
migrations = []
for line in out.readlines():
if line.strip() and ']' in line:
status_str, name_str = line.strip().split(']', 1)
is_applied = 'X' in status_str
migration_name = name_str.strip()
migrations.append((is_applied, migration_name))
return migrations
@enforce_types
def apply_migrations(out_dir: str=OUTPUT_DIR) -> List[str]:
setup_django(out_dir, check_db=False)
from django.core.management import call_command
null, out = StringIO(), StringIO()
call_command("makemigrations", interactive=False, stdout=null)
call_command("migrate", interactive=False, stdout=out)
out.seek(0)
return [line.strip() for line in out.readlines() if line.strip()]
@enforce_types
def get_admins(out_dir: str=OUTPUT_DIR) -> List[str]:
setup_django(out_dir, check_db=False)
from django.contrib.auth.models import User
return User.objects.filter(is_superuser=True)
| __package__ = 'archivebox.index'
from io import StringIO
from typing import List, Tuple, Iterator
from .schema import Link
from ..util import enforce_types
from ..config import setup_django, OUTPUT_DIR
### Main Links Index
@enforce_types
def parse_sql_main_index(out_dir: str=OUTPUT_DIR) -> Iterator[Link]:
setup_django(out_dir, check_db=True)
from core.models import Snapshot
return (
Link.from_json(page.as_json(*Snapshot.keys))
for page in Snapshot.objects.all()
)
@enforce_types
def write_sql_main_index(links: List[Link], out_dir: str=OUTPUT_DIR) -> None:
setup_django(out_dir, check_db=True)
from core.models import Snapshot
from django.db import transaction
with transaction.atomic():
for link in links:
info = {k: v for k, v in link._asdict().items() if k in Snapshot.keys}
Snapshot.objects.update_or_create(url=url, defaults=info)
@enforce_types
def write_sql_link_details(link: Link, out_dir: str=OUTPUT_DIR) -> None:
setup_django(out_dir, check_db=True)
from core.models import Snapshot
from django.db import transaction
with transaction.atomic():
snap = Snapshot.objects.get(url=link.url, timestamp=link.timestamp)
snap.title = link.title
snap.tags = link.tags
snap.save()
@enforce_types
def list_migrations(out_dir: str=OUTPUT_DIR) -> List[Tuple[bool, str]]:
setup_django(out_dir, check_db=False)
from django.core.management import call_command
out = StringIO()
call_command("showmigrations", list=True, stdout=out)
out.seek(0)
migrations = []
for line in out.readlines():
if line.strip() and ']' in line:
status_str, name_str = line.strip().split(']', 1)
is_applied = 'X' in status_str
migration_name = name_str.strip()
migrations.append((is_applied, migration_name))
return migrations
@enforce_types
def apply_migrations(out_dir: str=OUTPUT_DIR) -> List[str]:
setup_django(out_dir, check_db=False)
from django.core.management import call_command
null, out = StringIO(), StringIO()
call_command("makemigrations", interactive=False, stdout=null)
call_command("migrate", interactive=False, stdout=out)
out.seek(0)
return [line.strip() for line in out.readlines() if line.strip()]
@enforce_types
def get_admins(out_dir: str=OUTPUT_DIR) -> List[str]:
setup_django(out_dir, check_db=False)
from django.contrib.auth.models import User
return User.objects.filter(is_superuser=True)
| Python | 0 |
efdf4a4898cc3b5217ac5e45e75a74e19eee95d4 | bump version | evojax/version.py | evojax/version.py | __version__ = "0.1.0-14"
| __version__ = "0.1.0-13"
| Python | 0 |
155c953f7bf8590b4a11547369bee29baa5ea5f6 | Fix typo. | isaactest/tests/numeric_q_all_correct.py | isaactest/tests/numeric_q_all_correct.py | import time
from ..utils.log import log, INFO, ERROR, PASS
from ..utils.isaac import answer_numeric_q
from ..utils.i_selenium import assert_tab, image_div
from ..utils.i_selenium import wait_for_xpath_element
from ..tests import TestWithDependency
from selenium.common.exceptions import TimeoutException, NoSuchElementException
__all__ = ["numeric_q_all_correct"]
#####
# Test : Numeric Questions Correct Answers
#####
@TestWithDependency("NUMERIC_Q_ALL_CORRECT", ["NUMERIC_Q_UNITS_SELECT"])
def numeric_q_all_correct(driver, ISAAC_WEB, WAIT_DUR):
"""Test if numeric questions can be answered correctly.
- 'driver' should be a Selenium WebDriver.
- 'ISAAC_WEB' is the string URL of the Isaac website to be tested.
- 'WAIT_DUR' is the time in seconds to wait for JavaScript to run/load.
"""
assert_tab(driver, ISAAC_WEB)
time.sleep(WAIT_DUR)
try:
num_question = driver.find_element_by_xpath("//div[@ng-switch-when='isaacNumericQuestion']")
except NoSuchElementException:
log(ERROR, "Can't find the numeric question; can't continue!")
return False
log(INFO, "Attempt to enter correct answer.")
if not answer_numeric_q(num_question, "2.01", "\units{ m\,s^{-1} }", wait_dur=WAIT_DUR):
log(ERROR, "Couldn't answer Numeric Question; can't continue!")
return False
time.sleep(WAIT_DUR)
try:
wait_for_xpath_element(driver, "//div[@ng-switch-when='isaacNumericQuestion']//h1[text()='Correct!']")
log(INFO, "A 'Correct!' message was displayed as expected.")
wait_for_xpath_element(driver, "(//div[@ng-switch-when='isaacNumericQuestion']//p[text()='This is a correct choice.'])[2]")
log(INFO, "The editor entered explanation text was correctly shown.")
wait_for_xpath_element(driver, "//div[@ng-switch-when='isaacNumericQuestion']//strong[text()='Well done!']")
log(INFO, "The 'Well done!' message was correctly shown.")
time.sleep(WAIT_DUR)
log(PASS, "Numeric Question 'correct value, correct unit' behavior as expected.")
return True
except TimeoutException:
image_div(driver, "ERROR_numeric_q_all_correct")
log(ERROR, "The messages shown for a correct answer were not all displayed; see 'ERROR_numeric_q_all_correct.png'!")
return False
| import time
from ..utils.log import log, INFO, ERROR, PASS
from ..utils.isaac import answer_numeric_q
from ..utils.i_selenium import assert_tab, image_div
from ..utils.i_selenium import wait_for_xpath_element
from ..tests import TestWithDependency
from selenium.common.exceptions import TimeoutException, NoSuchElementException
__all__ = ["numeric_q_all_correct"]
#####
# Test : Numeric Questions Correct Answers
#####
@TestWithDependency("NUMERIC_Q_ALL_CORRECT", ["NUMERIC_Q_UNITS_SELECT"])
def numeric_q_all_correct(driver, ISAAC_WEB, WAIT_DUR):
"""Test is numeric questions can be answered correctly.
- 'driver' should be a Selenium WebDriver.
- 'ISAAC_WEB' is the string URL of the Isaac website to be tested.
- 'WAIT_DUR' is the time in seconds to wait for JavaScript to run/load.
"""
assert_tab(driver, ISAAC_WEB)
time.sleep(WAIT_DUR)
try:
num_question = driver.find_element_by_xpath("//div[@ng-switch-when='isaacNumericQuestion']")
except NoSuchElementException:
log(ERROR, "Can't find the numeric question; can't continue!")
return False
log(INFO, "Attempt to enter correct answer.")
if not answer_numeric_q(num_question, "2.01", "\units{ m\,s^{-1} }", wait_dur=WAIT_DUR):
log(ERROR, "Couldn't answer Numeric Question; can't continue!")
return False
time.sleep(WAIT_DUR)
try:
wait_for_xpath_element(driver, "//div[@ng-switch-when='isaacNumericQuestion']//h1[text()='Correct!']")
log(INFO, "A 'Correct!' message was displayed as expected.")
wait_for_xpath_element(driver, "(//div[@ng-switch-when='isaacNumericQuestion']//p[text()='This is a correct choice.'])[2]")
log(INFO, "The editor entered explanation text was correctly shown.")
wait_for_xpath_element(driver, "//div[@ng-switch-when='isaacNumericQuestion']//strong[text()='Well done!']")
log(INFO, "The 'Well done!' message was correctly shown.")
time.sleep(WAIT_DUR)
log(PASS, "Numeric Question 'correct value, correct unit' behavior as expected.")
return True
except TimeoutException:
image_div(driver, "ERROR_numeric_q_all_correct")
log(ERROR, "The messages shown for a correct answer were not all displayed; see 'ERROR_numeric_q_all_correct.png'!")
return False
| Python | 0.001604 |
aabc4bc60f0c8b6db21453dd6fad387773b18e55 | Fix a print | openquake/commands/__main__.py | openquake/commands/__main__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2015-2018 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import importlib
from openquake.baselib import sap
from openquake.commonlib import __version__
from openquake import commands
PY_VER = sys.version_info[:3]
# check for Python version
if PY_VER < (3, 5):
sys.exit('Python 3.5+ is required, you are using %s', sys.executable)
elif PY_VER < (3, 6):
print('DeprecationWarning: Python %s.%s.%s is deprecated. '
'Please upgrade to Python 3.6+' % PY_VER)
# force cluster users to use `oq engine` so that we have centralized logs
if os.environ['OQ_DISTRIBUTE'] == 'celery' and 'run' in sys.argv:
print('You are on a cluster and you are using oq run?? '
'Use oq engine --run instead!')
def oq():
modnames = ['openquake.commands.%s' % mod[:-3]
for mod in os.listdir(commands.__path__[0])
if mod.endswith('.py') and not mod.startswith('_')]
for modname in modnames:
importlib.import_module(modname)
parser = sap.compose(sap.Script.registry.values(),
prog='oq', version=__version__)
parser.callfunc()
if __name__ == '__main__':
oq()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2015-2018 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import importlib
from openquake.baselib import sap
from openquake.commonlib import __version__
from openquake import commands
PY_VER = sys.version_info[:3]
# check for Python version
if PY_VER < (3, 5):
sys.exit('Python 3.5+ is required, you are using %s', sys.executable)
elif PY_VER < (3, 6):
print('Warning: Python %s.%s.%s is deprecated. '
'Please upgrade to Python 3.6+' % PY_VER)
# force cluster users to use `oq engine` so that we have centralized logs
if os.environ['OQ_DISTRIBUTE'] == 'celery' and 'run' in sys.argv:
print('You are on a cluster and you are using oq run?? '
'Use oq engine --run instead!')
def oq():
modnames = ['openquake.commands.%s' % mod[:-3]
for mod in os.listdir(commands.__path__[0])
if mod.endswith('.py') and not mod.startswith('_')]
for modname in modnames:
importlib.import_module(modname)
parser = sap.compose(sap.Script.registry.values(),
prog='oq', version=__version__)
parser.callfunc()
if __name__ == '__main__':
oq()
| Python | 0.999999 |
df8848beffeb952f8da034c13d22245ce123f576 | fix 677: Error on enum type during manage.py migrations | shop/models/fields.py | shop/models/fields.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import enum
import six
from django.conf import settings
from django.db import models
from django.utils.six import python_2_unicode_compatible, string_types
from django.utils.translation import ugettext_lazy as _, ugettext
postgresql_engine_names = [
'django.db.backends.postgresql',
'django.db.backends.postgresql_psycopg2',
]
if settings.DATABASES['default']['ENGINE'] in postgresql_engine_names:
from django.contrib.postgres.fields import JSONField as _JSONField
else:
from jsonfield.fields import JSONField as _JSONField
class JSONField(_JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(JSONField, self).deconstruct()
del kwargs['default']
return name, path, args, kwargs
class ChoiceEnumMeta(enum.EnumMeta):
def __call__(cls, value, *args, **kwargs):
if isinstance(value, string_types):
try:
value = cls.__members__[value]
except KeyError:
pass # let the super method complain
return super(ChoiceEnumMeta, cls).__call__(value, *args, **kwargs)
@python_2_unicode_compatible
class ChoiceEnum(six.with_metaclass(ChoiceEnumMeta, enum.Enum)):
"""
Utility class to handle choices in Django model fields
"""
def __str__(self):
return ugettext('.'.join((self.__class__.__name__, self.name)))
@classmethod
def default(cls):
try:
return next(iter(cls))
except StopIteration:
return None
@classmethod
def choices(cls):
choices = [(c.value, str(c)) for c in cls]
return choices
class ChoiceEnumField(models.PositiveSmallIntegerField):
description = _("Customer recognition state")
def __init__(self, *args, **kwargs):
self.enum_type = kwargs.pop('enum_type', ChoiceEnum) # fallback is required form migrations
if not issubclass(self.enum_type, ChoiceEnum):
raise ValueError("enum_type must be a subclass of `ChoiceEnum`.")
kwargs.update(choices=self.enum_type.choices())
kwargs.setdefault('default', self.enum_type.default())
super(ChoiceEnumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(ChoiceEnumField, self).deconstruct()
if 'choices' in kwargs:
del kwargs['choices']
if kwargs['default'] is self.enum_type.default():
del kwargs['default']
elif isinstance(kwargs['default'], self.enum_type):
kwargs['default'] = kwargs['default'].value
return name, path, args, kwargs
def from_db_value(self, value, expression, connection, context):
try:
return self.enum_type(value)
except ValueError:
return value
def get_prep_value(self, state):
if isinstance(state, self.enum_type):
return state.value
return state
def to_python(self, state):
return self.enum_type(state)
def value_to_string(self, obj):
value = getattr(obj, self.name)
if not isinstance(value, self.enum_type):
raise ValueError("Value must be of type {}".format(self.enum_type))
return value.name
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import enum
import six
from django.conf import settings
from django.db import models
from django.utils.six import python_2_unicode_compatible, string_types
from django.utils.translation import ugettext_lazy as _, ugettext
postgresql_engine_names = [
'django.db.backends.postgresql',
'django.db.backends.postgresql_psycopg2',
]
if settings.DATABASES['default']['ENGINE'] in postgresql_engine_names:
from django.contrib.postgres.fields import JSONField as _JSONField
else:
from jsonfield.fields import JSONField as _JSONField
class JSONField(_JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(JSONField, self).deconstruct()
del kwargs['default']
return name, path, args, kwargs
class ChoiceEnumMeta(enum.EnumMeta):
def __call__(cls, value, *args, **kwargs):
if isinstance(value, string_types):
try:
value = cls.__members__[value]
except KeyError:
pass # let the super method complain
return super(ChoiceEnumMeta, cls).__call__(value, *args, **kwargs)
@python_2_unicode_compatible
class ChoiceEnum(six.with_metaclass(ChoiceEnumMeta, enum.Enum)):
"""
Utility class to handle choices in Django model fields
"""
def __str__(self):
return ugettext('.'.join((self.__class__.__name__, self.name)))
@classmethod
def default(cls):
try:
return next(iter(cls))
except StopIteration:
return None
@classmethod
def choices(cls):
choices = [(c.value, str(c)) for c in cls]
return choices
class ChoiceEnumField(models.PositiveSmallIntegerField):
description = _("Customer recognition state")
def __init__(self, *args, **kwargs):
self.enum_type = kwargs.pop('enum_type', ChoiceEnum) # fallback is required form migrations
if not issubclass(self.enum_type, ChoiceEnum):
raise ValueError("enum_type must be a subclass of `ChoiceEnum`.")
kwargs.update(choices=self.enum_type.choices())
kwargs.setdefault('default', self.enum_type.default())
super(ChoiceEnumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(ChoiceEnumField, self).deconstruct()
if 'choices' in kwargs:
del kwargs['choices']
if kwargs['default'] is self.enum_type.default():
del kwargs['default']
elif isinstance(kwargs['default'], self.enum_type):
kwargs['default'] = kwargs['default'].value
return name, path, args, kwargs
def from_db_value(self, value, expression, connection, context):
try:
return self.enum_type(value)
except ValueError:
return value
def get_prep_value(self, state):
if isinstance(state, self.enum_type):
return state.value
if isinstance(state, int):
return state
raise ValueError("Value must be of type {}".format(self.enum_type))
def to_python(self, state):
return self.enum_type(state)
def value_to_string(self, obj):
value = getattr(obj, self.name)
if not isinstance(value, self.enum_type):
raise ValueError("Value must be of type {}".format(self.enum_type))
return value.name
| Python | 0 |
8b69b3af8b7ed9dcbd00f2b22a47828627dc7c78 | fix setup | zgres/tests/test_apt.py | zgres/tests/test_apt.py | import os
from unittest import mock
import asyncio
from subprocess import check_output, check_call
import pytest
import psycopg2
from . import FakeSleeper
def have_root():
destroy = os.environ.get('ZGRES_DESTROY_MACHINE', 'false').lower()
if destroy in ['t', 'true']:
user = check_output(['whoami']).decode('latin1').strip()
if user != 'root':
raise Exception('I need to run as root if you want me to destroy the machine! I am {}'.format(repr(user)))
return True
return False
needs_root = pytest.mark.skipif(not have_root(), reason='requires root and ZGRES_DESTROY_MACHINE=true in the environment')
@pytest.fixture
def cluster():
return ('9.4', 'zgres_test')
@pytest.fixture
def plugin(cluster):
pg_version, cluster_name = cluster
app = mock.Mock()
app.config = dict(
apt=dict(
postgresql_version=pg_version,
postgresql_cluster_name=cluster_name,
create_superuser=True))
from ..apt import AptPostgresqlPlugin
return AptPostgresqlPlugin('zgres#apt', app)
def test_config_file(plugin, cluster):
assert plugin._config_file(name='pg_hba.conf') == '/etc/postgresql/{}/{}/pg_hba.conf'.format(*cluster)
@pytest.mark.asyncio
async def test_monitoring(plugin, cluster):
with mock.patch('zgres.apt.sleep') as sleep, mock.patch('zgres.apt.call') as subprocess_call:
retvals = [
0, # become healthy
1, # noop
0, 0, # become healthy
6, 5, # become unhelathy after 2 failed checks
0, # become healthy
]
subprocess_call.side_effect = retvals
sleeper = FakeSleeper(max_loops=len(retvals) + 1)
sleep.side_effect = sleeper
plugin.start_monitoring()
await sleeper.wait()
assert plugin.app.mock_calls == [
mock.call.unhealthy(('zgres#apt', 'systemd'), 'Waiting for first systemd check'),
mock.call.healthy(('zgres#apt', 'systemd')),
mock.call.healthy(('zgres#apt', 'systemd')),
mock.call.unhealthy(('zgres#apt', 'systemd'), 'inactive according to systemd'),
mock.call.healthy(('zgres#apt', 'systemd')),
]
subprocess_call.assert_has_calls(
[mock.call(['systemctl', 'is-active', 'postgresql@{}-{}.service'.format(*cluster)]),
] * len(retvals))
@needs_root
def test_travis(plugin, cluster):
plugin.pg_initdb()
plugin.pg_start()
conn_info = plugin.pg_connect_info()
with psycopg2.connect(**conn_info) as conn:
with conn.cursor() as cur:
cur.execute('SELECT version(), current_database();')
got_ver, got_db = cur.fetchall()[0]
assert got_ver == cluster[0]
assert got_db == 'PostgreSQL {}'.format(cluster[1])
check_call(['pg_dropcluster'] + list(cluster))
| import os
from unittest import mock
import asyncio
from subprocess import check_output, check_call
import pytest
import psycopg2
from . import FakeSleeper
def have_root():
destroy = os.environ.get('ZGRES_DESTROY_MACHINE', 'false').lower()
if destroy in ['t', 'true']:
user = check_output(['whoami']).decode('latin1').strip()
if user != 'root':
raise Exception('I need to run as root if you want me to destroy the machine! I am {}'.format(repr(user)))
return True
return False
needs_root = pytest.mark.skipif(not have_root(), reason='requires root and ZGRES_DESTROY_MACHINE=true in the environment')
@pytest.fixture
def cluster():
return ('9.4', 'zgres_test')
@pytest.fixture
def plugin(cluster):
pg_version, cluster_name = cluster
app = mock.Mock()
app.config = dict(
apt=dict(
postgresql_version=pg_version,
postgresql_cluster_name=cluster_name))
from ..apt import AptPostgresqlPlugin
return AptPostgresqlPlugin('zgres#apt', app)
def test_config_file(plugin, cluster):
assert plugin._config_file(name='pg_hba.conf') == '/etc/postgresql/{}/{}/pg_hba.conf'.format(*cluster)
@pytest.mark.asyncio
async def test_monitoring(plugin, cluster):
with mock.patch('zgres.apt.sleep') as sleep, mock.patch('zgres.apt.call') as subprocess_call:
retvals = [
0, # become healthy
1, # noop
0, 0, # become healthy
6, 5, # become unhelathy after 2 failed checks
0, # become healthy
]
subprocess_call.side_effect = retvals
sleeper = FakeSleeper(max_loops=len(retvals) + 1)
sleep.side_effect = sleeper
plugin.start_monitoring()
await sleeper.wait()
assert plugin.app.mock_calls == [
mock.call.unhealthy(('zgres#apt', 'systemd'), 'Waiting for first systemd check'),
mock.call.healthy(('zgres#apt', 'systemd')),
mock.call.healthy(('zgres#apt', 'systemd')),
mock.call.unhealthy(('zgres#apt', 'systemd'), 'inactive according to systemd'),
mock.call.healthy(('zgres#apt', 'systemd')),
]
subprocess_call.assert_has_calls(
[mock.call(['systemctl', 'is-active', 'postgresql@{}-{}.service'.format(*cluster)]),
] * len(retvals))
@needs_root
def test_travis(plugin, cluster):
plugin.pg_initdb()
plugin.pg_start()
conn_info = plugin.pg_connect_info()
with psycopg2.connect(**conn_info) as conn:
with conn.cursor() as cur:
cur.execute('SELECT version(), current_database();')
got_ver, got_db = cur.fetchall()[0]
assert got_ver == cluster[0]
assert got_db == 'PostgreSQL {}'.format(cluster[1])
check_call(['pg_dropcluster'] + list(cluster))
| Python | 0.000001 |
2a13f4d21085228a1ef615eec8a3e42110c315d3 | Make test pass | benchmarker/modules/problems/cnn2d_toy/pytorch.py | benchmarker/modules/problems/cnn2d_toy/pytorch.py | import torch
import torch.nn as nn
import torch.nn.functional as F
from benchmarker.modules.problems.helpers_torch import Net4Inference, Net4Train
class Net(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(in_channels=3, out_channels=32, kernel_size=2)
self.conv2 = nn.Conv2d(in_channels=32, out_channels=32, kernel_size=2)
# TODO: make sure we check cnt_classes
self.dense1 = nn.Linear(1577088, 2)
def __call__(self, x):
h = x
h = self.conv1(h)
h = F.relu(h)
h = self.conv2(h)
h = F.relu(h)
h = torch.flatten(h, 1)
h = self.dense1(h)
return h
# TODO: this can be reused as well
def get_kernel(params, unparsed_args=None):
net = Net()
if params["mode"] == "inference":
net = Net4Inference(net)
else:
net = Net4Train(net)
return net
| import torch
import torch.nn as nn
import torch.nn.functional as F
from benchmarker.modules.problems.helpers_torch import Net4Inference, Net4Train
class Net(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(in_channels=3, out_channels=32, kernel_size=2)
self.conv2 = nn.Conv2d(in_channels=32, out_channels=32, kernel_size=2)
# TODO: make sure we check cnt_classes
self.dense1 = nn.Linear(1577088, 2)
def __call__(self, x):
h = x
h = self.conv1(h)
h = F.relu(h)
h = self.conv2(h)
h = F.relu(h)
h = torch.flatten(h, 1)
h = self.dense1(h)
return h
# TODO: this can be reused as well
def get_kernel(net, params, unparsed_args=None):
if params["mode"] == "inference":
net = Net4Inference(net)
else:
net = Net4Train(net)
return net
| Python | 0.000225 |
f6e93144a2471ef22883f4db935a499463a76824 | fix sytanx errors | will/0003/into_redis.py | will/0003/into_redis.py | # 第 0003 题: 将 0001 题生成的 200 个激活码(或者优惠券)保存到 Redis 非关系型数据库中。
import random, string, time, math, uuid, redis
chars = string.ascii_letters + string.digits
def gen1():
key = ''.join(random.sample(chars, 10))
#key2 = ''.join(random.choice(chars) for i in range(10))
return key
def gen2():
key = math.modf(time.time())[0]
return key
def gen3():
return uuid.uuid4()
if __name__ == '__main__':
r = redis.Redis(host='localhost', port=6379, db=0)
# r.set('name', 'will')
# print(r.get('name'))
for i in range(200):
r.sadd('code', gen1())
r.save()
| # 第 0003 题: 将 0001 题生成的 200 个激活码(或者优惠券)保存到 Redis 非关系型数据库中。
import random, string, time, math, uuid, redis
chars = string.ascii_letters + string.digits
def gen1():
key = ''.join(random.sample(chars, 10))
#key2 = ''.join(random.choice(chars) for i in range(10))
return key
def gen2():
key = math.modf(time.time())[0]
return key
def gen3():
return uuid.uuid4()
if '__name__' == '__main__':
r = redis.Redis(host='localhost', port=6379, db=0)
# r.set('name', 'will')
# print(r.get('name'))
for i in range(200):
r.sadd('code', gen1())
r.save()
| Python | 0.000031 |
3b564cdd4adbf3185d2f18ec6eedbf4b87057cf5 | Add virus fixture to conftest | conftest.py | conftest.py | from virtool.tests.fixtures.db import *
from virtool.tests.fixtures.documents import *
from virtool.tests.fixtures.client import *
from virtool.tests.fixtures.core import *
from virtool.tests.fixtures.hmm import *
from virtool.tests.fixtures.users import *
from virtool.tests.fixtures.viruses import *
def pytest_addoption(parser):
parser.addoption("--quick", action="store_true", help="Skip slower tests")
| from virtool.tests.fixtures.db import *
from virtool.tests.fixtures.documents import *
from virtool.tests.fixtures.client import *
from virtool.tests.fixtures.core import *
from virtool.tests.fixtures.hmm import *
from virtool.tests.fixtures.users import *
def pytest_addoption(parser):
parser.addoption("--quick", action="store_true", help="Skip slower tests")
| Python | 0 |
8b2827a87927e60cefb83f273b58d9aba9f9600d | improve error representation for doctests with cython | conftest.py | conftest.py | # -*- coding: utf-8 -*-
import os
import sys
import pytest
from inspect import currentframe, getframeinfo
from sphinx.application import Sphinx
import imgui
PROJECT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
sphinx = None
def project_path(*paths):
return os.path.join(PROJECT_ROOT_DIR, *paths)
class SphinxDoc(pytest.File):
def __init__(self, path, parent):
# yuck!
global sphinx
if sphinx is None:
os.environ['SPHINX_DISABLE_RENDER'] = '1'
sphinx = Sphinx(
srcdir=project_path('doc', 'source'),
confdir=project_path('doc', 'source'),
outdir=project_path('doc', 'build', 'vistest'),
doctreedir=project_path('doc', 'build', 'doctree'),
buildername='vistest',
)
super(SphinxDoc, self).__init__(path, parent)
def collect(self):
# build only specified file
sphinx.build(filenames=[self.fspath.relto(project_path())])
return [
DocItem(name, self, code)
for (name, code) in sphinx.builder.snippets
]
class DocItem(pytest.Item):
def __init__(self, name, parent, code):
super(DocItem, self).__init__(name, parent)
self.code = code
def runtest(self):
self.exec_snippet(self.code)
def exec_snippet(self, source):
code = compile(source, '<str>', 'exec')
frameinfo = getframeinfo(currentframe())
io = imgui.get_io()
io.render_callback = lambda *args, **kwargs: None
io.delta_time = 1.0 / 60.0
io.display_size = 300, 300
# setup default font
io.fonts.get_tex_data_as_rgba32()
io.fonts.add_font_default()
io.fonts.texture_id = 0 # set any texture ID to avoid segfaults
imgui.new_frame()
try:
exec(code, locals(), globals())
except Exception as err:
# note: quick and dirty way to annotate sources with error marker
lines = source.split('\n')
lines.insert(sys.exc_info()[2].tb_next.tb_lineno, "^^^")
self.code = "\n".join(lines)
raise
imgui.render()
@staticmethod
def indent(text, width=4):
return "\n".join(
">" + " " * width + line
for line in text.split('\n')
)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
return "Visual example fail: {}\n\n{}\n\n{}".format(
excinfo,
self.indent(self.code),
excinfo.getrepr(funcargs=True, style='short')
)
def reportinfo(self):
return self.fspath, 0, "testcase: %s" % self.name
class ExecException(Exception):
pass
def pytest_collect_file(parent, path):
if path.ext == '.rst' and 'source' in path.dirname:
return SphinxDoc(path, parent)
| # -*- coding: utf-8 -*-
import os
import pytest
from sphinx.application import Sphinx
import imgui
PROJECT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
sphinx = None
def project_path(*paths):
return os.path.join(PROJECT_ROOT_DIR, *paths)
class SphinxDoc(pytest.File):
def __init__(self, path, parent):
# yuck!
global sphinx
if sphinx is None:
os.environ['SPHINX_DISABLE_RENDER'] = '1'
sphinx = Sphinx(
srcdir=project_path('doc', 'source'),
confdir=project_path('doc', 'source'),
outdir=project_path('doc', 'build', 'vistest'),
doctreedir=project_path('doc', 'build', 'doctree'),
buildername='vistest',
)
super(SphinxDoc, self).__init__(path, parent)
def collect(self):
# build only specified file
sphinx.build(filenames=[self.fspath.relto(project_path())])
return [
DocItem(name, self, code)
for (name, code) in sphinx.builder.snippets
]
class DocItem(pytest.Item):
def __init__(self, name, parent, code):
super(DocItem, self).__init__(name, parent)
self.code = code
def runtest(self):
self.exec_snippet(self.code)
@staticmethod
def exec_snippet(source):
code = compile(source, '<str>', 'exec')
io = imgui.get_io()
io.render_callback = lambda *args, **kwargs: None
io.delta_time = 1.0 / 60.0
io.display_size = 300, 300
# setup default font
io.fonts.get_tex_data_as_rgba32()
io.fonts.add_font_default()
io.fonts.texture_id = 0 # set any texture ID to avoid segfaults
imgui.new_frame()
exec(code, locals(), globals())
imgui.render()
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
return "\n".join([
"Documentation test execution for code:",
self.code,
"---",
str(excinfo)
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
def pytest_collect_file(parent, path):
if path.ext == '.rst' and 'source' in path.dirname:
return SphinxDoc(path, parent)
| Python | 0 |
900de7c14607fbe2936fa682d03747916337f075 | Fix the reactor_pytest fixture. | conftest.py | conftest.py | from pathlib import Path
import pytest
def _py_files(folder):
return (str(p) for p in Path(folder).rglob('*.py'))
collect_ignore = [
# not a test, but looks like a test
"scrapy/utils/testsite.py",
# contains scripts to be run by tests/test_crawler.py::CrawlerProcessSubprocess
*_py_files("tests/CrawlerProcess")
]
for line in open('tests/ignores.txt'):
file_path = line.strip()
if file_path and file_path[0] != '#':
collect_ignore.append(file_path)
@pytest.fixture()
def chdir(tmpdir):
"""Change to pytest-provided temporary directory"""
tmpdir.chdir()
def pytest_collection_modifyitems(session, config, items):
# Avoid executing tests when executing `--flake8` flag (pytest-flake8)
try:
from pytest_flake8 import Flake8Item
if config.getoption('--flake8'):
items[:] = [item for item in items if isinstance(item, Flake8Item)]
except ImportError:
pass
@pytest.fixture(scope='class')
def reactor_pytest(request):
if not request.cls:
# doctests
return
request.cls.reactor_pytest = request.config.getoption("--reactor")
return request.cls.reactor_pytest
@pytest.fixture(autouse=True)
def only_asyncio(request, reactor_pytest):
if request.node.get_closest_marker('only_asyncio') and reactor_pytest != 'asyncio':
pytest.skip('This test is only run with --reactor-asyncio')
| from pathlib import Path
import pytest
def _py_files(folder):
return (str(p) for p in Path(folder).rglob('*.py'))
collect_ignore = [
# not a test, but looks like a test
"scrapy/utils/testsite.py",
# contains scripts to be run by tests/test_crawler.py::CrawlerProcessSubprocess
*_py_files("tests/CrawlerProcess")
]
for line in open('tests/ignores.txt'):
file_path = line.strip()
if file_path and file_path[0] != '#':
collect_ignore.append(file_path)
@pytest.fixture()
def chdir(tmpdir):
"""Change to pytest-provided temporary directory"""
tmpdir.chdir()
def pytest_collection_modifyitems(session, config, items):
# Avoid executing tests when executing `--flake8` flag (pytest-flake8)
try:
from pytest_flake8 import Flake8Item
if config.getoption('--flake8'):
items[:] = [item for item in items if isinstance(item, Flake8Item)]
except ImportError:
pass
@pytest.fixture()
def reactor_pytest(request):
request.cls.reactor_pytest = request.config.getoption("--reactor")
return request.cls.reactor_pytest
@pytest.fixture(autouse=True)
def only_asyncio(request, reactor_pytest):
if request.node.get_closest_marker('only_asyncio') and reactor_pytest != 'asyncio':
pytest.skip('This test is only run with --reactor-asyncio')
| Python | 0 |
7059622c5787f06027ae2cf978beb69df4e5cabd | Send googler profiling data. | breakpad.py | breakpad.py | # Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception.
It is only enabled when all these conditions are met:
1. hostname finishes with '.google.com'
2. main module name doesn't contain the word 'test'
3. no NO_BREAKPAD environment variable is defined
"""
import atexit
import getpass
import os
import socket
import sys
import time
import traceback
import urllib
import urllib2
# Configure these values.
DEFAULT_URL = 'https://chromium-status.appspot.com'
_REGISTERED = False
_TIME_STARTED = time.time()
def post(url, params):
"""HTTP POST with timeout when it's supported."""
kwargs = {}
if (sys.version_info[0] * 10 + sys.version_info[1]) >= 26:
kwargs['timeout'] = 4
request = urllib2.urlopen(url, urllib.urlencode(params), **kwargs)
out = request.read()
request.close()
return out
def FormatException(e):
"""Returns a human readable form of an exception.
Adds the maximum number of interesting information in the safest way."""
try:
out = repr(e)
except Exception:
out = ''
try:
out = str(e)
if isinstance(e, Exception):
# urllib exceptions, usually the HTTP headers.
if hasattr(e, 'headers'):
out += '\nHeaders: %s' % e.headers
if hasattr(e, 'url'):
out += '\nUrl: %s' % e.url
if hasattr(e, 'msg'):
out += '\nMsg: %s' % e.msg
# The web page in some urllib exceptions.
if hasattr(e, 'read') and callable(e.read):
out += '\nread(): %s' % e.read()
if hasattr(e, 'info') and callable(e.info):
out += '\ninfo(): %s' % e.info()
except Exception:
pass
return out
def SendStack(last_tb, stack, url=None, maxlen=50):
"""Sends the stack trace to the breakpad server."""
if not url:
url = DEFAULT_URL + '/breakpad'
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack[0:4096],
'user': getpass.getuser(),
'exception': FormatException(last_tb),
'host': socket.getfqdn(),
'cwd': os.getcwd(),
'version': sys.version,
}
# pylint: disable=W0702
print('\n'.join(' %s: %s' % (k, v[0:maxlen])
for k, v in params.iteritems()))
print(post(url, params))
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def SendProfiling(url=None):
try:
if not url:
url = DEFAULT_URL + '/profiling'
params = {
'argv': ' '.join(sys.argv),
'duration': time.time() - _TIME_STARTED,
'platform': sys.platform,
}
post(url, params)
except IOError:
pass
def CheckForException():
"""Runs at exit. Look if there was an exception active."""
last_value = getattr(sys, 'last_value', None)
if last_value:
if not isinstance(last_value, KeyboardInterrupt):
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(last_value, ''.join(traceback.format_tb(last_tb)))
else:
SendProfiling()
def Register():
"""Registers the callback at exit. Calling it multiple times is no-op."""
global _REGISTERED
if _REGISTERED:
return
_REGISTERED = True
atexit.register(CheckForException)
# Skip unit tests and we don't want anything from non-googler.
if (not 'test' in sys.modules['__main__'].__file__ and
not 'NO_BREAKPAD' in os.environ and
(socket.getfqdn().endswith('.google.com') or
socket.getfqdn().endswith('.chromium.org'))):
Register()
# Uncomment this line if you want to test it out.
#Register()
| # Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception.
It is only enabled when all these conditions are met:
1. hostname finishes with '.google.com'
2. main module name doesn't contain the word 'test'
3. no NO_BREAKPAD environment variable is defined
"""
import atexit
import getpass
import os
import urllib
import traceback
import socket
import sys
# Configure these values.
DEFAULT_URL = 'https://chromium-status.appspot.com/breakpad'
_REGISTERED = False
def FormatException(e):
"""Returns a human readable form of an exception.
Adds the maximum number of interesting information in the safest way."""
try:
out = repr(e)
except Exception:
out = ''
try:
out = str(e)
if isinstance(e, Exception):
# urllib exceptions, usually the HTTP headers.
if hasattr(e, 'headers'):
out += '\nHeaders: %s' % e.headers
if hasattr(e, 'url'):
out += '\nUrl: %s' % e.url
if hasattr(e, 'msg'):
out += '\nMsg: %s' % e.msg
# The web page in some urllib exceptions.
if hasattr(e, 'read') and callable(e.read):
out += '\nread(): %s' % e.read()
if hasattr(e, 'info') and callable(e.info):
out += '\ninfo(): %s' % e.info()
except Exception:
pass
return out
def SendStack(last_tb, stack, url=None, maxlen=50):
"""Sends the stack trace to the breakpad server."""
if not url:
url = DEFAULT_URL
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack[0:4096],
'user': getpass.getuser(),
'exception': FormatException(last_tb),
'host': socket.getfqdn(),
'cwd': os.getcwd(),
'version': sys.version,
}
# pylint: disable=W0702
print('\n'.join(' %s: %s' % (k, v[0:maxlen])
for k, v in params.iteritems()))
request = urllib.urlopen(url, urllib.urlencode(params))
print(request.read())
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
"""Runs at exit. Look if there was an exception active."""
last_value = getattr(sys, 'last_value', None)
if last_value and not isinstance(last_value, KeyboardInterrupt):
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(last_value, ''.join(traceback.format_tb(last_tb)))
def Register():
"""Registers the callback at exit. Calling it multiple times is no-op."""
global _REGISTERED
if _REGISTERED:
return
_REGISTERED = True
atexit.register(CheckForException)
# Skip unit tests and we don't want anything from non-googler.
if (not 'test' in sys.modules['__main__'].__file__ and
not 'NO_BREAKPAD' in os.environ and
(socket.getfqdn().endswith('.google.com') or
socket.getfqdn().endswith('.chromium.org'))):
Register()
# Uncomment this line if you want to test it out.
#Register()
| Python | 0 |
01d4279b40eb9e3029f857bf9d81d66d0314532d | Bump version to 1.5.1 | enlighten/__init__.py | enlighten/__init__.py | # -*- coding: utf-8 -*-
# Copyright 2017 - 2020 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten Progress Bar**
Provides progress bars and counters which play nice in a TTY console
"""
from enlighten.counter import Counter, SubCounter
from enlighten._manager import Manager, get_manager
__version__ = '1.5.1'
__all__ = ('Counter', 'Manager', 'SubCounter', 'get_manager')
| # -*- coding: utf-8 -*-
# Copyright 2017 - 2020 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten Progress Bar**
Provides progress bars and counters which play nice in a TTY console
"""
from enlighten.counter import Counter, SubCounter
from enlighten._manager import Manager, get_manager
__version__ = '1.5.0'
__all__ = ('Counter', 'Manager', 'SubCounter', 'get_manager')
| Python | 0 |
5c27ebd8e69802cce4afe51b917df233dcf4d972 | Add D3DCompiler_46.dll to ignore list Review URL: https://codereview.chromium.org/12217044 | site_config/config.py | site_config/config.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Declares a number of site-dependent variables for use by scripts.
A typical use of this module would be
import chromium_config as config
v8_url = config.Master.v8_url
"""
import os
from twisted.spread import banana
from config_bootstrap import config_private # pylint: disable=W0403,W0611
from config_bootstrap import Master # pylint: disable=W0403,W0611
# By default, the banana's string size limit is 640kb, which is unsufficient
# when passing diff's around. Raise it to 100megs. Do this here since the limit
# is enforced on both the server and the client so both need to raise the
# limit.
banana.SIZE_LIMIT = 100 * 1024 * 1024
def DatabaseSetup(buildmaster_config, require_dbconfig=False):
if os.path.isfile('.dbconfig'):
values = {}
execfile('.dbconfig', values)
if 'password' not in values:
raise Exception('could not get db password')
buildmaster_config['db_url'] = 'postgresql://%s:%s@%s/%s' % (
values['username'], values['password'],
values.get('hostname', 'localhost'), values['dbname'])
else:
assert(not require_dbconfig)
class Archive(config_private.Archive):
"""Build and data archival options."""
# List of symbol files to save, but not to upload to the symbol server
# (generally because they have no symbols and thus would produce an error).
# We have to list all the previous names of icudt*.dll. Now that we
# use icudt.dll, we don't need to update this file any more next time
# we pull in a new version of ICU.
symbols_to_skip_upload = [
'icudt38.dll', 'icudt42.dll', 'icudt46.dll', 'icudt.dll', 'rlz.dll',
'avcodec-53.dll', 'avcodec-54.dll', 'avformat-53.dll', 'avformat-54.dll',
'avutil-51.dll', 'd3dx9_42.dll', 'd3dx9_43.dll', 'D3DCompiler_42.dll',
'D3DCompiler_43.dll', 'D3DCompiler_46.dll', 'xinput1_3.dll',
'FlashPlayerApp.exe',]
if os.environ.get('CHROMIUM_BUILD', '') == '_google_chrome':
exes_to_skip_entirely = []
else:
# Skip any filenames (exes, symbols, etc.) starting with these strings
# entirely, typically because they're not built for this distribution.
exes_to_skip_entirely = ['rlz']
# Installer to archive.
installer_exe = 'mini_installer.exe'
# Test files to archive.
tests_to_archive = ['reliability_tests.exe',
'test_shell.exe',
'automated_ui_tests.exe',
'ui_tests.exe', # For syzygy (binary reorder) test bot
'icudt.dll',
'icudt38.dll',
'icudt42.dll',
'icudt46.dll',
'plugins\\npapi_layout_test_plugin.dll',
]
# Archive everything in these directories, using glob.
test_dirs_to_archive = ['fonts']
# Create these directories, initially empty, in the archive.
test_dirs_to_create = ['plugins', 'fonts']
# Directories in which to store built files, for dev, official, and full
# builds.
archive_host = config_private.Archive.archive_host
www_dir_base = config_private.Archive.www_dir_base
class Distributed(config_private.Distributed):
# File holding current version information.
version_file = 'VERSION'
| # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Declares a number of site-dependent variables for use by scripts.
A typical use of this module would be
import chromium_config as config
v8_url = config.Master.v8_url
"""
import os
from twisted.spread import banana
from config_bootstrap import config_private # pylint: disable=W0403,W0611
from config_bootstrap import Master # pylint: disable=W0403,W0611
# By default, the banana's string size limit is 640kb, which is unsufficient
# when passing diff's around. Raise it to 100megs. Do this here since the limit
# is enforced on both the server and the client so both need to raise the
# limit.
banana.SIZE_LIMIT = 100 * 1024 * 1024
def DatabaseSetup(buildmaster_config, require_dbconfig=False):
if os.path.isfile('.dbconfig'):
values = {}
execfile('.dbconfig', values)
if 'password' not in values:
raise Exception('could not get db password')
buildmaster_config['db_url'] = 'postgresql://%s:%s@%s/%s' % (
values['username'], values['password'],
values.get('hostname', 'localhost'), values['dbname'])
else:
assert(not require_dbconfig)
class Archive(config_private.Archive):
"""Build and data archival options."""
# List of symbol files to save, but not to upload to the symbol server
# (generally because they have no symbols and thus would produce an error).
# We have to list all the previous names of icudt*.dll. Now that we
# use icudt.dll, we don't need to update this file any more next time
# we pull in a new version of ICU.
symbols_to_skip_upload = [
'icudt38.dll', 'icudt42.dll', 'icudt46.dll', 'icudt.dll', 'rlz.dll',
'avcodec-53.dll', 'avcodec-54.dll', 'avformat-53.dll', 'avformat-54.dll',
'avutil-51.dll', 'd3dx9_42.dll', 'd3dx9_43.dll', 'D3DCompiler_42.dll',
'D3DCompiler_43.dll', 'xinput1_3.dll', 'FlashPlayerApp.exe',]
if os.environ.get('CHROMIUM_BUILD', '') == '_google_chrome':
exes_to_skip_entirely = []
else:
# Skip any filenames (exes, symbols, etc.) starting with these strings
# entirely, typically because they're not built for this distribution.
exes_to_skip_entirely = ['rlz']
# Installer to archive.
installer_exe = 'mini_installer.exe'
# Test files to archive.
tests_to_archive = ['reliability_tests.exe',
'test_shell.exe',
'automated_ui_tests.exe',
'ui_tests.exe', # For syzygy (binary reorder) test bot
'icudt.dll',
'icudt38.dll',
'icudt42.dll',
'icudt46.dll',
'plugins\\npapi_layout_test_plugin.dll',
]
# Archive everything in these directories, using glob.
test_dirs_to_archive = ['fonts']
# Create these directories, initially empty, in the archive.
test_dirs_to_create = ['plugins', 'fonts']
# Directories in which to store built files, for dev, official, and full
# builds.
archive_host = config_private.Archive.archive_host
www_dir_base = config_private.Archive.www_dir_base
class Distributed(config_private.Distributed):
# File holding current version information.
version_file = 'VERSION'
| Python | 0 |
8ef11bf983705540973badb40f7daf5a14c1173a | Fix typo | astrobin/permissions.py | astrobin/permissions.py | # Django
from django.db.models import Q
# Third party apps
from pybb.permissions import DefaultPermissionHandler
# AstroBin apps
from astrobin_apps_groups.models import Group
class CustomForumPermissions(DefaultPermissionHandler):
    """pybb permission handler aware of AstroBin groups.

    A forum may be linked to an ``astrobin_apps_groups.models.Group``;
    when that group is not public, only the group's owner and its members
    may see the forum, its topics, and post in them.
    """

    # Disable forum polls
    def may_create_poll(self, user):
        # Polls are disabled site-wide, for every user.
        return False

    def may_view_forum(self, user, forum):
        # Start from pybb's default answer, then tighten it for group forums.
        may = super(CustomForumPermissions, self).may_view_forum(user, forum)
        try:
            if forum.group is not None:
                if user.is_authenticated():
                    # Group forum: visible when the group is public, or the
                    # user owns / belongs to the group.
                    return may and (
                        forum.group.public or \
                        user == forum.group.owner or \
                        user in forum.group.members.all())
                else:
                    # Anonymous visitors only see public-group forums.
                    return may and forum.group.public
        except Group.DoesNotExist:
            # Forum has no associated group: keep the default answer.
            pass
        return may

    def filter_forums(self, user, qs):
        # Queryset-level counterpart of may_view_forum.
        f = super(CustomForumPermissions, self).filter_forums(user, qs)
        if user.is_authenticated():
            f = f.filter(
                Q(group = None) |
                Q(group__public = True) |
                Q(group__owner = user) |
                Q(group__members = user)).distinct()
        else:
            f = f.filter(Q(group = None) | Q(group__public = True))
        return f

    def may_view_topic(self, user, topic):
        # Not using super because of:
        # https://github.com/hovel/pybbm/issues/241
        if user.is_superuser:
            return True
        if not user.is_staff and (topic.forum.hidden or topic.forum.category.hidden):
            return False  # only staff may see hidden forum / category
        may = True
        try:
            if topic.forum.group is not None:
                if user.is_authenticated():
                    may = topic.forum.group.public or \
                        user == topic.forum.group.owner or \
                        user in topic.forum.group.members.all()
                else:
                    may = topic.forum.group.public
        except Group.DoesNotExist:
            pass
        if topic.on_moderation:
            # Topics under moderation are visible only to their author and to
            # the forum's moderators.
            if user.is_authenticated():
                may = may and (user == topic.user or user in topic.forum.moderators.all())
        return may

    def filter_topics(self, user, qs):
        # Queryset-level counterpart of may_view_topic for group visibility.
        f = super(CustomForumPermissions, self).filter_topics(user, qs)
        if user.is_authenticated():
            f = f.filter(
                Q(forum__group = None) |
                Q(forum__group__public = True) |
                Q(forum__group__owner = user) |
                Q(forum__group__members = user)).distinct()
        else:
            f = f.filter(Q(forum__group = None) | Q(forum__group__public = True))
        return f

    def may_create_topic(self, user, forum):
        may = super(CustomForumPermissions, self).may_create_topic(user, forum)
        try:
            if forum.group is not None:
                if forum.group.public:
                    return may
                # Non-public group: only the owner and members may start topics.
                return may and (
                    user == forum.group.owner or
                    user in forum.group.members.all())
        except Group.DoesNotExist:
            pass
        return may

    def may_create_post(self, user, topic):
        # Posting in a topic requires the same rights as creating a topic in
        # the topic's forum.
        may = super(CustomForumPermissions, self).may_create_post(user, topic)
        return may and self.may_create_topic(user, topic.forum)
| # Django
from django.db.models import Q
# Third party apps
from pybb.permissions import DefaultPermissionHandler
# AstroBin apps
from astrobin_apps_groups.models import Group
class CustomForumPermissions(DefaultPermissionHandler):
    """pybb permission handler aware of AstroBin groups.

    A forum may be linked to an ``astrobin_apps_groups.models.Group``;
    when that group is not public, only the group's owner and its members
    may see the forum, its topics, and post in them.
    """

    # Disable forum polls
    def may_create_poll(self, user):
        # Polls are disabled site-wide, for every user.
        return False

    def may_view_forum(self, user, forum):
        # Start from pybb's default answer, then tighten it for group forums.
        may = super(CustomForumPermissions, self).may_view_forum(user, forum)
        try:
            if forum.group is not None:
                if user.is_authenticated():
                    # Group forum: visible when the group is public, or the
                    # user owns / belongs to the group.
                    return may and (
                        forum.group.public or \
                        user == forum.group.owner or \
                        user in forum.group.members.all())
                else:
                    # Anonymous visitors only see public-group forums.
                    return may and forum.group.public
        except Group.DoesNotExist:
            # Forum has no associated group: keep the default answer.
            pass
        return may

    def filter_forums(self, user, qs):
        # Queryset-level counterpart of may_view_forum.
        f = super(CustomForumPermissions, self).filter_forums(user, qs)
        if user.is_authenticated():
            f = f.filter(
                Q(group = None) |
                Q(group__public = True) |
                Q(group__owner = user) |
                Q(group__members = user)).distinct()
        else:
            f = f.filter(Q(group = None) | Q(group__public = True))
        return f

    def may_view_topic(self, user, topic):
        # Not using super because of:
        # https://github.com/hovel/pybbm/issues/241
        if user.is_superuser:
            return True
        if not user.is_staff and (topic.forum.hidden or topic.forum.category.hidden):
            return False  # only staff may see hidden forum / category
        may = True
        try:
            if topic.forum.group is not None:
                if user.is_authenticated():
                    # FIX: was ``topic.forum.grouop.owner`` — the misspelled
                    # attribute raised AttributeError for every authenticated
                    # user viewing a topic in a group forum.
                    may = topic.forum.group.public or \
                        user == topic.forum.group.owner or \
                        user in topic.forum.group.members.all()
                else:
                    may = topic.forum.group.public
        except Group.DoesNotExist:
            pass
        if topic.on_moderation:
            # Topics under moderation are visible only to their author and to
            # the forum's moderators.
            if user.is_authenticated():
                may = may and (user == topic.user or user in topic.forum.moderators.all())
        return may

    def filter_topics(self, user, qs):
        # Queryset-level counterpart of may_view_topic for group visibility.
        f = super(CustomForumPermissions, self).filter_topics(user, qs)
        if user.is_authenticated():
            f = f.filter(
                Q(forum__group = None) |
                Q(forum__group__public = True) |
                Q(forum__group__owner = user) |
                Q(forum__group__members = user)).distinct()
        else:
            f = f.filter(Q(forum__group = None) | Q(forum__group__public = True))
        return f

    def may_create_topic(self, user, forum):
        may = super(CustomForumPermissions, self).may_create_topic(user, forum)
        try:
            if forum.group is not None:
                if forum.group.public:
                    return may
                # Non-public group: only the owner and members may start topics.
                return may and (
                    user == forum.group.owner or
                    user in forum.group.members.all())
        except Group.DoesNotExist:
            pass
        return may

    def may_create_post(self, user, topic):
        # Posting in a topic requires the same rights as creating a topic in
        # the topic's forum.
        may = super(CustomForumPermissions, self).may_create_post(user, topic)
        return may and self.may_create_topic(user, topic.forum)
| Python | 0.999999 |
b54d7b8079bf414b1fe79061b33e41c6350707d6 | use integer instead of string | mopidy_rotaryencoder/__init__.py | mopidy_rotaryencoder/__init__.py | from __future__ import unicode_literals
import logging
import os
from mopidy import config, ext
__version__ = '0.1.0'
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
    """Mopidy extension wiring up the rotary-encoder frontend."""

    dist_name = 'Mopidy-RotaryEncoder'
    ext_name = 'rotaryencoder'
    version = __version__

    def get_default_config(self):
        # Read the ext.conf file bundled next to this module.
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(Extension, self).get_config_schema()
        # GPIO pin numbers for the encoder's data/clock lines and push switch,
        # validated as integers.
        schema['datapin'] = config.Integer()
        schema['clkpin'] = config.Integer()
        schema['swpin'] = config.Integer()
        return schema

    def setup(self, registry):
        # Imported lazily so the GPIO-dependent frontend only loads on setup.
        from .frontend import RotaryEncoderFrontend
        registry.add('frontend', RotaryEncoderFrontend)
| from __future__ import unicode_literals
import logging
import os
from mopidy import config, ext
__version__ = '0.1.0'
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
    """Mopidy extension wiring up the rotary-encoder frontend."""

    dist_name = 'Mopidy-RotaryEncoder'
    ext_name = 'rotaryencoder'
    version = __version__

    def get_default_config(self):
        # Read the ext.conf file bundled next to this module.
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(Extension, self).get_config_schema()
        # FIX: datapin/clkpin/swpin are GPIO pin numbers.  Validating them as
        # config.String() accepted arbitrary text and deferred failures to
        # runtime; config.Integer() rejects non-numeric values at load time.
        schema['datapin'] = config.Integer()
        schema['clkpin'] = config.Integer()
        schema['swpin'] = config.Integer()
        return schema

    def setup(self, registry):
        # Imported lazily so the GPIO-dependent frontend only loads on setup.
        from .frontend import RotaryEncoderFrontend
        registry.add('frontend', RotaryEncoderFrontend)
| Python | 0.027486 |
c2a79d8cbbb174530991d8b59578169ee9b2be44 | use absolute paths for external scripts in Spidermonkey wrapper | wrapper_spidermonkey.py | wrapper_spidermonkey.py | #!/usr/bin/env python
"""
wrapper for JSLint
requires Spidermonkey
Usage:
$ wrapper_spidermonkey.py <filepath>
TODO:
* support for JSLint options
"""
import sys
import os
import spidermonkey
from simplejson import loads as json
cwd = sys.path[0]
lint_path = os.path.join(cwd, "fulljslint.js")
json_path = os.path.join(cwd, "json2.js") # XXX: built in from Spidermonkey 1.8
def main(args=None):
    """Lint the file named by ``args[1]`` and print the report to stdout.

    Returns JSLINT's status (True if clean, False otherwise).
    NOTE(review): the ``args=None`` default raises TypeError if main() is
    called without arguments; callers pass sys.argv (args[0] = program name).
    """
    filepath = args[1]
    status, errors = lint(filepath)
    # Python 2 print statement.
    print format(errors, filepath)
    return status
def lint(filepath):
    """Run JSLint over *filepath* inside a fresh Spidermonkey context.

    Returns ``(status, errors)``: *status* is JSLINT's verdict (True if
    clean, False otherwise) and *errors* is a JSON string of JSLINT.errors.
    """
    rt = spidermonkey.Runtime()
    cx = rt.new_context()
    options = {} # TODO: read from argument
    # Expose the options object and a file reader to the JS side.
    cx.add_global("options", options)
    cx.add_global("getFileContents", get_file_contents)
    # load JavaScript code
    for path in (lint_path, json_path):
        cx.execute('eval(getFileContents("%s"));' % path)
    cx.execute('var code = getFileContents("%s");' % filepath)
    # lint code
    status = cx.execute("JSLINT(code, options);") # True if clean, False otherwise
    errors = cx.execute("JSON.stringify(JSLINT.errors);");
    # XXX: errors incomplete (e.g. not reporting missing var)!?
    return status, errors
def format(errors, file):
    """
    convert JSLint errors object into report using standard error format

    <filepath>:<line>:<column>:<message>
    """
    # JSLint reports 0-based positions; the report uses 1-based ones.
    report = []
    for error in json(errors):
        # Members id, evidence, raw, a, b, c, d of each error are ignored.
        fields = [
            file,
            str(error["line"] + 1),
            str(error["character"] + 1),
            error["reason"],
        ]
        report.append(":".join(fields))
    return "\n".join(report)
def get_file_contents(filepath):
    """Read and return the entire text of *filepath*."""
    with open(filepath) as source:
        return source.read()
if __name__ == "__main__":
status = not main(sys.argv)
sys.exit(status)
| #!/usr/bin/env python
"""
wrapper for JSLint
requires Spidermonkey
Usage:
$ wrapper_spidermonkey.py <filepath>
TODO:
* support for JSLint options
"""
import sys
import spidermonkey
from simplejson import loads as json
lint_path = "fulljslint.js"
json_path = "json2.js"
def main(args=None):
    """Lint the file named by ``args[1]`` and print the report to stdout.

    Returns JSLINT's status (True if clean, False otherwise).
    NOTE(review): the ``args=None`` default raises TypeError if main() is
    called without arguments; callers pass sys.argv (args[0] = program name).
    """
    filepath = args[1]
    status, errors = lint(filepath)
    # Python 2 print statement.
    print format(errors, filepath)
    return status
def lint(filepath):
    """Run JSLint over *filepath* inside a fresh Spidermonkey context.

    Returns ``(status, errors)``: *status* is JSLINT's verdict (True if
    clean, False otherwise) and *errors* is a JSON string of JSLINT.errors.
    NOTE(review): lint_path/json_path are relative here, so this only works
    when the process is started from the script's own directory.
    """
    rt = spidermonkey.Runtime()
    cx = rt.new_context()
    options = {} # TODO: read from argument
    # Expose the options object and a file reader to the JS side.
    cx.add_global("options", options)
    cx.add_global("getFileContents", get_file_contents)
    # load JavaScript code
    for path in (lint_path, json_path):
        cx.execute('eval(getFileContents("%s"));' % path)
    cx.execute('var code = getFileContents("%s");' % filepath)
    # lint code
    status = cx.execute("JSLINT(code, options);") # True if clean, False otherwise
    errors = cx.execute("JSON.stringify(JSLINT.errors);");
    # XXX: errors incomplete (e.g. not reporting missing var)!?
    return status, errors
def format(errors, file):
    """
    Render JSLint's error objects as ``<filepath>:<line>:<column>:<message>``
    lines, one per error.
    """
    def as_line(error):
        # JSLint positions are 0-based; reports are conventionally 1-based.
        # Members id, evidence, raw, a, b, c, d are ignored.
        return ":".join([file,
                         str(error["line"] + 1),
                         str(error["character"] + 1),
                         error["reason"]])
    return "\n".join(as_line(error) for error in json(errors))
def get_file_contents(filepath):
    """Return the full contents of the file at *filepath*."""
    handle = open(filepath)
    try:
        return handle.read()
    finally:
        handle.close()
if __name__ == "__main__":
status = not main(sys.argv)
sys.exit(status)
| Python | 0 |
2271131d5c2794eeba256a9d9547fa925f7bdf73 | bump __version__ | matplotlib2tikz/__init__.py | matplotlib2tikz/__init__.py | # -*- coding: utf-8 -*-
#
'''Script to convert Matplotlib generated figures into TikZ/PGFPlots figures.
'''
__author__ = 'Nico Schlömer'
__email__ = 'nico.schloemer@gmail.com'
__copyright__ = 'Copyright (c) 2010-2016, %s <%s>' % (__author__, __email__)
__credits__ = []
__license__ = 'MIT License'
__version__ = '0.5.7'
__maintainer__ = 'Nico Schlömer'
__status__ = 'Production'
from matplotlib2tikz.save import save
| # -*- coding: utf-8 -*-
#
'''Script to convert Matplotlib generated figures into TikZ/PGFPlots figures.
'''
__author__ = 'Nico Schlömer'
__email__ = 'nico.schloemer@gmail.com'
__copyright__ = 'Copyright (c) 2010-2016, %s <%s>' % (__author__, __email__)
__credits__ = []
__license__ = 'MIT License'
__version__ = '0.5.6'
__maintainer__ = 'Nico Schlömer'
__status__ = 'Production'
from matplotlib2tikz.save import save
| Python | 0.000017 |
35d3284a1242bdeb6ea3aec128deb92b3138106b | Add the ability to specify the Cakefile parent directory. | flask_cake/cake.py | flask_cake/cake.py | from __future__ import absolute_import
import os
import subprocess
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
class Cake(object):
    # NOTE(review): tasks=["build"] is a shared mutable default; harmless
    # here because it is never mutated, but worth confirming.
    def __init__(self, app=None, tasks=["build"], cakeparent="coffee"):
        """Initialize a new instance of Flask-Cake.

        :param app: The Flask app
        :param tasks: A string containing a cake "task" to execute or a list
                      of multiple cake tasks to run. By default, this will run
                      ``cake build``.
        :param str cakeparent: The directory where the Cakefile is located
                               relative to Flask's `static_path`. By default,
                               this is `coffee/`, meaning that the Cakefile is
                               located at `static_path/coffee/Cakefile`.
        """
        self.init_app(app, tasks, cakeparent)

    def init_app(self, app, tasks, cakeparent):
        """Store the configuration and start the filesystem watcher.

        Parameters are identical to :meth:`__init__`.
        """
        self.app = app
        self.tasks = tasks
        self.cakeparent = cakeparent
        self._watchdog()

    def _watchdog(self):
        """Runs Watchdog to listen to filesystem events.

        When first run, the `Cakefile` is touched to trigger the
        initial build.
        """
        # Flask renamed static_path to static_url_path in 0.7; fall back for
        # older versions and warn.
        if not hasattr(self.app, 'static_url_path'):
            from warnings import warn
            warn(
                DeprecationWarning('static_path is called static_url_path since Flask 0.7'),
                stacklevel=2
            )
            static_url_path = self.app.static_path
        else:
            static_url_path = self.app.static_url_path
        static_dir = self.app.root_path + static_url_path
        cakedir = os.path.join(static_dir, self.cakeparent)
        # Setup Watchdog
        handler = Events(cakedir=cakedir, tasks=self.tasks)
        observer = Observer(timeout=5000)
        observer.schedule(handler, path=cakedir, recursive=True)
        observer.start()
        # "Touch" the Cakefile to signal the initial build
        cakefile = os.path.join(cakedir, "Cakefile")
        # NOTE(review): file() is Python 2 only; under Python 3 this raises
        # NameError — confirm the supported interpreter.
        with file(cakefile, 'a'):
            os.utime(cakefile, None)
class Events(FileSystemEventHandler):
    """Handler for all filesystem events."""

    def __init__(self, cakedir, tasks):
        super(Events, self).__init__()
        # Directory containing the Cakefile; used as cwd for the cake process.
        self._cakedir = cakedir
        # Either a single task name or a list of task names.
        self._tasks = tasks

    def on_any_event(self, event):
        """Run the configured cake task(s) whenever anything changes."""
        nullfh = open(os.devnull, "w")
        # Check to see if the tasks are specified as a single task or multiple
        # tasks.
        # NOTE(review): basestring is Python 2 only.
        if isinstance(self._tasks, basestring):
            tasks = [self._tasks]
        else:
            tasks = self._tasks
        # Run `cake build` and send all stdout to `/dev/null`.
        p = subprocess.Popen(["cake"] + tasks, cwd=self._cakedir, stdout=nullfh)
        p.wait()
        nullfh.close()
| from __future__ import absolute_import
import os
import subprocess
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
class Cake(object):
    # NOTE(review): tasks=["build"] is a shared mutable default; harmless
    # here because it is never mutated, but worth confirming.
    def __init__(self, app=None, tasks=["build"]):
        """Initialize a new instance of Flask-Cake.

        :param app: The Flask app
        :param tasks: A string containing a cake "task" to execute or a list
                      of multiple cake tasks to run. By default, this will run
                      ``cake build``.
        """
        self.init_app(app, tasks)

    def init_app(self, app, tasks):
        """Store the configuration and start the filesystem watcher.

        Parameters are identical to :meth:`__init__`.
        """
        self.app = app
        self.tasks = tasks
        self._watchdog()

    def _watchdog(self):
        """Runs Watchdog to listen to filesystem events.

        The directory currently requires the CoffeeScript files to be located
        in `static/coffee`. This directory should contain the `Cakefile`. When
        first run, it touches the `Cakefile` to trigger the initial build.
        """
        # Flask renamed static_path to static_url_path in 0.7; fall back for
        # older versions and warn.
        if not hasattr(self.app, 'static_url_path'):
            from warnings import warn
            warn(
                DeprecationWarning('static_path is called static_url_path since Flask 0.7'),
                stacklevel=2
            )
            static_url_path = self.app.static_path
        else:
            static_url_path = self.app.static_url_path
        static_dir = self.app.root_path + static_url_path
        cakedir = os.path.join(static_dir, "coffee")
        # Setup Watchdog
        handler = Events(cakedir=cakedir, tasks=self.tasks)
        observer = Observer(timeout=5000)
        observer.schedule(handler, path=cakedir, recursive=True)
        observer.start()
        # "Touch" the Cakefile to signal the initial build
        cakefile = os.path.join(cakedir, "Cakefile")
        # NOTE(review): file() is Python 2 only; under Python 3 this raises
        # NameError — confirm the supported interpreter.
        with file(cakefile, 'a'):
            os.utime(cakefile, None)
class Events(FileSystemEventHandler):
    """Handler for all filesystem events."""

    def __init__(self, cakedir, tasks):
        super(Events, self).__init__()
        # Directory containing the Cakefile; used as cwd for the cake process.
        self._cakedir = cakedir
        # Either a single task name or a list of task names.
        self._tasks = tasks

    def on_any_event(self, event):
        """Run the configured cake task(s) whenever anything changes."""
        nullfh = open(os.devnull, "w")
        # Check to see if the tasks are specified as a single task or multiple
        # tasks.
        # NOTE(review): basestring is Python 2 only.
        if isinstance(self._tasks, basestring):
            tasks = [self._tasks]
        else:
            tasks = self._tasks
        # Run `cake build` and send all stdout to `/dev/null`.
        p = subprocess.Popen(["cake"] + tasks, cwd=self._cakedir, stdout=nullfh)
        p.wait()
        nullfh.close()
| Python | 0 |
cd25fd1bd40a98886b92f5e3b357ee0ab2796c7b | add /query route, with plain text for mongo | flaskr/__init__.py | flaskr/__init__.py | #!/usr/bin/python3
# -*- coding: latin-1 -*-
import os
import sys
# import psycopg2
import json
from bson import json_util
from pymongo import MongoClient
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
def create_app():
    """Construct and return the Flask application object."""
    flask_app = Flask(__name__)
    return flask_app
app = create_app()
# REPLACE WITH YOUR DATABASE NAME
MONGODATABASE = "dbEscuchas"
MONGOSERVER = "localhost"
MONGOPORT = 27017
client = MongoClient(MONGOSERVER, MONGOPORT)
mongodb = client[MONGODATABASE]
''' # Uncomment for postgres connection
# REPLACE WITH YOUR DATABASE NAME, USER AND PASS
POSTGRESDATABASE = "mydatabase"
POSTGRESUSER = "myuser"
POSTGRESPASS = "mypass"
postgresdb = psycopg2.connect(
database=POSTGRESDATABASE,
user=POSTGRESUSER,
password=POSTGRESPASS)
'''
#Cambiar por Path Absoluto en el servidor
QUERIES_FILENAME = '/var/www/FlaskApp/queries'
@app.route("/")
def home():
    """Render the index page listing the saved example queries."""
    # QUERIES_FILENAME points at a JSON document: a list of objects with
    # name/database/description/query keys.
    with open(QUERIES_FILENAME, 'r', encoding='utf-8') as queries_file:
        json_file = json.load(queries_file)
        pairs = [(x["name"],
                  x["database"],
                  x["description"],
                  x["query"]) for x in json_file]
        return render_template('file.html', results=pairs)
@app.route("/mongo")
def mongo():
    """Run a user-supplied Mongo expression and render it as HTML."""
    query = request.args.get("query")
    if not query is None:
        # SECURITY: eval() of a client-supplied string is arbitrary code
        # execution — this endpoint must never face untrusted clients.
        results = eval('mongodb.'+query)
        results = json_util.dumps(results, sort_keys=True, indent=4)
        if "find" in query:
            return render_template('mongo.html', results=results)
        # NOTE(review): a query without "find" falls through and returns
        # None, which Flask rejects — confirm whether this is intended.
    else:
        return "no query"
@app.route("/query")
def ruta_query():
    """Like /mongo but returns the raw JSON text instead of an HTML page."""
    query = request.args.get("query")
    if not query is None:
        # SECURITY: eval() of a client-supplied string is arbitrary code
        # execution — this endpoint must never face untrusted clients.
        results = eval('mongodb.'+query)
        results = json_util.dumps(results, sort_keys=True, indent=4)
        if "find" in query:
            # return render_template('mongo.html', results=results)
            return str(results);
        # NOTE(review): a query without "find" falls through and returns
        # None, which Flask rejects — confirm whether "{}" was intended.
    else:
        return "{}" # No query
@app.route("/postgres")
def postgres():
    """Disabled Postgres endpoint; kept for when psycopg2 is re-enabled."""
    # Everything after this return is dead code (the psycopg2 connection
    # above is commented out).
    return "Postgres API is not available"
    query = request.args.get("query")
    if not query is None:
        cursor = postgresdb.cursor()
        # SECURITY: executes the raw client-supplied SQL string — do not
        # enable for untrusted clients.
        cursor.execute(query)
        results = [[a for a in result] for result in cursor]
        print(results)
        return render_template('postgres.html', results=results)
    else:
        return "no query"
@app.route("/example")
def example():
    # Static demo page; no data is queried.
    return render_template('example.html')
if __name__ == "__main__":
app.run()
| #!/usr/bin/python3
# -*- coding: latin-1 -*-
import os
import sys
# import psycopg2
import json
from bson import json_util
from pymongo import MongoClient
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
def create_app():
    """Construct and return the Flask application object."""
    app = Flask(__name__)
    return app
app = create_app()
# REPLACE WITH YOUR DATABASE NAME
MONGODATABASE = "dbEscuchas"
MONGOSERVER = "localhost"
MONGOPORT = 27017
client = MongoClient(MONGOSERVER, MONGOPORT)
mongodb = client[MONGODATABASE]
''' # Uncomment for postgres connection
# REPLACE WITH YOUR DATABASE NAME, USER AND PASS
POSTGRESDATABASE = "mydatabase"
POSTGRESUSER = "myuser"
POSTGRESPASS = "mypass"
postgresdb = psycopg2.connect(
database=POSTGRESDATABASE,
user=POSTGRESUSER,
password=POSTGRESPASS)
'''
#Cambiar por Path Absoluto en el servidor
QUERIES_FILENAME = '/var/www/FlaskApp/queries'
@app.route("/")
def home():
    """Render the index page listing the saved example queries."""
    # QUERIES_FILENAME points at a JSON document: a list of objects with
    # name/database/description/query keys.
    with open(QUERIES_FILENAME, 'r', encoding='utf-8') as queries_file:
        json_file = json.load(queries_file)
        pairs = [(x["name"],
                  x["database"],
                  x["description"],
                  x["query"]) for x in json_file]
        return render_template('file.html', results=pairs)
@app.route("/mongo")
def mongo():
    """Run a user-supplied Mongo expression and render it as HTML."""
    query = request.args.get("query")
    if not query is None:
        # SECURITY: eval() of a client-supplied string is arbitrary code
        # execution — this endpoint must never face untrusted clients.
        results = eval('mongodb.'+query)
        results = json_util.dumps(results, sort_keys=True, indent=4)
        if "find" in query:
            return render_template('mongo.html', results=results)
        # NOTE(review): a query without "find" falls through and returns
        # None, which Flask rejects — confirm whether this is intended.
    else:
        return "no query"
@app.route("/postgres")
def postgres():
    """Disabled Postgres endpoint; kept for when psycopg2 is re-enabled."""
    # Everything after this return is dead code (the psycopg2 connection
    # above is commented out).
    return "Postgres API is not available"
    query = request.args.get("query")
    if not query is None:
        cursor = postgresdb.cursor()
        # SECURITY: executes the raw client-supplied SQL string — do not
        # enable for untrusted clients.
        cursor.execute(query)
        results = [[a for a in result] for result in cursor]
        print(results)
        return render_template('postgres.html', results=results)
    else:
        return "no query"
@app.route("/example")
def example():
    # Static demo page; no data is queried.
    return render_template('example.html')
if __name__ == "__main__":
app.run()
| Python | 0 |
af4b53a85aec95c9ec7bf20b1c019ec0f397eacb | Bump version to 0.2.2 | flavio/_version.py | flavio/_version.py | __version__='0.2.2'
| __version__='0.2.1'
| Python | 0.000002 |
b2b5e91649cdfadda63e11dcfe5ef5c105d28f23 | Add timeout cli arg | pg_bawler/listener.py | pg_bawler/listener.py | #!/usr/bin/env python
'''
Listen on given channel for notification.
$ python -m pg_bawler.listener mychannel
If you installed notification trigger with ``pg_bawler.gen_sql`` then
channel is the same as ``tablename`` argument.
'''
import argparse
import asyncio
import importlib
import logging
import sys
import pg_bawler.core
LOGGER = logging.getLogger('pg_bawler.listener')
class DefaultHandler:
    """Default notification handler: logs each notification and counts them."""

    def __init__(self):
        # Running total of notifications handled by this instance.
        self.count = 0

    async def handle_notification(self, notification):
        """Log one received notification (pid, channel, payload)."""
        self.count += 1
        notification_number = self.count
        LOGGER.info(
            'Received notification #%s pid %s from channel %s: %s',
            notification_number, notification.pid,
            notification.channel, notification.payload)
def get_default_cli_args_parser():
    """Build the argparse parser for the pg_bawler listener CLI."""
    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)
    # (flag names, add_argument keyword options) — registered in order below.
    option_specs = [
        (('--log-level',), dict(
            metavar='LOG_LEVEL',
            default='INFO',
            help='Log level. One of: FATAL, CIRTICAL, ERROR, WARNING, INFO, DEBUG')),
        (('--dsn',), dict(
            metavar='DSN',
            required=True,
            help='Connection string. e.g. `dbname=test user=postgres`')),
        (('--timeout',), dict(
            metavar='TIMEOUT', default=5, type=int,
            help='Timeout for getting notification.'
                 ' If this timeout passes pg_bawler checks'
                 ' connection if it\'s alive')),
        (('--handler',), dict(
            metavar='HANDLER', default='pg_bawler.listener:default_handler',
            help='Module and name of python callable.'
                 ' e.g. `pg_bawler.listener:default_handler`')),
        (('channel',), dict(
            metavar='CHANNEL', type=str,
            help='Name of Notify/Listen channel to listen on.')),
    ]
    for names, options in option_specs:
        arg_parser.add_argument(*names, **options)
    return arg_parser
def resolve_handler(handler_str):
    """Resolve a ``module:callable`` string to the callable object."""
    module_name, attr_name = handler_str.split(':')
    target_module = importlib.import_module(module_name)
    return getattr(target_module, attr_name)
default_handler = DefaultHandler().handle_notification
class NotificationListener(
    pg_bawler.core.BawlerBase,
    pg_bawler.core.ListenerMixin
):
    """Composition of BawlerBase and ListenerMixin (both from
    pg_bawler.core); adds no behavior of its own."""
    pass
def main(*argv):
    """CLI entry point: parse arguments, configure logging, listen forever.

    Accepts argv for tests; falls back to ``sys.argv[1:]``.
    """
    args = get_default_cli_args_parser().parse_args(argv or sys.argv[1:])
    try:
        # The CLI documents upper-case level names; normalise defensively.
        logging.basicConfig(
            format='[%(asctime)s][%(name)s][%(levelname)s]: %(message)s',
            level=args.log_level.upper())
    except TypeError:
        # FIX: user-facing message said "Worng".
        sys.exit('Wrong log level. --help for more info.')
    LOGGER.info('Starting pg_bawler listener for channel: %s', args.channel)
    loop = asyncio.get_event_loop()
    listener = NotificationListener(connection_params={'dsn': args.dsn})
    # After this many seconds without a notification the connection is
    # re-checked (see the --timeout help text).
    listener.listen_timeout = args.timeout
    listener.register_handler(resolve_handler(args.handler))
    loop.run_until_complete(listener.register_channel(args.channel))
    loop.run_until_complete(listener.listen())
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
'''
Listen on given channel for notification.
$ python -m pg_bawler.listener mychannel
If you installed notification trigger with ``pg_bawler.gen_sql`` then
channel is the same as ``tablename`` argument.
'''
import argparse
import asyncio
import importlib
import logging
import sys
import pg_bawler.core
LOGGER = logging.getLogger('pg_bawler.listener')
class DefaultHandler:
    """Default notification handler: logs each notification and counts them."""

    def __init__(self):
        # Running total of notifications handled by this instance.
        self.count = 0

    async def handle_notification(self, notification):
        """Log one received notification (pid, channel, payload)."""
        self.count += 1
        notification_number = self.count
        LOGGER.info(
            'Received notification #%s pid %s from channel %s: %s',
            notification_number, notification.pid,
            notification.channel, notification.payload)
def get_default_cli_args_parser():
    """Build the argparse parser for the pg_bawler listener CLI.

    Takes no arguments and returns the parser; callers invoke parse_args().
    NOTE(review): the help text misspells "CIRTICAL"/"cirtical"; also the
    lower-case default 'info' is passed straight to logging.basicConfig by
    main(), which only accepts upper-case level names — confirm.
    """
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '--log-level',
        metavar='LOG_LEVEL',
        default='info',
        help='Log level. One of: fatal, cirtical, error, warning, info, debug')
    parser.add_argument(
        '--dsn',
        metavar='DSN',
        help='Connection string. e.g. `dbname=test user=postgres`')
    parser.add_argument(
        '--handler',
        metavar='HANDLER', default='pg_bawler.listener:default_handler',
        help=(
            'Module and name of python callable.'
            ' e.g. `pg_bawler.listener:default_handler`'))
    parser.add_argument(
        'channel',
        metavar='CHANNEL', type=str,
        help='Name of Notify/Listen channel to listen on.')
    return parser
def resolve_handler(handler_str):
    """Resolve a ``module:callable`` string to the callable object."""
    module_name, callable_name = handler_str.split(':')
    return getattr(importlib.import_module(module_name), callable_name)
default_handler = DefaultHandler().handle_notification
class NotificationListener(
    pg_bawler.core.BawlerBase,
    pg_bawler.core.ListenerMixin
):
    """Composition of BawlerBase and ListenerMixin (both from
    pg_bawler.core); adds no behavior of its own."""
    pass
def main(*argv):
    """CLI entry point: parse arguments, configure logging, listen forever.

    Accepts argv for tests; falls back to ``sys.argv[1:]``.
    """
    # FIX: the parser factory takes no arguments — argv belongs to
    # parse_args().  Previously get_default_cli_args_parser(argv...) raised
    # TypeError before any argument was parsed.
    args = get_default_cli_args_parser().parse_args(argv or sys.argv[1:])
    try:
        # FIX: logging only accepts upper-case level names ('INFO'), while
        # the CLI documents lower-case values; normalise before configuring.
        logging.basicConfig(
            format='[%(asctime)s][%(name)s][%(levelname)s]: %(message)s',
            level=args.log_level.upper())
    except TypeError:
        # FIX: user-facing message said "Worng".
        sys.exit('Wrong log level. --help for more info.')
    LOGGER.info('Starting pg_bawler listener for channel: %s', args.channel)
    loop = asyncio.get_event_loop()
    listener = NotificationListener(connection_params={'dsn': args.dsn})
    # Re-check the connection after 5 quiet seconds.
    listener.listen_timeout = 5
    listener.register_handler(resolve_handler(args.handler))
    loop.run_until_complete(listener.register_channel(args.channel))
    loop.run_until_complete(listener.listen())
if __name__ == '__main__':
sys.exit(main())
| Python | 0.000001 |
8602d984a9caf73dc40168e0e7937c9e930d035b | Stop $PYTHONPATH from messing up the search path for DLLs. CURA-3418 Cura build on Win 64 fails due to $PYTHONPATH | cura_app.py | cura_app.py | #!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import os
import sys
import platform
from UM.Platform import Platform
#WORKAROUND: GITHUB-88 GITHUB-385 GITHUB-612
if Platform.isLinux(): # Needed for platform.linux_distribution, which is not available on Windows and OSX
# For Ubuntu: https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826
if platform.linux_distribution()[0] in ("debian", "Ubuntu", "LinuxMint"): # TODO: Needs a "if X11_GFX == 'nvidia'" here. The workaround is only needed on Ubuntu+NVidia drivers. Other drivers are not affected, but fine with this fix.
import ctypes
from ctypes.util import find_library
libGL = find_library("GL")
ctypes.CDLL(libGL, ctypes.RTLD_GLOBAL)
# When frozen, i.e. installer version, don't let PYTHONPATH mess up the search path for DLLs.
if Platform.isWindows() and hasattr(sys, "frozen"):
try:
del os.environ["PYTHONPATH"]
except KeyError: pass
#WORKAROUND: GITHUB-704 GITHUB-708
# It looks like setuptools creates a .pth file in
# the default /usr/lib which causes the default site-packages
# to be inserted into sys.path before PYTHONPATH.
# This can cause issues such as having libsip loaded from
# the system instead of the one provided with Cura, which causes
# incompatibility issues with libArcus
if "PYTHONPATH" in os.environ.keys(): # If PYTHONPATH is used
PYTHONPATH = os.environ["PYTHONPATH"].split(os.pathsep) # Get the value, split it..
PYTHONPATH.reverse() # and reverse it, because we always insert at 1
for PATH in PYTHONPATH: # Now beginning with the last PATH
PATH_real = os.path.realpath(PATH) # Making the the path "real"
if PATH_real in sys.path: # This should always work, but keep it to be sure..
sys.path.remove(PATH_real)
sys.path.insert(1, PATH_real) # Insert it at 1 after os.curdir, which is 0.
def exceptHook(hook_type, value, traceback):
    """sys.excepthook replacement: show Cura's crash handler dialog."""
    # Imported lazily so the crash handler only loads when a crash occurs.
    # NOTE(review): the parameter name shadows the stdlib `traceback` module
    # inside this function.
    import cura.CrashHandler
    cura.CrashHandler.show(hook_type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus #@UnusedImport
from UM.Platform import Platform
import cura.CuraApplication
import cura.Settings.CuraContainerRegistry
if Platform.isWindows() and hasattr(sys, "frozen"):
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
# Force an instance of CuraContainerRegistry to be created and reused later.
cura.Settings.CuraContainerRegistry.CuraContainerRegistry.getInstance()
# This prestart up check is needed to determine if we should start the application at all.
if not cura.CuraApplication.CuraApplication.preStartUp():
sys.exit(0)
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
| #!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import os
import sys
import platform
from UM.Platform import Platform
#WORKAROUND: GITHUB-88 GITHUB-385 GITHUB-612
if Platform.isLinux(): # Needed for platform.linux_distribution, which is not available on Windows and OSX
# For Ubuntu: https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826
if platform.linux_distribution()[0] in ("debian", "Ubuntu", "LinuxMint"): # TODO: Needs a "if X11_GFX == 'nvidia'" here. The workaround is only needed on Ubuntu+NVidia drivers. Other drivers are not affected, but fine with this fix.
import ctypes
from ctypes.util import find_library
libGL = find_library("GL")
ctypes.CDLL(libGL, ctypes.RTLD_GLOBAL)
#WORKAROUND: GITHUB-704 GITHUB-708
# It looks like setuptools creates a .pth file in
# the default /usr/lib which causes the default site-packages
# to be inserted into sys.path before PYTHONPATH.
# This can cause issues such as having libsip loaded from
# the system instead of the one provided with Cura, which causes
# incompatibility issues with libArcus
if "PYTHONPATH" in os.environ.keys(): # If PYTHONPATH is used
PYTHONPATH = os.environ["PYTHONPATH"].split(os.pathsep) # Get the value, split it..
PYTHONPATH.reverse() # and reverse it, because we always insert at 1
for PATH in PYTHONPATH: # Now beginning with the last PATH
PATH_real = os.path.realpath(PATH) # Making the the path "real"
if PATH_real in sys.path: # This should always work, but keep it to be sure..
sys.path.remove(PATH_real)
sys.path.insert(1, PATH_real) # Insert it at 1 after os.curdir, which is 0.
def exceptHook(hook_type, value, traceback):
    """sys.excepthook replacement: show Cura's crash handler dialog."""
    # Imported lazily so the crash handler only loads when a crash occurs.
    # NOTE(review): the parameter name shadows the stdlib `traceback` module
    # inside this function.
    import cura.CrashHandler
    cura.CrashHandler.show(hook_type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus #@UnusedImport
from UM.Platform import Platform
import cura.CuraApplication
import cura.Settings.CuraContainerRegistry
if Platform.isWindows() and hasattr(sys, "frozen"):
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
# Force an instance of CuraContainerRegistry to be created and reused later.
cura.Settings.CuraContainerRegistry.CuraContainerRegistry.getInstance()
# This prestart up check is needed to determine if we should start the application at all.
if not cura.CuraApplication.CuraApplication.preStartUp():
sys.exit(0)
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
| Python | 0 |
90571c86f39fee14fafcc9c030de66d4255c5d82 | Change naming style | lexos/interfaces/statistics_interface.py | lexos/interfaces/statistics_interface.py | from flask import request, session, render_template, Blueprint
from lexos.helpers import constants as constants
from lexos.interfaces.base_interface import detect_active_docs
from lexos.managers import utility, session_manager as session_manager
# this is a flask blue print
# it helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
stats_view = Blueprint('statistics', __name__)
# Tells Flask to load this function when someone is at '/statsgenerator'
@stats_view.route("/statistics", methods=["GET", "POST"])
def statistics():
"""
Handles the functionality on the Statistics page ...
Note: Returns a response object (often a render_template call) to flask and
eventually to the browser.
"""
# Detect the number of active documents.
num_active_docs = detect_active_docs()
file_manager = utility.load_file_manager()
labels = file_manager.get_active_labels()
if request.method == "GET":
# "GET" request occurs when the page is first loaded.
if 'analyoption' not in session:
session['analyoption'] = constants.DEFAULT_ANALYZE_OPTIONS
if 'statisticoption' not in session:
session['statisticoption'] = {'segmentlist': list(
map(str,
list(file_manager.files.keys())))} # default is all on
return render_template(
'statistics.html',
labels=labels,
labels2=labels,
itm="statistics",
numActiveDocs=num_active_docs)
if request.method == "POST":
token = request.form['tokenType']
file_info_list, corpus_info = utility.generate_statistics(
file_manager)
session_manager.cache_analysis_option()
session_manager.cache_statistic_option()
# DO NOT save fileManager!
return render_template(
'statistics.html',
labels=labels,
FileInfoList=file_info_list,
corpusInfo=corpus_info,
token=token,
itm="statistics",
numActiveDocs=num_active_docs)
| from flask import request, session, render_template, Blueprint
from lexos.helpers import constants as constants
from lexos.managers import utility, session_manager as session_manager
from lexos.interfaces.base_interface import detect_active_docs
# this is a flask blue print
# it helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
stats_view = Blueprint('statistics', __name__)
# Tells Flask to load this function when someone is at '/statsgenerator'
@stats_view.route("/statistics", methods=["GET", "POST"])
def statistics():
"""
Handles the functionality on the Statistics page ...
Note: Returns a response object (often a render_template call) to flask and
eventually to the browser.
"""
# Detect the number of active documents.
num_active_docs = detect_active_docs()
file_manager = utility.load_file_manager()
labels = file_manager.get_active_labels()
if request.method == "GET":
# "GET" request occurs when the page is first loaded.
if 'analyoption' not in session:
session['analyoption'] = constants.DEFAULT_ANALYZE_OPTIONS
if 'statisticoption' not in session:
session['statisticoption'] = {'segmentlist': list(
map(str,
list(file_manager.files.keys())))} # default is all on
return render_template(
'statistics.html',
labels=labels,
labels2=labels,
itm="statistics",
numActiveDocs=num_active_docs)
if request.method == "POST":
token = request.form['tokenType']
file_info_dict, corpus_info_dict = utility.generate_statistics(
file_manager)
session_manager.cache_analysis_option()
session_manager.cache_statistic_option()
# DO NOT save fileManager!
return render_template(
'statistics.html',
labels=labels,
FileInfoDict=file_info_dict,
corpusInfoDict=corpus_info_dict,
token=token,
itm="statistics",
numActiveDocs=num_active_docs)
| Python | 0.000002 |
009cdf804f0f730ed081c6003eedb1015283948f | update to test for non categorized event publishing | lg_replay/test/offline/test_lg_replay.py | lg_replay/test/offline/test_lg_replay.py | #!/usr/bin/env python
PKG = 'lg_replay'
NAME = 'test_lg_replay'
import rospy
import unittest
import json
from evdev import InputEvent
from lg_replay import DeviceReplay
from interactivespaces_msgs.msg import GenericMessage
class MockDevice:
def __init__(self):
self.events = [
InputEvent(1441716733L, 879280L, 3, 0, 9888L),
InputEvent(1441716733L, 879280L, 3, 1, 15600L),
InputEvent(1441716733L, 879280L, 0, 0, 0L),
InputEvent(1441716733L, 981276L, 3, 53, 9872L),
InputEvent(1441716733L, 981276L, 3, 54, 15664L),
InputEvent(1441716733L, 981276L, 3, 0, 9872L),
InputEvent(1441716733L, 981276L, 3, 1, 15664L),
InputEvent(1441716733L, 981276L, 0, 0, 0L),
InputEvent(1441716733L, 982263L, 3, 57, -1L),
InputEvent(1441716733L, 982263L, 1, 330, 0L) # < this event gets tested
]
def read_loop(self):
return self.events
class MockPublisher:
def __init__(self):
self.published_messages = []
def get_published_messages(self):
return self.published_messages
def publish_event(self, message):
self.published_messages.append(message)
class TestReplay(unittest.TestCase):
def setUp(self):
self.mock_device = MockDevice()
self.mock_publisher = MockPublisher()
self.replay = DeviceReplay(self.mock_publisher, 'blah', event_ecode='EV_KEY', device=self.mock_device)
def test_events_get_filtered_and_published(self):
self.replay.run()
self.assertEqual(type(self.mock_publisher.get_published_messages()), list)
self.assertEqual(len(self.mock_publisher.get_published_messages()), 1)
self.assertEqual(type(self.mock_publisher.get_published_messages()[0]), dict)
message = self.mock_publisher.get_published_messages()[0]
self.assertEqual(message['code'], 330)
self.assertEqual(message['value'], 0)
if __name__ == '__main__':
import rostest
rostest.rosrun(PKG, NAME, TestReplay)
| #!/usr/bin/env python
PKG = 'lg_replay'
NAME = 'test_lg_replay'
import rospy
import unittest
import json
from evdev import InputEvent
from lg_replay import DeviceReplay
from interactivespaces_msgs.msg import GenericMessage
class MockDevice:
def __init__(self):
self.events = [
InputEvent(1441716733L, 879280L, 3, 0, 9888L),
InputEvent(1441716733L, 879280L, 3, 1, 15600L),
InputEvent(1441716733L, 879280L, 0, 0, 0L),
InputEvent(1441716733L, 981276L, 3, 53, 9872L),
InputEvent(1441716733L, 981276L, 3, 54, 15664L),
InputEvent(1441716733L, 981276L, 3, 0, 9872L),
InputEvent(1441716733L, 981276L, 3, 1, 15664L),
InputEvent(1441716733L, 981276L, 0, 0, 0L),
InputEvent(1441716733L, 982263L, 3, 57, -1L),
InputEvent(1441716733L, 982263L, 1, 330, 0L) # < this event gets tested
]
def read_loop(self):
return self.events
class MockPublisher:
def __init__(self):
self.published_messages = []
def get_published_messages(self):
return self.published_messages
def publish_event(self, message):
self.published_messages.append(message)
class TestReplay(unittest.TestCase):
def setUp(self):
self.mock_device = MockDevice()
self.mock_publisher = MockPublisher()
self.replay = DeviceReplay(self.mock_publisher, 'blah', event_ecode='EV_KEY', device=self.mock_device)
def test_events_get_filtered_and_published(self):
self.replay.run()
self.assertEqual(type(self.mock_publisher.get_published_messages()), list)
self.assertEqual(len(self.mock_publisher.get_published_messages()), 1)
self.assertEqual(type(self.mock_publisher.get_published_messages()[0]), dict)
message = self.mock_publisher.get_published_messages()[0]
self.assertEqual(message['scancode'], 330)
self.assertEqual(message['keystate'], 0)
self.assertEqual(message['keycode'], 'BTN_TOUCH')
if __name__ == '__main__':
import rostest
rostest.rosrun(PKG, NAME, TestReplay)
| Python | 0 |
41548ba9efb1d47c823696adeb13560bdbb73878 | allow update of IP to timeout without quitting loop | dyndnsc/updater/base.py | dyndnsc/updater/base.py | # -*- coding: utf-8 -*-
import logging
import requests
from ..common.subject import Subject
from ..common.events import IP_UPDATE_SUCCESS, IP_UPDATE_ERROR
log = logging.getLogger(__name__)
class UpdateProtocol(Subject):
"""
base class for all update protocols that use the dyndns2 update protocol
"""
_updateurl = None
theip = None
hostname = None # this holds the desired dns hostname
status = 0
def __init__(self):
self.updateurl = self._updateurl
super(UpdateProtocol, self).__init__()
def updateUrl(self):
return self.updateurl
def success(self):
self.status = 0
self.notify_observers(IP_UPDATE_SUCCESS, "Updated IP address of '%s' to %s" % (self.hostname, self.theip))
def abuse(self):
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "This client is considered to be abusive for hostname '%s'" % (self.hostname))
def nochg(self):
self.status = 0
def nohost(self):
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "Invalid/non-existant hostname: [%s]" % (self.hostname))
def failure(self):
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "Service is failing")
def notfqdn(self):
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "The provided hostname '%s' is not a valid hostname!" % (self.hostname))
def protocol(self):
timeout = 60
params = {'myip': self.theip, 'hostname': self.hostname}
try:
r = requests.get(self.updateUrl(), params=params,
auth=(self.userid, self.password), timeout=timeout)
except requests.exceptions.Timeout as exc:
log.warning("HTTP timeout(%i) occurred while updating IP at '%s'",
timeout, self.updateUrl(), exc_info=exc)
return False
finally:
r.close()
log.debug("status %i, %s", r.status_code, r.text)
if r.status_code == 200:
if r.text.startswith("good "):
self.success()
return self.theip
elif r.text.startswith('nochg'):
self.nochg()
return self.theip
elif r.text == 'nohost':
self.nohost()
return 'nohost'
elif r.text == 'abuse':
self.abuse()
return 'abuse'
elif r.text == '911':
self.failure()
return '911'
elif r.text == 'notfqdn':
self.notfqdn()
return 'notfqdn'
else:
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "Problem updating IP address of '%s' to %s: %s" % (self.hostname, self.theip, r.text))
return r.text
else:
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "Problem updating IP address of '%s' to %s: %s" % (self.hostname, self.theip, r.status_code))
return 'invalid http status code: %s' % r.status_code
| # -*- coding: utf-8 -*-
import logging
import requests
from ..common.subject import Subject
from ..common.events import IP_UPDATE_SUCCESS, IP_UPDATE_ERROR
log = logging.getLogger(__name__)
class UpdateProtocol(Subject):
"""
base class for all update protocols that use the dyndns2 update protocol
"""
_updateurl = None
theip = None
hostname = None # this holds the desired dns hostname
status = 0
def __init__(self):
self.updateurl = self._updateurl
super(UpdateProtocol, self).__init__()
def updateUrl(self):
return self.updateurl
def success(self):
self.status = 0
self.notify_observers(IP_UPDATE_SUCCESS, "Updated IP address of '%s' to %s" % (self.hostname, self.theip))
def abuse(self):
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "This client is considered to be abusive for hostname '%s'" % (self.hostname))
def nochg(self):
self.status = 0
def nohost(self):
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "Invalid/non-existant hostname: [%s]" % (self.hostname))
def failure(self):
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "Service is failing")
def notfqdn(self):
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "The provided hostname '%s' is not a valid hostname!" % (self.hostname))
def protocol(self):
params = {'myip': self.theip, 'hostname': self.hostname}
r = requests.get(self.updateUrl(), params=params, auth=(self.userid, self.password), timeout=60)
r.close()
log.debug("status %i, %s", r.status_code, r.text)
if r.status_code == 200:
if r.text.startswith("good "):
self.success()
return self.theip
elif r.text.startswith('nochg'):
self.nochg()
return self.theip
elif r.text == 'nohost':
self.nohost()
return 'nohost'
elif r.text == 'abuse':
self.abuse()
return 'abuse'
elif r.text == '911':
self.failure()
return '911'
elif r.text == 'notfqdn':
self.notfqdn()
return 'notfqdn'
else:
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "Problem updating IP address of '%s' to %s: %s" % (self.hostname, self.theip, r.text))
return r.text
else:
self.status = 1
self.notify_observers(IP_UPDATE_ERROR, "Problem updating IP address of '%s' to %s: %s" % (self.hostname, self.theip, r.status_code))
return 'invalid http status code: %s' % r.status_code
| Python | 0 |
25f57a023f978fca94bbeb9655a4d90f0b2d95f0 | Fix typo | pints/toy/__init__.py | pints/toy/__init__.py | #
# Root of the toy module.
# Provides a number of toy models and logpdfs for tests of Pints' functions.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals
from ._toy_classes import ToyLogPDF, ToyModel, ToyODEModel
from ._annulus import AnnulusLogPDF
from ._beeler_reuter_model import ActionPotentialModel
from ._cone import ConeLogPDF
from ._constant_model import ConstantModel
from ._eight_schools import EightSchoolsLogPDF
from ._fitzhugh_nagumo_model import FitzhughNagumoModel
from ._gaussian import GaussianLogPDF
from ._german_credit import GermanCreditLogPDF
from ._german_credit_hierarchical import GermanCreditHierarchicalLogPDF
from ._goodwin_oscillator_model import GoodwinOscillatorModel
from ._hes1_michaelis_menten import Hes1Model
from ._hh_ik_model import HodgkinHuxleyIKModel
from ._high_dimensional_gaussian import HighDimensionalGaussianLogPDF
from ._logistic_model import LogisticModel
from ._lotka_volterra_model import LotkaVolterraModel
from ._multimodal_gaussian import MultimodalGaussianLogPDF
from ._neals_funnel import NealsFunnelLogPDF
from ._parabola import ParabolicError
from ._repressilator_model import RepressilatorModel
from ._rosenbrock import RosenbrockError, RosenbrockLogPDF
from ._sho_model import SimpleHarmonicOscillatorModel
from ._simple_egg_box import SimpleEggBoxLogPDF
from ._sir_model import SIRModel
from ._twisted_gaussian_banana import TwistedGaussianLogPDF
from ._stochastic_degradation_model import StochasticDegradationModel
from ._stochastic_logistic_model import StochasticLogisticModel
| #
# Root of the toy module.
# Provides a number of toy models and logpdfs for tests of Pints' functions.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals
from ._toy_classes import ToyLogPDF, ToyModel, ToyODEModel
from ._annulus import AnnulusLogPDF
from ._beeler_reuter_model import ActionPotentialModel
from ._cone import ConeLogPDF
from ._constant_model import ConstantModel
from ._eight_schools import EightSchoolsLogPDF
from ._fitzhugh_nagumo_model import FitzhughNagumoModel
from ._gaussian import GaussianLogPDF
from ._german_credit import GermanCreditLogPDF
from ._german_credit_hierarchical import GermanCreditHierarchicalLogPDF
from ._goodwin_oscillator_model import GoodwinOscillatorModel
from ._hes1_michaelis_menten import Hes1Model
from ._hh_ik_model import HodgkinHuxleyIKModel
from ._high_dimensional_gaussian import HighDimensionalGaussianLogPDF
from ._logistic_model import LogisticModel
from ._lotka_volterra_model import LotkaVolterraModel
from ._multimodal_gaussian import MultimodalGaussianLogPDF
from ._neals_funnel import NealsFunnelLogPDF
from ._parabola import ParabolicError
from ._repressilator_model import RepressilatorModel
from ._rosenbrock import RosenbrockError, RosenbrockLogPDF
from ._sho_model import SimpleHarmonicOscillatorModel
from ._simple_egg_box import SimpleEggBoxLogPDF
from ._sir_model import SIRModel
from ._twisted_gaussian_banana import TwistedGaussianLogPDF
from ._stochastic_degradation_model import StochasticDegradationModel
from ._stochastic_logistic model import StochasticLogisticModel
| Python | 0.999999 |
5b120e5b89c06a0a5c01f8c710f85a4a179f56f7 | Change HTML theme to match BIND ARM, add copyright, EPUB info | doc/conf.py | doc/conf.py | ############################################################################
# Copyright (C) Internet Systems Consortium, Inc. ("ISC")
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at https://mozilla.org/MPL/2.0/.
#
# See the COPYRIGHT file distributed with this work for additional
# information regarding copyright ownership.
############################################################################
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'ISC DNSSEC Guide'
copyright = '2020, Internet Systems Consortium'
author = 'Internet Systems Consortium'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The master toctree document.
master_doc = 'index'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for EPUB output -------------------------------------------------
epub_basename = 'DNSSECGuide'
| # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'ISC DNSSEC Guide'
copyright = '2020, Internet Systems Consortium'
author = 'Internet Systems Consortium'
# The full version, including alpha/beta/rc tags
release = '2020'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The master toctree document.
master_doc = 'index'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
| Python | 0 |
6366fe6da78cd0e910b52352b918ff18d89f25c4 | update tests_forms | myideas/core/tests/test_forms.py | myideas/core/tests/test_forms.py | from django.test import TestCase
from django.shortcuts import resolve_url as r
from registration.forms import RegistrationForm
from myideas.core.forms import IdeasForm, IdeasFormUpdate
class IdeasFormTest(TestCase):
def setUp(self):
self.form = IdeasForm()
def test_form_has_fields(self):
"""IdeasForm must have 3 fields"""
expected = ('title', 'description', 'tags')
self.assertSequenceEqual(expected, list(self.form.fields))
def test_all_required_form_fields(self):
"""Test Ideasform field is required."""
form = IdeasForm({
'title': '',
'description': '',
})
self.assertFalse(form.is_valid())
self.assertIn('title', form.errors)
self.assertIn('description', form.errors)
def test_fields_not_present(self):
"""Test Ideasform field is not present."""
self.assertFalse(self.form.fields.get('created_at'))
self.assertFalse(self.form.fields.get('slug'))
self.assertFalse(self.form.fields.get('user'))
class IdeasFormUpdateTest(TestCase):
def setUp(self):
self.form = IdeasFormUpdate()
def test_form_has_fields(self):
"""UpdateForm must have 2 fields"""
expected = ('title', 'description')
self.assertSequenceEqual(expected, list(self.form.fields))
def test_all_required_form_fields(self):
"""Test Updateform field is required."""
form = IdeasFormUpdate({
'title': '',
'description': '',
})
self.assertFalse(form.is_valid())
self.assertIn('title', form.errors)
self.assertIn('description', form.errors)
def test_fields_not_present(self):
"""Test Updateform field is not present."""
self.assertFalse(self.form.fields.get('user'))
self.assertFalse(self.form.fields.get('slug'))
self.assertFalse(self.form.fields.get('created_at'))
self.assertFalse(self.form.fields.get('tags'))
class RegisterIdea(TestCase):
def test_registration_get(self):
resp = self.client.get(r('registration_register'))
self.failUnless(isinstance(resp.context['form'],
RegistrationForm))
| from django.test import TestCase
from django.shortcuts import resolve_url as r
from registration.forms import RegistrationForm
from myideas.core.forms import IdeasForm, IdeasFormUpdate
class IdeasFormTest(TestCase):
def setUp(self):
self.form = IdeasForm()
def test_form_has_fields(self):
"""IdeasForm must have 3 fields"""
expected = ('title', 'description', 'tags')
self.assertSequenceEqual(expected, list(self.form.fields))
def test_all_required_form_fields(self):
"""Test Ideasform field is required."""
form = IdeasForm({
'title': '',
'description': '',
})
self.assertFalse(form.is_valid())
self.assertIn('title', form.errors)
self.assertIn('description', form.errors)
def test_fields_not_present(self):
"""Test Ideasform field is not present."""
self.assertFalse(self.form.fields.get('created_at'))
self.assertFalse(self.form.fields.get('slug'))
self.assertFalse(self.form.fields.get('user'))
class IdeasFormUpdateTest(TestCase):
def setUp(self):
self.form = IdeasFormUpdate()
def test_form_has_fields(self):
"""UpdateForm must have 2 fields"""
expected = ('title', 'description')
self.assertSequenceEqual(expected, list(self.form.fields))
def test_all_required_form_fields(self):
"""Test Updateform field is required."""
form = IdeasFormUpdate({
'title': '',
'description': '',
})
self.assertFalse(form.is_valid())
self.assertIn('title', form.errors)
self.assertIn('description', form.errors)
def test_fields_not_present(self):
"""Test Updateform field is not present."""
self.assertFalse(self.form.fields.get('user'))
self.assertFalse(self.form.fields.get('slug'))
self.assertFalse(self.form.fields.get('created_at'))
self.assertFalse(self.form.fields.get('tags'))
class RegisterIdea(TestCase):
def test_registration_get(self):
resp = self.client.get(r('registration_register'))
self.failUnless(isinstance(resp.context['form'],
RegistrationForm))
| Python | 0.000001 |
82b87b05068a8fb56f2983714c08c0b822b5dde5 | Remove settings leftover from sphinx-releases | doc/conf.py | doc/conf.py | # -*- coding: utf-8 -*-
import os
import sys
import alagitpull
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
sys.path.insert(0, project_root)
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "_ext")))
# package data
about = {}
with open("../unihan_db/__about__.py") as fp:
exec(fp.read(), about)
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.napoleon',
'alagitpull',
'sphinx_issues',
]
issues_github_path = about['__github__']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = about['__title__']
copyright = about['__copyright__']
version = '%s' % ('.'.join(about['__version__'].split('.'))[:2])
release = '%s' % (about['__version__'])
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme_path = [alagitpull.get_path()]
html_static_path = ['_static']
html_favicon = 'favicon.ico'
html_theme = 'alagitpull'
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'more.html',
'searchbox.html',
]
}
html_theme_options = {
'logo': 'img/cihai.svg',
'github_user': 'cihai',
'github_repo': 'unihan-db',
'github_type': 'star',
'github_banner': True,
'projects': alagitpull.projects,
'project_name': 'db',
}
alagitpull_internal_hosts = ['unihan-db.git-pull.com', '0.0.0.0']
alagitpull_external_hosts_new_window = True
htmlhelp_basename = '%sdoc' % about['__title__']
latex_documents = [
(
'index',
'{0}.tex'.format(about['__package_name__']),
'{0} Documentation'.format(about['__title__']),
about['__author__'],
'manual',
)
]
man_pages = [
(
'index',
about['__package_name__'],
'{0} Documentation'.format(about['__title__']),
about['__author__'],
1,
)
]
texinfo_documents = [
(
'index',
'{0}'.format(about['__package_name__']),
'{0} Documentation'.format(about['__title__']),
about['__author__'],
about['__package_name__'],
about['__description__'],
'Miscellaneous',
)
]
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
'sqlalchemy': ('http://docs.sqlalchemy.org/en/latest/', None),
}
| # -*- coding: utf-8 -*-
import os
import sys
import alagitpull
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
sys.path.insert(0, project_root)
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "_ext")))
# package data
about = {}
with open("../unihan_db/__about__.py") as fp:
exec(fp.read(), about)
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.napoleon',
'alagitpull',
'sphinx_issues',
]
releases_unstable_prehistory = True
releases_document_name = ["history"]
releases_issue_uri = "https://github.com/cihai/unihan-db/issues/%s"
releases_release_uri = "https://github.com/cihai/unihan-db/tree/v%s"
issues_github_path = about['__github__']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = about['__title__']
copyright = about['__copyright__']
version = '%s' % ('.'.join(about['__version__'].split('.'))[:2])
release = '%s' % (about['__version__'])
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme_path = [alagitpull.get_path()]
html_static_path = ['_static']
html_favicon = 'favicon.ico'
html_theme = 'alagitpull'
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'more.html',
'searchbox.html',
]
}
html_theme_options = {
'logo': 'img/cihai.svg',
'github_user': 'cihai',
'github_repo': 'unihan-db',
'github_type': 'star',
'github_banner': True,
'projects': alagitpull.projects,
'project_name': 'db',
}
alagitpull_internal_hosts = ['unihan-db.git-pull.com', '0.0.0.0']
alagitpull_external_hosts_new_window = True
htmlhelp_basename = '%sdoc' % about['__title__']
latex_documents = [
(
'index',
'{0}.tex'.format(about['__package_name__']),
'{0} Documentation'.format(about['__title__']),
about['__author__'],
'manual',
)
]
man_pages = [
(
'index',
about['__package_name__'],
'{0} Documentation'.format(about['__title__']),
about['__author__'],
1,
)
]
texinfo_documents = [
(
'index',
'{0}'.format(about['__package_name__']),
'{0} Documentation'.format(about['__title__']),
about['__author__'],
about['__package_name__'],
about['__description__'],
'Miscellaneous',
)
]
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
'sqlalchemy': ('http://docs.sqlalchemy.org/en/latest/', None),
}
| Python | 0 |
b3dfb211d0d81210dcaa317a0d6f79b6ad249816 | Update netlogo_example.py | ema_workbench/examples/netlogo_example.py | ema_workbench/examples/netlogo_example.py | """
This example is a proof of principle for how NetLogo models can be
controlled using pyNetLogo and the ema_workbench. Note that this
example uses the NetLogo 6 version of the predator prey model that
comes with NetLogo. If you are using NetLogo 5, replace the model file
with the one that comes with NetLogo.
"""
import numpy as np
from ema_workbench import (RealParameter, ema_logging, ScalarOutcome,
TimeSeriesOutcome, MultiprocessingEvaluator)
from ema_workbench.connectors.netlogo import NetLogoModel
# Created on 20 mrt. 2013
#
# .. codeauthor:: jhkwakkel
if __name__ == '__main__':
# turn on logging
ema_logging.log_to_stderr(ema_logging.INFO)
model = NetLogoModel('predprey',
wd="./models/predatorPreyNetlogo",
model_file="Wolf Sheep Predation.nlogo")
model.run_length = 100
model.replications = 10
model.uncertainties = [RealParameter("grass-regrowth-time", 1, 99),
RealParameter("initial-number-sheep", 50, 100),
RealParameter("initial-number-wolves", 50, 100),
RealParameter("sheep-reproduce", 5, 10),
RealParameter("wolf-reproduce", 5, 10),
]
model.outcomes = [ScalarOutcome('sheep', variable_name='count sheep',
function=np.mean),
TimeSeriesOutcome('wolves'),
TimeSeriesOutcome('grass')]
# perform experiments
n = 10
with MultiprocessingEvaluator(model, n_processes=2,
maxtasksperchild=4) as evaluator:
results = evaluator.perform_experiments(n)
print() | '''
This example is a proof of principle for how NetLogo models can be
controlled using pyNetLogo and the ema_workbench. Note that this
example uses the NetLogo 6 version of the predator prey model that
comes with NetLogo. If you are using NetLogo 5, replace the model file
with the one that comes with NetLogo.
'''
from ema_workbench import (RealParameter, ema_logging,
TimeSeriesOutcome, MultiprocessingEvaluator)
from ema_workbench.connectors.netlogo import NetLogoModel
# Created on 20 mrt. 2013
#
# .. codeauthor:: jhkwakkel
if __name__ == '__main__':
# turn on logging
ema_logging.log_to_stderr(ema_logging.INFO)
model = NetLogoModel('predprey',
wd="./models/predatorPreyNetlogo",
model_file="Wolf Sheep Predation.nlogo")
model.run_length = 100
model.replications = 10
model.uncertainties = [RealParameter("grass-regrowth-time", 1, 99),
RealParameter("initial-number-sheep", 50, 100),
RealParameter("initial-number-wolves", 50, 100),
RealParameter("sheep-reproduce", 5, 10),
RealParameter("wolf-reproduce", 5, 10),
]
model.outcomes = [TimeSeriesOutcome('sheep'),
TimeSeriesOutcome('wolves'),
TimeSeriesOutcome('grass')]
# perform experiments
n = 10
with MultiprocessingEvaluator(model, n_processes=2,
maxtasksperchild=4) as evaluator:
results = evaluator.perform_experiments(n)
| Python | 0.000001 |
8c6f178782b6470b98536a2384391970e0cbafb9 | Update config file | pelicanconf.py | pelicanconf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Edwin Khoo'
SITENAME = 'Edwin Khoo'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'America/New_York'
DEFAULT_LANG = 'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = None
# Social widget
SOCIAL = (('GitHub', 'https://github.com/edwinksl'),
('Bitbucket', 'https://bitbucket.org/edwinksl'),
('Facebook', 'https://www.facebook.com/edwinksl'),
('Twitter', 'https://twitter.com/edwinksl'),
('LinkedIn', 'https://www.linkedin.com/in/edwinksl'))
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
# RELATIVE_URLS = True
THEME = '/home/edwinksl/Git/pelican-bootstrap3'
| #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Edwin Khoo'
SITENAME = 'Edwin Khoo'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'America/New_York'
DEFAULT_LANG = 'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = None
# Social widget
SOCIAL = (('Twitter', 'https://twitter.com/edwinksl'),
('GitHub', 'https://github.com/edwinksl'))
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
# RELATIVE_URLS = True
THEME = '/home/edwinksl/Git/pelican-bootstrap3'
| Python | 0.000001 |
96d083f6cc6bb9a55a35a494f4c671ed62a3cd40 | make plot for hh example | docs/_code/potential.py | docs/_code/potential.py | import astropy.units as u
import matplotlib.pyplot as plt
import numpy as np
import gary.potential as gp
from gary.units import galactic
# ----------------------------------------------------------------------------
p = gp.MiyamotoNagaiPotential(1E11, 6.5, 0.27, units=(u.kpc, u.Msun, u.Myr))
fig = p.plot_contours(grid=(np.linspace(-15,15,100), 0., 1.), marker=None)
fig.set_size_inches(8,6)
fig.savefig("../_static/potential/miyamoto-nagai-1d.png")
xgrid = np.linspace(-15,15,100)
zgrid = np.linspace(-5,5,100)
fig = p.plot_contours(grid=(xgrid, 1., zgrid))
fig.set_size_inches(8,6)
fig.savefig("../_static/potential/miyamoto-nagai-2d.png")
# ----------------------------------------------------------------------------
r_h = 20.
p = gp.SphericalNFWPotential(v_c=0.5*np.sqrt(np.log(2)-0.5), r_s=r_h, units=(u.kpc, u.Msun, u.Myr))
fig,ax = plt.subplots(1,1,figsize=(8,6))
r = np.zeros((100,3))
r[:,0] = np.logspace(np.log10(r_h/100.), np.log10(100*r_h), len(r))
menc = p.mass_enclosed(r)
ax.loglog(r/r_h, menc, marker=None)
ax.set_xlabel(r"$\log (r/r_s)$")
ax.set_ylabel(r"$M(<r)\,[{\rm M}_\odot]$")
fig.tight_layout()
fig.savefig("../_static/potential/mass-profile.png")
# ----------------------------------------------------------------------------
fig,ax = plt.subplots(1,1,figsize=(6,6))
disk = gp.MiyamotoNagaiPotential(m=1E11, a=6.5, b=0.27, units=galactic)
bulge = gp.HernquistPotential(m=3E10, c=0.7, units=galactic)
pot = gp.CompositePotential(disk=disk, bulge=bulge)
x = z = np.linspace(-3.,3.,100)
fig = pot.plot_contours(grid=(x,0,z), ax=ax)
fig.savefig("../_static/potential/composite.png")
# ----------------------------------------------------------------------------
def henon_heiles_funcs(units):
def value(r, L):
x,y = r.T
return 0.5*(x**2 + y**2) + L*(x**2*y - y**3/3)
def gradient(r, L):
x,y = r.T
grad = np.zeros_like(r)
grad[...,0] = x + 2*L*x*y
grad[...,1] = y + L*(x**2 - y**2)
return grad
def hessian(r, L):
raise NotImplementedError()
return value, gradient, hessian
class HenonHeilesPotential(gp.CartesianPotential):
r"""
The Henon-Heiles potential originally used to describe the non-linear
motion of stars near the Galactic center.
.. math::
\Phi = \frac{1}{2}(x^2 + y^2) + \lambda(x^2 y - \frac{y^3}{3})
Parameters
----------
L : numeric
Lambda parameter.
units : iterable
Unique list of non-reducable units that specify (at minimum) the
length, mass, time, and angle units.
"""
def __init__(self, L, units=None):
parameters = dict(L=L)
func,gradient,hessian = henon_heiles_funcs(units)
super(HenonHeilesPotential, self).__init__(func=func, gradient=gradient,
hessian=hessian,
parameters=parameters, units=units)
potential = HenonHeilesPotential(0.5)
t,w = potential.integrate_orbit([0.,0.,0.5,0.5], dt=0.03, nsteps=50000)
grid = np.linspace(-2,2,100)
fig = potential.plot_contours(grid=(grid,grid), levels=[0, 0.05,0.1,1/6.,0.5,1.,2,3,5],
cmap='Blues_r', subplots_kw=dict(figsize=(6,6)),
labels=['$x$','$y$'])
fig.axes[0].plot(w[:,0,0], w[:,0,1], marker='.',
linestyle='none', color='#fec44f', alpha=0.1)
fig.savefig("../_static/potential/henon-heiles.png")
| import astropy.units as u
import matplotlib.pyplot as plt
import numpy as np
import gary.potential as sp
from gary.units import galactic
# ----------------------------------------------------------------------------
p = sp.MiyamotoNagaiPotential(1E11, 6.5, 0.27, units=(u.kpc, u.Msun, u.Myr))
fig,axes = p.plot_contours(grid=(np.linspace(-15,15,100), 0., 1.), marker=None)
fig.set_size_inches(8,6)
fig.savefig("../_static/potential/miyamoto-nagai-1d.png")
xgrid = np.linspace(-15,15,100)
zgrid = np.linspace(-5,5,100)
fig,axes = p.plot_contours(grid=(xgrid, 1., zgrid))
fig.set_size_inches(8,6)
fig.savefig("../_static/potential/miyamoto-nagai-2d.png")
# ----------------------------------------------------------------------------
r_h = 20.
p = sp.SphericalNFWPotential(v_h=0.5, r_h=r_h, units=(u.kpc, u.Msun, u.Myr))
fig,ax = plt.subplots(1,1,figsize=(8,6))
r = np.zeros((100,3))
r[:,0] = np.logspace(np.log10(r_h/100.), np.log10(100*r_h), len(r))
menc = p.mass_enclosed(r)
ax.loglog(r/r_h, menc, marker=None)
ax.set_xlabel(r"$\log (r/r_s)$")
ax.set_ylabel(r"$M(<r)\,[{\rm M}_\odot]$")
fig.tight_layout()
fig.savefig("../_static/potential/mass-profile.png")
# ----------------------------------------------------------------------------
fig,ax = plt.subplots(1,1,figsize=(6,6))
disk = sp.MiyamotoNagaiPotential(m=1E11, a=6.5, b=0.27, units=galactic)
bulge = sp.HernquistPotential(m=3E10, c=0.7, units=galactic)
pot = sp.CompositePotential(disk=disk, bulge=bulge)
x = z = np.linspace(-3.,3.,100)
fig,ax = pot.plot_contours(grid=(x,0,z), ax=ax)
fig.savefig("../_static/potential/composite.png")
| Python | 0.000003 |
5bfdeb94d64ffe7cbcb750dda2edc48f5a1d23b2 | add a link to python course 2018 | pelicanconf.py | pelicanconf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
from pathlib import Path
AUTHOR = u'Python Group'
SITENAME = u'Python Group UEA'
SITEURL = ''
PATH = 'content'
STATIC_PATHS = ['extra', 'extra/robots.txt', 'pdfs', 'figures',
'extra/favicon.ico', 'extra/custom.css']
EXTRA_PATH_METADATA = {
'extra/favicon.ico': {'path': 'favicon.ico'},
'extra/custom.css': {'path': 'extra/custom.css'},
'extra/robots.txt': {'path': 'robots.txt'}
}
CUSTOM_CSS = 'extra/custom.css'
THEME = 'theme'
JINJA_ENVIRONMENT = {'extensions': ['jinja2.ext.i18n']}
BOOTSTRAP_THEME = 'cosmo'
PYGMENTS_STYLE = 'default'
OVERWRITE_NB_HEADER = True
if not Path('_nb_header.html').exists():
Path('_nb_header.html').touch()
EXTRA_HEADER = open('_nb_header.html').read()
TIMEZONE = 'Europe/London'
DEFAULT_LANG = u'en'
PLUGIN_PATHS = ['../pelican-plugins']
PLUGINS = ['tag_cloud', 'summary', 'i18n_subsites',
'liquid_tags.img', 'liquid_tags.video',
'liquid_tags.youtube', 'liquid_tags.vimeo',
'liquid_tags.include_code', 'liquid_tags.notebook']
NOTEBOOK_DIR = 'notebooks'
DISPLAY_TAGS_ON_SIDEBAR = True
DISPLAY_TAGS_INLINE = True
SHOW_ARTICLE_CATEGORY = False
SHOW_ARTICLE_AUTHOR = True
DISPLAY_PAGES_ON_MENU = True
DISPLAY_CATEGORIES_ON_MENU = False
ARCHIVES_SAVE_AS = 'archives.html'
# DIRECT_TEMPLATES = ['index', 'categories', 'authors', 'archives']
#MENUITEMS = [
# ('Archives', '/archives.html')
# ('Python Course 2016',
# 'https://ueapy.github.io/enveast_python_course/'),
# ('Meetings calendar', 'https://ueapy.github.io/meetings-calendar.html'),
# ('Ideas for meetings', 'https://ueapy.github.io/meetings-ideas.html'),
# ]
# SOCIAL = (('github', 'http://github.com/ueapy'))
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (
('Python Course 2018', 'https://ueapy.github.io/pythoncourse2018'),
('Python Course 2016', 'https://ueapy.github.io/enveast_python_course/'),
('Learn Python online', 'http://bafflednerd.com/learn-python-online'),
('Python Videos', 'http://pyvideo.org/'),
('From Python to Numpy', 'http://www.labri.fr/perso/nrougier/from-python-to-numpy/'),
('EarthPy', 'http://earthpy.org/'),
('Python4Oceanographers',
'https://ocefpaf.github.io/python4oceanographers/'),
('PyAOS', 'http://pyaos.johnny-lin.com/'),
('PyHOGs', 'http://pyhogs.github.io/'),
('Pythonic Perambulations', 'https://jakevdp.github.io/'),
('Meteodenny', 'https://dennissergeev.github.io/'),
)
DEFAULT_PAGINATION = 5
# Uncomment following line if you want document-relative URLs when developing
# RELATIVE_URLS = True
# Sharing
GITHUB_URL = 'https://github.com/ueapy'
DISQUS_SITENAME = 'pythonuea'
ADDTHIS_PROFILE = "ra-564e4d3ff0b9f071"
FACEBOOK_LIKE = True
GOOGLE_PLUS_ONE = True
GOOGLE_CUSTOM_SEARCH_SIDEBAR = False
| #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
from pathlib import Path
AUTHOR = u'Python Group'
SITENAME = u'Python Group UEA'
SITEURL = ''
PATH = 'content'
STATIC_PATHS = ['extra', 'extra/robots.txt', 'pdfs', 'figures',
'extra/favicon.ico', 'extra/custom.css']
EXTRA_PATH_METADATA = {
'extra/favicon.ico': {'path': 'favicon.ico'},
'extra/custom.css': {'path': 'extra/custom.css'},
'extra/robots.txt': {'path': 'robots.txt'}
}
CUSTOM_CSS = 'extra/custom.css'
THEME = 'theme'
JINJA_ENVIRONMENT = {'extensions': ['jinja2.ext.i18n']}
BOOTSTRAP_THEME = 'cosmo'
PYGMENTS_STYLE = 'default'
OVERWRITE_NB_HEADER = True
if not Path('_nb_header.html').exists():
Path('_nb_header.html').touch()
EXTRA_HEADER = open('_nb_header.html').read()
TIMEZONE = 'Europe/London'
DEFAULT_LANG = u'en'
PLUGIN_PATHS = ['../pelican-plugins']
PLUGINS = ['tag_cloud', 'summary', 'i18n_subsites',
'liquid_tags.img', 'liquid_tags.video',
'liquid_tags.youtube', 'liquid_tags.vimeo',
'liquid_tags.include_code', 'liquid_tags.notebook']
NOTEBOOK_DIR = 'notebooks'
DISPLAY_TAGS_ON_SIDEBAR = True
DISPLAY_TAGS_INLINE = True
SHOW_ARTICLE_CATEGORY = False
SHOW_ARTICLE_AUTHOR = True
DISPLAY_PAGES_ON_MENU = True
DISPLAY_CATEGORIES_ON_MENU = False
ARCHIVES_SAVE_AS = 'archives.html'
# DIRECT_TEMPLATES = ['index', 'categories', 'authors', 'archives']
#MENUITEMS = [
# ('Archives', '/archives.html')
# ('Python Course 2016',
# 'https://ueapy.github.io/enveast_python_course/'),
# ('Meetings calendar', 'https://ueapy.github.io/meetings-calendar.html'),
# ('Ideas for meetings', 'https://ueapy.github.io/meetings-ideas.html'),
# ]
# SOCIAL = (('github', 'http://github.com/ueapy'))
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Python Course 2016', 'https://ueapy.github.io/enveast_python_course/'),
('Learn Python online', 'http://bafflednerd.com/learn-python-online'),
('Python Videos', 'http://pyvideo.org/'),
('From Python to Numpy', 'http://www.labri.fr/perso/nrougier/from-python-to-numpy/'),
('EarthPy', 'http://earthpy.org/'),
('Python4Oceanographers',
'https://ocefpaf.github.io/python4oceanographers/'),
('PyAOS', 'http://pyaos.johnny-lin.com/'),
('PyHOGs', 'http://pyhogs.github.io/'),
('Pythonic Perambulations', 'https://jakevdp.github.io/'),
('Meteodenny', 'https://dennissergeev.github.io/'),
)
DEFAULT_PAGINATION = 5
# Uncomment following line if you want document-relative URLs when developing
# RELATIVE_URLS = True
# Sharing
GITHUB_URL = 'https://github.com/ueapy'
DISQUS_SITENAME = 'pythonuea'
ADDTHIS_PROFILE = "ra-564e4d3ff0b9f071"
FACEBOOK_LIKE = True
GOOGLE_PLUS_ONE = True
GOOGLE_CUSTOM_SEARCH_SIDEBAR = False
| Python | 0 |
5cf5c6028bd7007a867691af966f89574f02de1f | clean up setup | mojolicious/setup.py | mojolicious/setup.py | import subprocess
import sys
import json
from os.path import expanduser
import os
import getpass
home = expanduser("~")
def start(args, logfile, errfile):
conf = {
'database_host' : args.database_host,
'workers' : args.max_threads,
}
with open('mojolicious/app.conf', 'w') as f:
f.write(json.dumps(conf))
try:
# os.environ["MOJO_MODE"] = "production"
subprocess.Popen("hypnotoad ./app.pl", shell=True, cwd="mojolicious", stderr=errfile, stdout=logfile)
return 0
except subprocess.CalledProcessError:
return 1
def stop(logfile, errfile):
try:
subprocess.call("hypnotoad -s ./app.pl", shell=True, cwd="mojolicious", stderr=errfile, stdout=logfile)
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hypnotoad' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
except subprocess.CalledProcessError:
return 1
| import subprocess
import sys
#import setup_util
import json
from os.path import expanduser
import os
import getpass
home = expanduser("~")
def start(args, logfile, errfile):
# setup_util.replace_text("mojolicious/app.pl", "localhost", ""+ args.database_host +"")
# str(args.max_threads)
conf = {
'database_host': args.database_host,
'workers': args.max_threads,
}
with open('mojolicious/app.conf', 'w') as f:
f.write(json.dumps(conf))
try:
# os.environ["MOJO_MODE"] = "production"
subprocess.Popen("hypnotoad ./app.pl", shell=True, cwd="mojolicious", stderr=errfile, stdout=logfile)
return 0
except subprocess.CalledProcessError:
return 1
def stop(logfile, errfile):
try:
subprocess.call("hypnotoad -s ./app.pl", shell=True, cwd="mojolicious", stderr=errfile, stdout=logfile)
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hypnotoad' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
except subprocess.CalledProcessError:
return 1
| Python | 0.000002 |
8d935a2141b8f5c080d922189df7d79bb838b3a0 | Use default router implementation | mopidy_lux/router.py | mopidy_lux/router.py | import os
from tinydb import TinyDB
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware
import tornado.web
from mopidy import http
class LuxRouter(http.Router):
name = 'lux'
def setup_routes(self):
db = TinyDB(
self.config['lux']['db_file'],
storage=CachingMiddleware(JSONStorage)
)
args = dict(
config=self.config,
db=db
)
return [
(r"/%s/(.*)" % self.name, http.StaticFileHandler, {
'path': os.path.join(os.path.dirname(__file__), 'static'),
'default_filename': 'index.html'
}),
(r"/%s/playlist" % self.name, Playlists, args),
(r"/%s/loved" % self.name, Loved, args),
(r"/%s/discover" % self.name, EchoNestsDiscover, args),
]
class Playlists(tornado.web.RequestHandler):
"""
Permanent storage for playlists
"""
pass
class Loved(tornado.web.RequestHandler):
"""
Permanent storage for loved songs
"""
pass
class EchoNestsDiscover(tornado.web.RequestHandler):
"""
Discover tracks based on mood or similarity
"""
pass
| import os
from tinydb import TinyDB
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware
import tornado.web
class LuxRouter(object):
def __init__(self, _config):
self.config = _config
self._db = TinyDB(
self.config['lux']['db_file'],
storage=CachingMiddleware(JSONStorage)
)
def setup_routes(self):
args = dict(
config=self.config,
db=self._db
)
return [
(r"/lux/(.*)", tornado.web.StaticFileHandler, {
'path': os.path.join(os.path.dirname(__file__), 'static'),
'default_filename': 'index.html'
}),
(r"/lux/playlist", Playlists, args),
(r"/lux/loved", Loved, args),
(r"/lux/discover", EchoNestsDiscover, args),
]
class Playlists(tornado.web.RequestHandler):
"""
Permanent storage for playlists
"""
pass
class Loved(tornado.web.RequestHandler):
"""
Permanent storage for loved songs
"""
pass
class EchoNestsDiscover(tornado.web.RequestHandler):
"""
Discover tracks based on mood or similarity
"""
pass
| Python | 0 |
32481a906e00a1c5d301e6227ab43cf8feba31e0 | fix double-import trap | json5/__init__.py | json5/__init__.py | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A pure Python implementation of the JSON5 configuration language."""
from .lib import load, loads, dump, dumps
from .version import VERSION
__all__ = [
'VERSION',
'dump',
'dumps',
'load',
'loads',
]
| # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A pure Python implementation of the JSON5 configuration language."""
from . import tool
from .lib import load, loads, dump, dumps
from .version import VERSION
__all__ = [
'VERSION',
'dump',
'dumps',
'load',
'loads',
'tool',
]
| Python | 0.000212 |
b2e7eeafb263c12a333056e6a8239d2534833a22 | Allow specifying --couchbase-root | binding.gyp | binding.gyp | {
'targets': [{
'target_name': 'couchbase_impl',
'defines': ['LCBUV_EMBEDDED_SOURCE'],
'conditions': [
[ 'OS=="win"', {
'variables': {
'couchbase_root%': 'C:/couchbase'
},
'include_dirs': [
'<(couchbase_root)/include/',
],
'link_settings': {
'libraries': [
'-l<(couchbase_root)/lib/libcouchbase.lib',
],
},
'copies': [{
'files': [ '<(couchbase_root)/bin/libcouchbase.dll' ],
'destination': '<(module_root_dir)/build/Release/',
},],
'configurations': {
'Release': {
'msvs_settings': {
'VCCLCompilerTool': {
'ExceptionHandling': '2',
'RuntimeLibrary': 0,
},
},
},
},
}],
['OS=="mac"', {
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
},
}],
['OS!="win"', {
'variables' : {
'couchbase_root%' : '""'
},
'link_settings': {
'libraries': [
'$(EXTRA_LDFLAGS)',
'-lcouchbase',
],
},
'cflags': [
'-g',
'-fPIC',
'-Wall',
'-Wextra',
'-Wno-unused-variable',
'-Wno-unused-function',
'$(EXTRA_CFLAGS)',
'$(EXTRA_CPPFLAGS)',
'$(EXTRA_CXXFLAGS)',
],
'cflags_c':[
'-pedantic',
'-std=gnu99',
],
'conditions': [
[ 'couchbase_root!=""', {
'include_dirs': [ '<(couchbase_root)/include' ],
'libraries+': [
'-L<(couchbase_root)/lib',
'-Wl,-rpath=<(couchbase_root)/lib'
]
}]
],
}]
],
'sources': [
'src/couchbase_impl.cc',
'src/control.cc',
'src/constants.cc',
'src/namemap.cc',
'src/cookie.cc',
'src/commandbase.cc',
'src/commands.cc',
'src/exception.cc',
'src/options.cc',
'src/cas.cc',
'src/uv-plugin-all.c'
],
'include_dirs': [
'./',
'./src/io'
],
}]
}
| {
'targets': [{
'target_name': 'couchbase_impl',
'defines': ['LCBUV_EMBEDDED_SOURCE'],
'conditions': [
[ 'OS=="win"', {
'variables': {
'couchbase_root%': 'C:/couchbase'
},
'include_dirs': [
'<(couchbase_root)/include/',
],
'link_settings': {
'libraries': [
'-l<(couchbase_root)/lib/libcouchbase.lib',
],
},
'copies': [{
'files': [ '<(couchbase_root)/bin/libcouchbase.dll' ],
'destination': '<(module_root_dir)/build/Release/',
},],
'configurations': {
'Release': {
'msvs_settings': {
'VCCLCompilerTool': {
'ExceptionHandling': '2',
'RuntimeLibrary': 0,
},
},
},
},
}],
['OS=="mac"', {
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
},
}],
['OS!="win"', {
'link_settings': {
'libraries': [
'$(EXTRA_LDFLAGS)',
'-lcouchbase',
],
},
'cflags': [
'-g',
'-fPIC',
'-Wall',
'-Wextra',
'-Wno-unused-variable',
'-Wno-unused-function',
'$(EXTRA_CFLAGS)',
'$(EXTRA_CPPFLAGS)',
'$(EXTRA_CXXFLAGS)',
],
'cflags_c':[
'-pedantic',
'-std=gnu99',
],
'conditions': [
[ 'couchbase_root!=""', {
'include_dirs': [ '<(couchbase_root)/include' ],
'libraries+': [
'-L<(couchbase_root)/lib',
'-Wl,-rpath=<(couchbase_root)/lib'
]
}]
],
}]
],
'sources': [
'src/couchbase_impl.cc',
'src/control.cc',
'src/constants.cc',
'src/namemap.cc',
'src/cookie.cc',
'src/commandbase.cc',
'src/commands.cc',
'src/exception.cc',
'src/options.cc',
'src/cas.cc',
'src/uv-plugin-all.c'
],
'include_dirs': [
'./',
],
}]
}
| Python | 0 |
c91147dee9d9910cfc8e6c2e078d388f19d6ab1e | fix build error in FreeBSD (#25) | binding.gyp | binding.gyp | {
"targets": [{
"target_name": "node_snap7",
"include_dirs": [
"<!(node -e \"require('nan')\")",
"./src"
],
"sources": [
"./src/node_snap7.cpp",
"./src/node_snap7_client.cpp",
"./src/node_snap7_server.cpp",
"./src/snap7.cpp"
],
"conditions": [
["OS=='win'", {
"libraries": ["-lws2_32.lib", "-lwinmm.lib"],
"defines": ["_WINSOCK_DEPRECATED_NO_WARNINGS"]
}]
],
"dependencies": [
"snap7"
]
}, {
"target_name": "snap7",
"type": "static_library",
"include_dirs": [
"./deps/snap7/src/sys",
"./deps/snap7/src/core",
"./deps/snap7/src/lib"
],
"sources": [
"./deps/snap7/src/sys/snap_msgsock.cpp",
"./deps/snap7/src/sys/snap_sysutils.cpp",
"./deps/snap7/src/sys/snap_tcpsrvr.cpp",
"./deps/snap7/src/sys/snap_threads.cpp",
"./deps/snap7/src/core/s7_client.cpp",
"./deps/snap7/src/core/s7_isotcp.cpp",
"./deps/snap7/src/core/s7_partner.cpp",
"./deps/snap7/src/core/s7_peer.cpp",
"./deps/snap7/src/core/s7_server.cpp",
"./deps/snap7/src/core/s7_text.cpp",
"./deps/snap7/src/core/s7_micro_client.cpp",
"./deps/snap7/src/lib/snap7_libmain.cpp"
],
"conditions": [
["OS=='linux' or OS=='freebsd'", {
"cflags_cc": ["-fPIC", "-pedantic", "-fexceptions"],
"cflags_cc!": ["-fno-exceptions"]
}],
["OS=='win'", {
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1,
"AdditionalOptions": ["/EHsc"] # ExceptionHandling=1 is not enough for some versions
}
},
"defines!": ["_HAS_EXCEPTIONS=0"],
"defines": ["_WINSOCK_DEPRECATED_NO_WARNINGS"]
}],
["OS=='mac'", {
"xcode_settings": {
"GCC_ENABLE_CPP_EXCEPTIONS": "YES",
"GCC_DYNAMIC_NO_PIC": "NO",
"OTHER_CFLAGS": ["-pedantic"]
}
}]
]
}]
}
| {
"targets": [{
"target_name": "node_snap7",
"include_dirs": [
"<!(node -e \"require('nan')\")",
"./src"
],
"sources": [
"./src/node_snap7.cpp",
"./src/node_snap7_client.cpp",
"./src/node_snap7_server.cpp",
"./src/snap7.cpp"
],
"conditions": [
["OS=='win'", {
"libraries": ["-lws2_32.lib", "-lwinmm.lib"],
"defines": ["_WINSOCK_DEPRECATED_NO_WARNINGS"]
}]
],
"dependencies": [
"snap7"
]
}, {
"target_name": "snap7",
"type": "static_library",
"include_dirs": [
"./deps/snap7/src/sys",
"./deps/snap7/src/core",
"./deps/snap7/src/lib"
],
"sources": [
"./deps/snap7/src/sys/snap_msgsock.cpp",
"./deps/snap7/src/sys/snap_sysutils.cpp",
"./deps/snap7/src/sys/snap_tcpsrvr.cpp",
"./deps/snap7/src/sys/snap_threads.cpp",
"./deps/snap7/src/core/s7_client.cpp",
"./deps/snap7/src/core/s7_isotcp.cpp",
"./deps/snap7/src/core/s7_partner.cpp",
"./deps/snap7/src/core/s7_peer.cpp",
"./deps/snap7/src/core/s7_server.cpp",
"./deps/snap7/src/core/s7_text.cpp",
"./deps/snap7/src/core/s7_micro_client.cpp",
"./deps/snap7/src/lib/snap7_libmain.cpp"
],
"conditions": [
["OS=='linux'", {
"cflags_cc": ["-fPIC", "-pedantic", "-fexceptions"],
"cflags_cc!": ["-fno-exceptions"]
}],
["OS=='win'", {
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1,
"AdditionalOptions": ["/EHsc"] # ExceptionHandling=1 is not enough for some versions
}
},
"defines!": ["_HAS_EXCEPTIONS=0"],
"defines": ["_WINSOCK_DEPRECATED_NO_WARNINGS"]
}],
["OS=='mac'", {
"xcode_settings": {
"GCC_ENABLE_CPP_EXCEPTIONS": "YES",
"GCC_DYNAMIC_NO_PIC": "NO",
"OTHER_CFLAGS": ["-pedantic"]
}
}]
]
}]
}
| Python | 0 |
95bde4f783a4d11627d8bc64e24b383e945bdf01 | Revert local CDN location set by Jodok | src/web/tags.py | src/web/tags.py | # -*- coding: utf-8 -*-
# vim: set fileencodings=utf-8
__docformat__ = "reStructuredText"
import json
import datetime
from django.template.base import Library
from django.utils.safestring import mark_safe
register = Library()
CDN_URL = 'https://cdn.crate.io'
def media(context, media_url):
"""
Get the path for a media file.
"""
if media_url.startswith('http://') or media_url.startswith('https://'):
url = media_url
elif media_url.startswith('/'):
url = u'{0}{1}'.format(CDN_URL, media_url)
else:
url = u'{0}/media/{1}'.format(CDN_URL, media_url)
return url
register.simple_tag(takes_context=True)(media)
| # -*- coding: utf-8 -*-
# vim: set fileencodings=utf-8
__docformat__ = "reStructuredText"
import json
import datetime
from django.template.base import Library
from django.utils.safestring import mark_safe
register = Library()
#CDN_URL = 'https://cdn.crate.io'
CDN_URL = 'http://localhost:8001'
def media(context, media_url):
"""
Get the path for a media file.
"""
if media_url.startswith('http://') or media_url.startswith('https://'):
url = media_url
elif media_url.startswith('/'):
url = u'{0}{1}'.format(CDN_URL, media_url)
else:
url = u'{0}/media/{1}'.format(CDN_URL, media_url)
return url
register.simple_tag(takes_context=True)(media)
| Python | 0 |
06f328b5843d83946b353697745ec82c7741ee3e | Allow colons in record label URLs (for timestamps such as '2013-02-13_08:42:00'). | src/web/urls.py | src/web/urls.py | """
Define URL dispatching for the Sumatra web interface.
"""
from django.conf.urls.defaults import *
from django.views.generic import list_detail
from django.conf import settings
from sumatra.web.views import Timeline
P = {
'project': r'(?P<project>\w+[\w ]*)',
'label': r'(?P<label>\w+[\w|\-\.:]*)',
}
urlpatterns = patterns('sumatra.web.views',
(r'^$', 'list_projects'),
(r'^%(project)s/$' % P, 'list_records'),
(r'^%(project)s/about/$' % P, 'show_project'),
(r'^%(project)s/delete/$' % P, 'delete_records'),
(r'^%(project)s/tag/$' % P, 'list_tags'),
(r'^%(project)s/%(label)s/$' % P, 'record_detail'),
(r'^%(project)s/%(label)s/datafile$' % P, 'show_file'),
(r'^%(project)s/%(label)s/download$' % P, 'download_file'),
(r'^%(project)s/%(label)s/image$' % P, 'show_image'),
(r'^%(project)s/%(label)s/diff/(?P<package>[\w_]+)*$' % P, 'show_diff'),
(r'^%(project)s/simulation$' % P, 'run_sim'),
(r'^%(project)s/settings$' % P, 'settings'),
(r'^%(project)s/search$' % P, 'search'),
(r'^%(project)s/settags$' % P, 'set_tags'),
)
urlpatterns += patterns('',
(r'^timeline/(?P<user>\w+[\w ]*)/', Timeline.as_view()),
(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
)
| """
Define URL dispatching for the Sumatra web interface.
"""
from django.conf.urls.defaults import *
from django.views.generic import list_detail
from django.conf import settings
from sumatra.web.views import Timeline
P = {
'project': r'(?P<project>\w+[\w ]*)',
'label': r'(?P<label>\w+[\w|\-\.]*)',
}
urlpatterns = patterns('sumatra.web.views',
(r'^$', 'list_projects'),
(r'^%(project)s/$' % P, 'list_records'),
(r'^%(project)s/about/$' % P, 'show_project'),
(r'^%(project)s/delete/$' % P, 'delete_records'),
(r'^%(project)s/tag/$' % P, 'list_tags'),
(r'^%(project)s/%(label)s/$' % P, 'record_detail'),
(r'^%(project)s/%(label)s/datafile$' % P, 'show_file'),
(r'^%(project)s/%(label)s/download$' % P, 'download_file'),
(r'^%(project)s/%(label)s/image$' % P, 'show_image'),
(r'^%(project)s/%(label)s/diff/(?P<package>[\w_]+)*$' % P, 'show_diff'),
(r'^%(project)s/simulation$' % P, 'run_sim'),
(r'^%(project)s/settings$' % P, 'settings'),
(r'^%(project)s/search$' % P, 'search'),
(r'^%(project)s/settags$' % P, 'set_tags'),
)
urlpatterns += patterns('',
(r'^timeline/(?P<user>\w+[\w ]*)/', Timeline.as_view()),
(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) | Python | 0.000007 |
a1cbeb7f7a03d0618ec9f60f65308168e521af18 | Add encodings for imul instructions to RISC-V. | meta/isa/riscv/encodings.py | meta/isa/riscv/encodings.py | """
RISC-V Encodings.
"""
from __future__ import absolute_import
from cretonne import base
from .defs import RV32, RV64
from .recipes import OPIMM, OPIMM32, OP, OP32, R, Rshamt, I
from .settings import use_m
# Basic arithmetic binary instructions are encoded in an R-type instruction.
for inst, inst_imm, f3, f7 in [
(base.iadd, base.iadd_imm, 0b000, 0b0000000),
(base.isub, None, 0b000, 0b0100000),
(base.bxor, base.bxor_imm, 0b100, 0b0000000),
(base.bor, base.bor_imm, 0b110, 0b0000000),
(base.band, base.band_imm, 0b111, 0b0000000)
]:
RV32.enc(inst.i32, R, OP(f3, f7))
RV64.enc(inst.i64, R, OP(f3, f7))
# Immediate versions for add/xor/or/and.
if inst_imm:
RV32.enc(inst_imm.i32, I, OPIMM(f3))
RV64.enc(inst_imm.i64, I, OPIMM(f3))
# 32-bit ops in RV64.
RV64.enc(base.iadd.i32, R, OP32(0b000, 0b0000000))
RV64.enc(base.isub.i32, R, OP32(0b000, 0b0100000))
# There are no andiw/oriw/xoriw variations.
RV64.enc(base.iadd_imm.i32, I, OPIMM32(0b000))
# Dynamic shifts have the same masking semantics as the cton base instructions.
for inst, inst_imm, f3, f7 in [
(base.ishl, base.ishl_imm, 0b001, 0b0000000),
(base.ushr, base.ushr_imm, 0b101, 0b0000000),
(base.sshr, base.sshr_imm, 0b101, 0b0100000),
]:
RV32.enc(inst.i32.i32, R, OP(f3, f7))
RV64.enc(inst.i64.i64, R, OP(f3, f7))
RV64.enc(inst.i32.i32, R, OP32(f3, f7))
# Allow i32 shift amounts in 64-bit shifts.
RV64.enc(inst.i64.i32, R, OP(f3, f7))
RV64.enc(inst.i32.i64, R, OP32(f3, f7))
# Immediate shifts.
RV32.enc(inst_imm.i32, Rshamt, OPIMM(f3, f7))
RV64.enc(inst_imm.i64, Rshamt, OPIMM(f3, f7))
RV64.enc(inst_imm.i32, Rshamt, OPIMM32(f3, f7))
# "M" Standard Extension for Integer Multiplication and Division.
# Gated by the `use_m` flag.
RV32.enc(base.imul.i32, R, OP(0b000, 0b0000001), isap=use_m)
RV64.enc(base.imul.i64, R, OP(0b000, 0b0000001), isap=use_m)
RV64.enc(base.imul.i32, R, OP32(0b000, 0b0000001), isap=use_m)
| """
RISC-V Encodings.
"""
from __future__ import absolute_import
from cretonne import base
from .defs import RV32, RV64
from .recipes import OPIMM, OPIMM32, OP, OP32, R, Rshamt, I
# Basic arithmetic binary instructions are encoded in an R-type instruction.
for inst, inst_imm, f3, f7 in [
(base.iadd, base.iadd_imm, 0b000, 0b0000000),
(base.isub, None, 0b000, 0b0100000),
(base.bxor, base.bxor_imm, 0b100, 0b0000000),
(base.bor, base.bor_imm, 0b110, 0b0000000),
(base.band, base.band_imm, 0b111, 0b0000000)
]:
RV32.enc(inst.i32, R, OP(f3, f7))
RV64.enc(inst.i64, R, OP(f3, f7))
# Immediate versions for add/xor/or/and.
if inst_imm:
RV32.enc(inst_imm.i32, I, OPIMM(f3))
RV64.enc(inst_imm.i64, I, OPIMM(f3))
# 32-bit ops in RV64.
RV64.enc(base.iadd.i32, R, OP32(0b000, 0b0000000))
RV64.enc(base.isub.i32, R, OP32(0b000, 0b0100000))
# There are no andiw/oriw/xoriw variations.
RV64.enc(base.iadd_imm.i32, I, OPIMM32(0b000))
# Dynamic shifts have the same masking semantics as the cton base instructions.
for inst, inst_imm, f3, f7 in [
(base.ishl, base.ishl_imm, 0b001, 0b0000000),
(base.ushr, base.ushr_imm, 0b101, 0b0000000),
(base.sshr, base.sshr_imm, 0b101, 0b0100000),
]:
RV32.enc(inst.i32.i32, R, OP(f3, f7))
RV64.enc(inst.i64.i64, R, OP(f3, f7))
RV64.enc(inst.i32.i32, R, OP32(f3, f7))
# Allow i32 shift amounts in 64-bit shifts.
RV64.enc(inst.i64.i32, R, OP(f3, f7))
RV64.enc(inst.i32.i64, R, OP32(f3, f7))
# Immediate shifts.
RV32.enc(inst_imm.i32, Rshamt, OPIMM(f3, f7))
RV64.enc(inst_imm.i64, Rshamt, OPIMM(f3, f7))
RV64.enc(inst_imm.i32, Rshamt, OPIMM32(f3, f7))
| Python | 0.000003 |
4c547687662f7ea2a12d876291adb6e0bed85fc8 | Fix database relationships | database.py | database.py | #
# database.py
#
# set up and manage a database for storing data between sessions
#
from sqlalchemy import Column, ForeignKey, Integer, String, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
class Server(Base):
__tablename__ = 'servers'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
owner.id = Column(Integer, ForeignKey('users.id'))
owner = relationship(User, backref('servers', uselist=True))
class Role(Base):
__tablename__ = 'roles'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
server.id = Column(Integer, ForeignKey('servers.id'))
server = relationship(Server, backref('roles', uselist=True))
class Channel(Base):
__tablename__ = 'channels'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
server.id = Column(Integer, ForeignKey('servers.id'))
server = relationship(Server, backref('roles', uselist=True))
squelch = Column(Boolean, nullable=False)
class CommandClass(Base):
__tablename__ = 'commandclasses'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
class Command(Base):
__tablename__ = 'commands'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
squelch = Column(Boolean, nullable=False)
command_class.id = Column(Integer, ForeignKey('commandclasses.id'))
command_class = relationship(CommandClass, backref('commands', uselist=True))
class RoleCommandAccess(Base):
__tablename__ = 'rolecommands'
id = Column(Integer, primary_key=True)
role = Column(Integer, ForeignKey('roles.id'))
command.id = Column(Integer, ForeignKey('commands.id'))
command = relationship(Command, backref('rolecommands', uselist=True))
squelch = Column(Boolean, nullable=False)
class RoleCommandClassAccess(Base):
__tablename__ = 'rolecommandclasses'
id = Column(Integer, primary_key=True)
role = Column(Integer, ForeignKey('roles.id'))
command_class.id = Column(Integer, ForeignKey('commandclasses.id'))
command_class = relationship(CommandClass, backref('commands', uselist=True))
squelch = Column(Boolean, nullable=False)
class UserCommandAccess(Base):
__tablename__ = 'usercommands'
id = Column(Integer, primary_key=True)
user = Column(Integer, ForeignKey('users.id'))
command.id = Column(Integer, ForeignKey('commands.id'))
command = relationship(Command, backref('rolecommands', uselist=True))
squelch = Column(Boolean, nullable=False)
class UserCommandClassAccess(Base):
__tablename__ = 'usercommandclasses'
id = Column(Integer, primary_key=True)
user = Column(Integer, ForeignKey('users.id'))
command_class.id = Column(Integer, ForeignKey('commandclasses.id'))
command_class = relationship(CommandClass, backref('commands', uselist=True))
squelch = Column(Boolean, nullable=False)
# Create an engine that stores data in the local directory's
# sqlalchemy_example.db file.
engine = create_engine('sqlite:///susumu_takuan.db')
# Create all tables in the engine. This is equivalent to "Create Table"
# statements in raw SQL.
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine) | #
# database.py
#
# set up and manage a database for storing data between sessions
#
from sqlalchemy import Column, ForeignKey, Integer, String, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
class Server(Base):
__tablename__ = 'servers'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
owner = Column(Integer, ForeignKey('users.id'))
class Role(Base):
__tablename__ = 'roles'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
server = Column(Integer, ForeignKey('servers.id'))
class Channel(Base):
__tablename__ = 'channels'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
server = Column(Integer, ForeignKey('servers.id'))
squelch = Column(Boolean, nullable=False)
class CommandClass(Base):
__tablename__ = 'commandclasses'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
class Command(Base):
__tablename__ = 'commands'
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
squelch = Column(Boolean, nullable=False)
command_class = Column(Integer, ForeignKey('commandclasses.id'))
class RoleCommandAccess(Base):
__tablename__ = 'rolecommands'
id = Column(Integer, primary_key=True)
role = Column(Integer, ForeignKey('roles.id'))
command = Column(Integer, ForeignKey('commands.id'))
squelch = Column(Boolean, nullable=False)
class RoleCommandClassAccess(Base):
__tablename__ = 'rolecommandclasses'
id = Column(Integer, primary_key=True)
role = Column(Integer, ForeignKey('roles.id'))
command_class = Column(Integer, ForeignKey('commandclasses.id'))
squelch = Column(Boolean, nullable=False)
class UserCommandAccess(Base):
__tablename__ = 'usercommands'
id = Column(Integer, primary_key=True)
user = Column(Integer, ForeignKey('users.id'))
command = Column(Integer, ForeignKey('commands.id'))
squelch = Column(Boolean, nullable=False)
class UserCommandClassAccess(Base):
__tablename__ = 'usercommandclasses'
id = Column(Integer, primary_key=True)
user = Column(Integer, ForeignKey('users.id'))
command_class = Column(Integer, ForeignKey('commandclasses.id'))
squelch = Column(Boolean, nullable=False)
# Create an engine that stores data in the local directory's
# sqlalchemy_example.db file.
engine = create_engine('sqlite:///susumu_takuan.db')
# Create all tables in the engine. This is equivalent to "Create Table"
# statements in raw SQL.
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine) | Python | 0.000003 |
d017c2a2e09d043caecd555217a399453c7e60b8 | fix migration imports | eventstore/migrations/0050_askfeedback.py | eventstore/migrations/0050_askfeedback.py | # Generated by Django 2.2.24 on 2021-12-07 06:26
import uuid
import django.contrib.postgres.fields.jsonb
import django.utils.timezone
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("eventstore", "0049_auto_20211202_1220")]
operations = [
migrations.CreateModel(
name="AskFeedback",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("contact_id", models.UUIDField()),
("question_answered", models.BooleanField(default=False)),
("timestamp", models.DateTimeField(default=django.utils.timezone.now)),
(
"created_by",
models.CharField(blank=True, default="", max_length=255),
),
(
"data",
django.contrib.postgres.fields.jsonb.JSONField(
blank=True, default=dict, null=True
),
),
],
)
]
| # Generated by Django 2.2.24 on 2021-12-07 06:26
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
dependencies = [("eventstore", "0049_auto_20211202_1220")]
operations = [
migrations.CreateModel(
name="AskFeedback",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("contact_id", models.UUIDField()),
("question_answered", models.BooleanField(default=False)),
("timestamp", models.DateTimeField(default=django.utils.timezone.now)),
(
"created_by",
models.CharField(blank=True, default="", max_length=255),
),
(
"data",
django.contrib.postgres.fields.jsonb.JSONField(
blank=True, default=dict, null=True
),
),
],
)
]
| Python | 0.000001 |
5763c341a1660e13b70780a37d822eed65b00255 | refactor example fit_text_path_into_box.py | examples/addons/fit_text_path_into_box.py | examples/addons/fit_text_path_into_box.py | # Copyright (c) 2021-2022, Manfred Moitzi
# License: MIT License
import pathlib
import ezdxf
from ezdxf import path, zoom
from ezdxf.math import Matrix44
from ezdxf.tools import fonts
from ezdxf.addons import text2path
CWD = pathlib.Path("~/Desktop/Outbox").expanduser()
if not CWD.exists():
CWD = pathlib.Path(".")
# ------------------------------------------------------------------------------
# This example shows how create outline paths from a text and fit them into a
# specified rectangle.
# ------------------------------------------------------------------------------
def main():
doc = ezdxf.new()
msp = doc.modelspace()
ff = fonts.FontFace(family="Arial")
box_width, box_height = 4, 2
# Draw the target box:
msp.add_lwpolyline(
[(0, 0), (box_width, 0), (box_width, box_height), (0, box_height)],
close=True,
dxfattribs={"color": 1},
)
# Convert text string into path objects:
text_as_paths = text2path.make_paths_from_str("Squeeze Me", ff)
# Fit text paths into a given box size by scaling, does not move the path
# objects:
# - uniform=True, keeps the text aspect ratio
# - uniform=False, scales the text to touch all 4 sides of the box
final_paths = path.fit_paths_into_box(
text_as_paths, size=(box_width, box_height, 0), uniform=False
)
# Mirror text about the x-axis
final_paths = path.transform_paths(final_paths, Matrix44.scale(-1, 1, 1))
# Move bottom/left corner to (0, 0) if required:
bbox = path.bbox(final_paths)
dx, dy, dz = -bbox.extmin
final_paths = path.transform_paths(
final_paths, Matrix44.translate(dx, dy, dz)
)
path.render_lwpolylines(
msp, final_paths, distance=0.01, dxfattribs={"color": 2}
)
zoom.extents(msp)
doc.saveas(CWD / "SqueezeMe.dxf")
if __name__ == "__main__":
main()
| # Copyright (c) 2021, Manfred Moitzi
# License: MIT License
from pathlib import Path
import ezdxf
from ezdxf import path, zoom
from ezdxf.math import Matrix44
from ezdxf.tools import fonts
from ezdxf.addons import text2path
DIR = Path("~/Desktop/Outbox").expanduser()
fonts.load()
doc = ezdxf.new()
doc.layers.new("OUTLINE")
doc.layers.new("FILLING")
msp = doc.modelspace()
attr = {"color": 2}
ff = fonts.FontFace(family="Arial")
sx, sy = 4, 2
# create the target box:
msp.add_lwpolyline(
[(0, 0), (sx, 0), (sx, sy), (0, sy)], close=True, dxfattribs={"color": 1}
)
# convert text string into path objects:
text_as_paths = text2path.make_paths_from_str("Squeeze Me", ff)
# fit text paths into a given box size by scaling, does not move the path objects:
# uniform=True, keeps the text aspect ratio
# uniform=False, scales the text to touch all 4 sides of the box
final_paths = path.fit_paths_into_box(
text_as_paths, size=(sx, sy, 0), uniform=False
)
# mirror text along x-axis
final_paths = path.transform_paths(final_paths, Matrix44.scale(-1, 1, 1))
# move bottom/left corner to (0, 0) if required:
bbox = path.bbox(final_paths)
dx, dy, dz = -bbox.extmin
final_paths = path.transform_paths(final_paths, Matrix44.translate(dx, dy, dz))
path.render_lwpolylines(
msp, final_paths, distance=0.01, dxfattribs={"color": 2}
)
zoom.extents(msp)
doc.saveas(DIR / "SqeezeMe.dxf")
| Python | 0.001126 |
daed646ff987bc86b333a995bac1283360a583ef | bump up version to 0.1.2 | src/javactl/__init__.py | src/javactl/__init__.py | __version__ = '0.1.2'
| __version__ = '0.1.1'
| Python | 0.000007 |
fce501b446d2a4133a244f86653bdc683f4f03de | test project manager using initial DB & validation code added | buildbuild/projects/tests/test_project_manager.py | buildbuild/projects/tests/test_project_manager.py | from django.test import TestCase
from projects.models import Project
from teams.models import Team
from django.db import IntegrityError
from django.core.exceptions import ValidationError
class TestProjectName(TestCase):
fixtures = ['properties_data.yaml']
def setUp(self):
self.name = "test_project_name"
self.second_name = "test_second_project_name"
self.invalid_long_length_name = "a" * 65
self.team_name = "test_team_name"
self.lang_python = "python"
self.ver_python_278 = "2.7.8"
self.project = Project.objects.create_project(
name = self.name,
)
self.second_project = Project.objects.create_project(
name = self.second_name,
)
def test_create_project_must_contain_name(self):
self.assertRaises(
TypeError,
Project.objects.create_project,
team_name = self.team_name,
properties = {self.lang_python : self.ver_python_278}
)
def test_create_project_name_min_length_1(self):
self.assertRaises(
ValidationError,
Project.objects.create_project,
name = "",
)
def test_project_name_max_length_64(self):
self.assertRaises(
ValidationError,
Project.objects.create_project,
name = self.invalid_long_length_name,
)
def test_get_all_projects(self):
projects = Project.objects.all()
self.assertEqual(projects[0].name, self.project.name)
self.assertEqual(projects[1].name, self.second_project.name)
def test_check_project_unique_name(self):
self.assertRaises(
IntegrityError,
Project.objects.create_project,
name = self.name,
)
def test_get_project_equal_to_project_targetted(self):
get_project = Project.objects.get_project(self.project.id)
self.assertEqual(
self.project,
get_project,
"get_project should be equal to target project",
)
def test_properties_field_must_dict(self):
self.assertRaises(
TypeError,
Project.objects.create_project,
name = self.project.name,
team_name = self.team_name,
properties = (self.lang_python, self.ver_python_278)
)
| from django.test import TestCase
from projects.models import Project
from teams.models import Team
from django.db import IntegrityError
from django.core.exceptions import ValidationError
class TestProjectName(TestCase):
def setUp(self):
self.name = "test_project_name"
self.second_name = "test_second_project_name"
self.invalid_long_length_name = "a" * 65
self.team_name = "test_team_name"
self.lang = "python"
self.ver = "2.7.8"
self.project = Project.objects.create_project(
name = self.name,
)
self.second_project = Project.objects.create_project(
name = self.second_name,
)
def test_create_project_must_contain_name(self):
self.assertRaises(
TypeError,
Project.objects.create_project,
team_name = self.team_name,
properties = ('python','2.7.8')
)
def test_create_project_name_min_length_1(self):
try:
project = Project.objects.create_project(
name = ""
)
except ValidationError:
pass
def test_project_name_max_length_64(self):
try:
Project.objects.create_project(
name = self.invalid_long_length_name,
)
except ValidationError:
pass
def test_get_all_projects(self):
projects = Project.objects.all()
self.assertEqual(projects[0].name, self.project.name)
self.assertEqual(projects[1].name, self.second_project.name)
# Integrity
def test_project_should_have_unique_name(self):
try:
Project.objects.create_project(
name = self.name,
)
except IntegrityError:
pass
# Assert
def test_get_project_equal_to_project_targetted(self):
get_project = Project.objects.get_project(self.project.id)
self.assertEqual(
self.project,
get_project,
"get_project should be equal to target project",
)
| Python | 0 |
be778b351e6b6af18a786265851142a1b9dd420a | remove erroneous quotes in isinstance() | networkx/classes/labeledgraph.py | networkx/classes/labeledgraph.py | from graph import Graph
from digraph import DiGraph
from networkx.exception import NetworkXException, NetworkXError
import networkx.convert as convert
class LabeledGraph(Graph):
def __init__(self, data=None, name='', weighted=True):
super(LabeledGraph,self).__init__(data,name,weighted)
# node labels
if hasattr(data,'label') and isinstance(data.label,dict):
self.label=data.label.copy()
else:
self.label = {}
def add_node(self, n, data=None):
super(LabeledGraph,self).add_node(n)
if data is not None:
self.label[n]=data
def add_nodes_from(self, nbunch, data=None):
for nd in nbunch:
try:
n,data=nd
except (TypeError,ValueError):
n=nd
data=None
self.add_node(n,data)
def remove_node(self, n):
super(LabeledGraph,self).remove_node(n)
try:
del self.label[n]
except KeyError:
pass
def remove_nodes_from(self, nbunch):
for n in nbunch:
self.remove_node(n)
def nodes_iter(self, nbunch=None, data=False):
if nbunch is None:
nbunch=self.adj.iterkeys()
else:
nbunch=self.nbunch_iter(nbunch)
if data:
for n in nbunch:
data=self.label.get(n,None)
yield (n,data)
else:
for n in nbunch:
yield n
def nodes(self, nbunch=None, data=False):
if data:
return dict(self.nodes_iter(nbunch,data))
else:
return list(self.nodes_iter(nbunch))
def get_node(self, n):
if n not in self.adj:
raise NetworkXError("node %s not in graph"%(n,))
else:
data=self.label.get(n,None)
return data
def clear(self):
super(LabeledGraph,self).clear()
self.label={}
def subgraph(self, nbunch, copy=True):
H=super(LabeledGraph,self).subgraph(nbunch, copy)
H.label=dict( (k,v) for k,v in self.label.items() if k in H)
return H
def to_directed(self):
H=super(LabeledGraph,self).to_directed()
H.label=dict( (k,v) for k,v in self.label.items() if k in H)
return H
class LabeledDiGraph(LabeledGraph,DiGraph):
pass # just use the inherited classes
| from graph import Graph
from digraph import DiGraph
from networkx.exception import NetworkXException, NetworkXError
import networkx.convert as convert
class LabeledGraph(Graph):
def __init__(self, data=None, name='', weighted=True):
super(LabeledGraph,self).__init__(data,name,weighted)
# node labels
if hasattr(data,'label') and isinstance(data.label,'dict'):
self.label=data.label.copy()
else:
self.label = {}
def add_node(self, n, data=None):
super(LabeledGraph,self).add_node(n)
if data is not None:
self.label[n]=data
def add_nodes_from(self, nbunch, data=None):
for nd in nbunch:
try:
n,data=nd
except (TypeError,ValueError):
n=nd
data=None
self.add_node(n,data)
def remove_node(self, n):
super(LabeledGraph,self).remove_node(n)
try:
del self.label[n]
except KeyError:
pass
def remove_nodes_from(self, nbunch):
for n in nbunch:
self.remove_node(n)
def nodes_iter(self, nbunch=None, data=False):
if nbunch is None:
nbunch=self.adj.iterkeys()
else:
nbunch=self.nbunch_iter(nbunch)
if data:
for n in nbunch:
data=self.label.get(n,None)
yield (n,data)
else:
for n in nbunch:
yield n
def nodes(self, nbunch=None, data=False):
if data:
return dict(self.nodes_iter(nbunch,data))
else:
return list(self.nodes_iter(nbunch))
def get_node(self, n):
if n not in self.adj:
raise NetworkXError("node %s not in graph"%(n,))
else:
data=self.label.get(n,None)
return data
def clear(self):
super(LabeledGraph,self).clear()
self.label={}
def subgraph(self, nbunch, copy=True):
H=super(LabeledGraph,self).subgraph(nbunch, copy)
H.label=dict( (k,v) for k,v in self.label.items() if k in H)
return H
def to_directed(self):
H=super(LabeledGraph,self).to_directed()
H.label=dict( (k,v) for k,v in self.label.items() if k in H)
return H
class LabeledDiGraph(LabeledGraph,DiGraph):
pass # just use the inherited classes
| Python | 0.000042 |
c6453752f9630a760cd2b2508d9ba39413871d86 | Update SensorMotorTest.py | 04Dan/SensorMotorTest.py | 04Dan/SensorMotorTest.py | import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BOARD)
##GPIO.setup(18, GPIO.OUT) servo
##GPIO.setup(22, GPIO.OUT) motor
GPIO.setup(16, GPIO.IN) ##button
try:
while True:
i = GPIO.input(16)
print(i)
delay(1000)
except Keyboardinterrupt:
GPIO.cleanup()
| import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BOARD)
##GPIO.setup(18, GPIO.OUT) servo
##GPIO.setup(22, GPIO.OUT) motor
GPIO.setup(16, GPIO.IN) ##button
try:
while True:
i = GPIO.input(16)
print(i)
delay(1000)
except Keyboardinterupt:
GPIO.cleanup()
| Python | 0 |
c3ecc4a06a212da11f52c9c0cd5c7b5c8d500516 | Support -h/--help on createdb.py | createdb.py | createdb.py | #!/usr/bin/env python
import sys
import fedmsg.config
import fmn.lib.models
config = fedmsg.config.load_config()
uri = config.get('fmn.sqlalchemy.uri')
if not uri:
raise ValueError("fmn.sqlalchemy.uri must be present")
if '-h' in sys.argv or '--help'in sys.argv:
print "createdb.py [--with-dev-data]"
sys.exit(0)
session = fmn.lib.models.init(uri, debug=True, create=True)
if '--with-dev-data' in sys.argv:
user1 = fmn.lib.models.User.get_or_create(session, username="ralph")
user2 = fmn.lib.models.User.get_or_create(session, username="toshio")
user3 = fmn.lib.models.User.get_or_create(session, username="toshio")
context1 = fmn.lib.models.Context.create(
session, name="irc", description="Internet Relay Chat",
detail_name="irc nick", icon="user",
placeholder="z3r0_c00l",
)
context2 = fmn.lib.models.Context.create(
session, name="email", description="Electronic Mail",
detail_name="email address", icon="envelope",
placeholder="jane@fedoraproject.org",
)
context3 = fmn.lib.models.Context.create(
session, name="gcm", description="Google Cloud Messaging",
detail_name="registration id", icon="phone",
placeholder="laksdjfasdlfkj183097falkfj109f"
)
prefs1 = fmn.lib.models.Preference.create(
session,
user=user1,
context=context1,
detail_value="threebean",
)
prefs2 = fmn.lib.models.Preference.create(
session,
user=user1,
context=context2,
detail_value="ralph@fedoraproject.org",
)
session.commit()
| #!/usr/bin/env python
import sys
import fedmsg.config
import fmn.lib.models
config = fedmsg.config.load_config()
uri = config.get('fmn.sqlalchemy.uri')
if not uri:
raise ValueError("fmn.sqlalchemy.uri must be present")
session = fmn.lib.models.init(uri, debug=True, create=True)
if '--with-dev-data' in sys.argv:
user1 = fmn.lib.models.User.get_or_create(session, username="ralph")
user2 = fmn.lib.models.User.get_or_create(session, username="toshio")
user3 = fmn.lib.models.User.get_or_create(session, username="toshio")
context1 = fmn.lib.models.Context.create(
session, name="irc", description="Internet Relay Chat",
detail_name="irc nick", icon="user",
placeholder="z3r0_c00l",
)
context2 = fmn.lib.models.Context.create(
session, name="email", description="Electronic Mail",
detail_name="email address", icon="envelope",
placeholder="jane@fedoraproject.org",
)
context3 = fmn.lib.models.Context.create(
session, name="gcm", description="Google Cloud Messaging",
detail_name="registration id", icon="phone",
placeholder="laksdjfasdlfkj183097falkfj109f"
)
prefs1 = fmn.lib.models.Preference.create(
session,
user=user1,
context=context1,
detail_value="threebean",
)
prefs2 = fmn.lib.models.Preference.create(
session,
user=user1,
context=context2,
detail_value="ralph@fedoraproject.org",
)
session.commit()
| Python | 0 |
29205582e07eaa8b28eea4b0691a9556d0999015 | Remove unused LoginForm | src/keybar/web/forms.py | src/keybar/web/forms.py | from django.utils.translation import ugettext_lazy as _
from django.contrib import auth
import floppyforms.__future__ as forms
from keybar.models.user import User
class RegisterForm(forms.ModelForm):
name = forms.CharField(label=_('Your name'),
widget=forms.TextInput(
attrs={'placeholder': _('e.g Jorah Mormont')}))
email = forms.EmailField(label=_('Email'))
class Meta:
model = User
fields = ('name', 'email')
| from django.utils.translation import ugettext_lazy as _
from django.contrib import auth
import floppyforms.__future__ as forms
from keybar.models.user import User
class RegisterForm(forms.ModelForm):
name = forms.CharField(label=_('Your name'),
widget=forms.TextInput(
attrs={'placeholder': _('e.g Jorah Mormont')}))
email = forms.EmailField(label=_('Email'))
class Meta:
model = User
fields = ('name', 'email')
class LoginForm(forms.Form):
email = forms.EmailField(label=_('Email'))
password = forms.CharField(label=_('Password'), widget=forms.PasswordInput)
error_messages = {
'invalid_login': _('Please enter a correct email and password. '
'Note that both fields may be case-sensitive.'),
}
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.authenticated_user = None
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
self.authenticated_user = auth.authenticate(email=email, password=password)
if self.authenticated_user is None:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login')
return self.cleaned_data
def get_user(self):
return self.authenticated_user
| Python | 0.000001 |
2626b5dbfe91a6b8fee7beab370e60a5a474c699 | Add my implementation of kind() | CS212/Lesson-01/poker.py | CS212/Lesson-01/poker.py | #
# In the first Lesson of the class we are attempting to
# build a Poker program.
#
def poker(hands):
"Return the best hand: poker([hand,...]) => hand"
return max(hands, key=hand_rank)
def hand_rank(hand):
ranks = card_ranks(hand)
if straight(ranks) and flush(hand): # straight flush
return (8, max(ranks))
elif kind(4, ranks): # 4 of a kind
return (7, kind(4, ranks), kind(1, ranks))
elif kind(3, ranks) and kind(2, ranks): # full house
return (6, kind(3, ranks), kind(2, ranks))
elif flush(hand): # flush
return (5, ranks)
elif straight(ranks): # straight
return (4, max(hand))
elif kind(3, ranks): # 3 of a kind
return (3, kind(3, ranks), ranks)
elif two_pair(ranks): # 2 pair
return (2, two_pair(ranks))
elif kind(2, ranks): # kind
return (1, ranks)
else: # high card
return (0, ranks)
def card_ranks(cards):
"Return a list of the ranks, sorted with higher first."
RANK_MAP = dict(zip(["T", "J", "Q", "K", "A"], range(10, 15)))
def rank_to_int(card):
r, s = card
if r in RANK_MAP:
return RANK_MAP[r]
else:
return int(r)
ranks = map(rank_to_int, cards)
ranks.sort(reverse=True)
return ranks
def straight(ranks):
"Return True if the ordered ranks form a 5-card straight."
return (max(ranks) - min(ranks) == 4) and len(set(ranks)) == 5
def flush(hand):
"Return True if all the cards have the same suit."
suits = [s for r, s in hand]
return len(set(suits)) == 1
def kind(n, ranks):
"""Return the first rank that this hand has exactly n of.
Return None if there is no n-of-a-kind in the hand."""
last = None
count = 0
for rank in ranks:
if last != rank:
if count == n: return last
count = 0
last = rank
count += 1
if count == n:
return last
def test():
"Test cases for the functions in poker program"
sf = "6C 7C 8C 9C TC".split() # Straight Flush
fk = "9D 9H 9S 9C 7D".split() # Four of a Kind
fh = "TD TC TH 7C 7D".split() # Full House
assert card_ranks(sf) == [10, 9, 8, 7, 6]
assert card_ranks(fk) == [9, 9, 9, 9, 7]
assert card_ranks(fh) == [10, 10, 10, 7, 7]
fkranks = card_ranks(fk)
assert kind(4, fkranks) == 9
assert kind(3, fkranks) is None
assert kind(2, fkranks) is None
assert kind(1, fkranks) == 7
assert hand_rank(sf) == (8, 10)
assert hand_rank(fk) == (7, 9, 7)
assert hand_rank(fh) == (6, 10, 7)
assert poker([sf, fk, fh]) == sf
assert poker([fk, fh]) == fk
assert poker([fh, fh]) == fh
assert poker([sf]) == sf
assert poker([sf] + 99 * [fh]) == sf
return 'tests pass'
print test()
| #
# In the first Lesson of the class we are attempting to
# build a Poker program.
#
def poker(hands):
"Return the best hand: poker([hand,...]) => hand"
return max(hands, key=hand_rank)
def hand_rank(hand):
ranks = card_ranks(hand)
if straight(ranks) and flush(hand): # straight flush
return (8, max(ranks))
elif kind(4, ranks): # 4 of a kind
return (7, kind(4, ranks), kind(1, ranks))
elif kind(3, ranks) and kind(2, ranks): # full house
return (6, kind(3, ranks), kind(2, ranks))
elif flush(hand): # flush
return (5, ranks)
elif straight(ranks): # straight
return (4, max(hand))
elif kind(3, ranks): # 3 of a kind
return (3, kind(3, ranks), ranks)
elif two_pair(ranks): # 2 pair
return (2, two_pair(ranks))
elif kind(2, ranks): # kind
return (1, ranks)
else: # high card
return (0, ranks)
def card_ranks(cards):
"Return a list of the ranks, sorted with higher first."
RANK_MAP = dict(zip(["T", "J", "Q", "K", "A"], range(10, 15)))
def rank_to_int(card):
r, s = card
if r in RANK_MAP:
return RANK_MAP[r]
else:
return int(r)
ranks = map(rank_to_int, cards)
ranks.sort(reverse=True)
return ranks
def straight(ranks):
"Return True if the ordered ranks form a 5-card straight."
return (max(ranks) - min(ranks) == 4) and len(set(ranks)) == 5
def flush(hand):
"Return True if all the cards have the same suit."
suits = [s for r, s in hand]
return len(set(suits)) == 1
def test():
"Test cases for the functions in poker program"
sf = "6C 7C 8C 9C TC".split() # Straight Flush
fk = "9D 9H 9S 9C 7D".split() # Four of a Kind
fh = "TD TC TH 7C 7D".split() # Full House
assert card_ranks(sf) == [10, 9, 8, 7, 6]
assert card_ranks(fk) == [9, 9, 9, 9, 7]
assert card_ranks(fh) == [10, 10, 10, 7, 7]
assert poker([sf, fk, fh]) == sf
assert poker([fk, fh]) == fk
assert poker([fh, fh]) == fh
assert poker([sf]) == sf
assert poker([sf] + 99*[fh]) == sf
assert hand_rank(sf) == (8, 10)
assert hand_rank(fk) == (7, 9, 7)
assert hand_rank(fh) == (6, 10, 7)
return 'tests pass'
print test()
| Python | 0 |
d004bf46236a4a0e2bd72b7106e58d2dcfdc2bf8 | Add type reflection tests | test/sqlalchemy/test_introspection.py | test/sqlalchemy/test_introspection.py | from sqlalchemy import Table, Column, MetaData, testing, ForeignKey, UniqueConstraint, \
CheckConstraint
from sqlalchemy.types import Integer, String, Boolean
import sqlalchemy.types as sqltypes
from sqlalchemy.testing import fixtures
from sqlalchemy.dialects.postgresql import INET
from sqlalchemy.dialects.postgresql import UUID
meta = MetaData()
customer_table = Table('customer', meta,
Column('id', Integer, primary_key=True),
Column('name', String),
Column('email', String),
Column('verified', Boolean),
UniqueConstraint('email'))
order_table = Table('order', meta,
Column('id', Integer, primary_key=True),
Column('customer_id', Integer, ForeignKey('customer.id')),
Column('info', String),
Column('status', String, CheckConstraint("status in ('open', 'closed')")))
# Regression test for https://github.com/cockroachdb/cockroach/issues/26993
index_table = Table('index', meta,
Column('index', Integer, primary_key=True))
view_table = Table('view', meta,
Column('view', Integer, primary_key=True))
class IntrospectionTest(fixtures.TestBase):
def teardown_method(self, method):
meta.drop_all(testing.db)
def setup_method(self):
meta.create_all(testing.db)
def test_create_metadata(self):
# Create a metadata via introspection on the live DB.
meta2 = MetaData(testing.db)
# TODO(bdarnell): Do more testing.
# For now just make sure it doesn't raise exceptions.
# This covers get_foreign_keys(), which is apparently untested
# in SQLAlchemy's dialect test suite.
Table('customer', meta2, autoload=True)
Table('order', meta2, autoload=True)
Table('index', meta2, autoload=True)
Table('view', meta2, autoload=True)
class TestTypeReflection(fixtures.TestBase):
TABLE_NAME = 't'
COLUMN_NAME = 'c'
@testing.provide_metadata
def _test(self, typ, expected):
testing.db.execute(
'CREATE TABLE {} ({} {})'.format(
self.TABLE_NAME,
self.COLUMN_NAME,
typ,
)
)
t = Table(self.TABLE_NAME, self.metadata, autoload=True)
c = t.c[self.COLUMN_NAME]
assert isinstance(c.type, expected)
def test_boolean(self):
for t in ['bool', 'boolean']:
self._test(t, sqltypes.BOOLEAN)
def test_int(self):
for t in ['bigint', 'int', 'int2', 'int4', 'int64', 'int8', 'integer', 'smallint']:
self._test(t, sqltypes.INT)
def test_float(self):
for t in ['double precision', 'float', 'float4', 'float8', 'real']:
self._test(t, sqltypes.FLOAT)
def test_decimal(self):
for t in ['dec', 'decimal', 'numeric']:
self._test(t, sqltypes.DECIMAL)
def test_date(self):
self._test('date', sqltypes.DATE)
def test_time(self):
for t in ['time', 'time without time zone']:
self._test(t, sqltypes.Time)
def test_timestamp(self):
types = [
'timestamp',
'timestamptz',
'timestamp with time zone',
'timestamp without time zone',
]
for t in types:
self._test(t, sqltypes.TIMESTAMP)
def test_interval(self):
self._test('interval', sqltypes.Interval)
def test_varchar(self):
types = [
'char',
'char varying',
'character',
'character varying',
'string',
'text',
'varchar',
]
for t in types:
self._test(t, sqltypes.VARCHAR)
def test_blob(self):
for t in ['blob', 'bytea', 'bytes']:
self._test(t, sqltypes.BLOB)
def test_json(self):
for t in ['json', 'jsonb']:
self._test(t, sqltypes.JSON)
def test_uuid(self):
self._test('uuid', UUID)
def test_inet(self):
self._test('inet', INET)
| from sqlalchemy import Table, Column, MetaData, testing, ForeignKey, UniqueConstraint, \
CheckConstraint
from sqlalchemy.types import Integer, String, Boolean
from sqlalchemy.testing import fixtures
meta = MetaData()
customer_table = Table('customer', meta,
Column('id', Integer, primary_key=True),
Column('name', String),
Column('email', String),
Column('verified', Boolean),
UniqueConstraint('email'))
order_table = Table('order', meta,
Column('id', Integer, primary_key=True),
Column('customer_id', Integer, ForeignKey('customer.id')),
Column('info', String),
Column('status', String, CheckConstraint("status in ('open', 'closed')")))
# Regression test for https://github.com/cockroachdb/cockroach/issues/26993
index_table = Table('index', meta,
Column('index', Integer, primary_key=True))
view_table = Table('view', meta,
Column('view', Integer, primary_key=True))
class IntrospectionTest(fixtures.TestBase):
def teardown_method(self, method):
meta.drop_all(testing.db)
def setup_method(self):
meta.create_all(testing.db)
def test_create_metadata(self):
# Create a metadata via introspection on the live DB.
meta2 = MetaData(testing.db)
# TODO(bdarnell): Do more testing.
# For now just make sure it doesn't raise exceptions.
# This covers get_foreign_keys(), which is apparently untested
# in SQLAlchemy's dialect test suite.
Table('customer', meta2, autoload=True)
Table('order', meta2, autoload=True)
Table('index', meta2, autoload=True)
Table('view', meta2, autoload=True)
| Python | 0 |
8d8002062a0ecbf3720870d7561670a8c7e98da2 | Fix test for auth tokens store | test/stores/test_auth_tokens_store.py | test/stores/test_auth_tokens_store.py | from test.base import ApiTestCase
from zou.app.stores import auth_tokens_store
class CommandsTestCase(ApiTestCase):
def setUp(self):
super(CommandsTestCase, self).setUp()
self.store = auth_tokens_store
self.store.clear()
def tearDown(self):
self.store.clear()
def test_get_and_add(self):
self.assertIsNone(self.store.get("key-1"))
self.store.add("key-1", "true")
self.assertEquals(self.store.get("key-1"), "true")
def test_delete(self):
self.store.add("key-1", "true")
self.store.delete("key-1")
self.assertIsNone(self.store.get("key-1"))
def test_is_revoked(self):
self.assertTrue(self.store.is_revoked({"jti": "key-1"}))
self.store.add("key-1", "true")
self.assertTrue(self.store.is_revoked({"jti": "key-1"}))
self.store.add("key-1", "false")
self.assertFalse(self.store.is_revoked({"jti": "key-1"}))
def test_keys(self):
self.store.add("key-1", "true")
self.store.add("key-2", "true")
self.assertTrue("key-1" in self.store.keys())
self.assertTrue("key-2" in self.store.keys())
| from test.base import ApiTestCase
from zou.app.stores import auth_tokens_store
class CommandsTestCase(ApiTestCase):
def setUp(self):
super(CommandsTestCase, self).setUp()
self.store = auth_tokens_store
self.store.clear()
def tearDown(self):
self.store.clear()
def test_get_and_add(self):
self.assertIsNone(self.store.get("key-1"))
self.store.add("key-1", "true")
self.assertEquals(self.store.get("key-1"), "true")
def test_delete(self):
self.store.add("key-1", "true")
self.store.delete("key-1")
self.assertIsNone(self.store.get("key-1"))
def test_is_revoked(self):
self.assertTrue(self.store.is_revoked({"jti": "key-1"}))
self.store.add("key-1", "true")
self.assertTrue(self.store.is_revoked({"jti": "key-1"}))
self.store.add("key-1", "false")
self.assertFalse(self.store.is_revoked({"jti": "key-1"}))
def test_keys(self):
self.store.add("key-1", "true")
self.store.add("key-2", "true")
self.assertEquals(
self.store.keys(), ["key-1", "key-2"]
)
| Python | 0.000002 |
639a692bc06cf31b5feb1d990740976884f88a0c | Fix key format (?) | testlog_etl/transforms/jscov_to_es.py | testlog_etl/transforms/jscov_to_es.py | # encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Trung Do (chin.bimbo@gmail.com)
#
from __future__ import division
from __future__ import unicode_literals
import json
from pyLibrary.dot import Dict
from pyLibrary.dot import wrap
from pyLibrary.env import http
from testlog_etl.transforms import EtlHeadGenerator
from testlog_etl.transforms.pulse_block_to_es import scrub_pulse_record
def process(source_key, source, destination, resources, please_stop=None):
keys = []
records = []
etl_header_gen = EtlHeadGenerator(source_key)
for i, line in enumerate(source.read_lines()):
stats = Dict()
pulse_record = scrub_pulse_record(source_key, i, line, stats)
artifact_file_name = pulse_record.artifact.name
# we're only interested in jscov files, at lease at the moment
if "jscov" not in artifact_file_name:
continue
# construct the artifact's full url
taskId = pulse_record.status.taskId
runId = pulse_record.runId
full_artifact_path = "https://public-artifacts.taskcluster.net/" + taskId + "/" + str(runId) + "/" + artifact_file_name
# fetch the artifact
response = http.get(full_artifact_path).all_content
# transform
json_data = wrap(json.loads(response))
for j, obj in enumerate(json_data):
# get the test name. Just use the test file name at the moment
# TODO: change this when needed
test_name = obj.testUrl.split("/")[-1]
for line in obj.covered:
dest_key, dest_etl = etl_header_gen.next(pulse_record.etl, j)
key = dest_key + "." + unicode(j)
new_line = {
"test": {
"name": test_name,
"url": obj.testUrl
},
"source": {
"sourceFile": obj.sourceFile,
"lineCovered": line
},
"etl": dest_etl
}
records.append({"id": key, "value": new_line})
keys.append(key)
destination.extend(records)
return keys
| # encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Trung Do (chin.bimbo@gmail.com)
#
from __future__ import division
from __future__ import unicode_literals
import json
from pyLibrary.dot import Dict
from pyLibrary.dot import wrap
from pyLibrary.env import http
from testlog_etl.transforms import EtlHeadGenerator
from testlog_etl.transforms.pulse_block_to_es import scrub_pulse_record
def process(source_key, source, destination, resources, please_stop=None):
keys = []
records = []
etl_header_gen = EtlHeadGenerator(source_key)
for i, line in enumerate(source.read_lines()):
stats = Dict()
pulse_record = scrub_pulse_record(source_key, i, line, stats)
artifact_file_name = pulse_record.artifact.name
# we're only interested in jscov files, at lease at the moment
if "jscov" not in artifact_file_name:
continue
# construct the artifact's full url
taskId = pulse_record.status.taskId
runId = pulse_record.runId
full_artifact_path = "https://public-artifacts.taskcluster.net/" + taskId + "/" + str(runId) + "/" + artifact_file_name
# fetch the artifact
response = http.get(full_artifact_path).all_content
# transform
json_data = wrap(json.loads(response))
for j, obj in enumerate(json_data):
# get the test name. Just use the test file name at the moment
# TODO: change this when needed
test_name = obj.testUrl.split("/")[-1]
for line in obj.covered:
dest_key, dest_etl = etl_header_gen.next(pulse_record.etl, j)
new_line = {
"test": {
"name": test_name,
"url": obj.testUrl
},
"source": {
"sourceFile": obj.sourceFile,
"lineCovered": line
},
"etl": dest_etl
}
records.append({"id": dest_key, "value": new_line})
keys.append(dest_key)
destination.extend(records)
return keys
| Python | 0.00006 |
d4f2fadd94603eea2c15f5bb8a2a7d29c0d39ed0 | Hello David | CreateM3Us/CreateM3Us.py | CreateM3Us/CreateM3Us.py | import os
incomingDirectory = 'C:\\temp'
for subdir, dirs, files in os.walk(incomingDirectory): #What does is.walk do
for file in files:
#print os.path.join(subdir, file)
filepath = subdir + os.sep + file
print (filepath)
# File input/output
# https://www.digitalocean.com/community/tutorials/how-to-handle-plain-text-files-in-python-3
fileA = open(incomingDirectory + '/something.txt', 'w')
fileA.write("Some text")
fileA.close()
# Desired Output/Structure
"""
C:\temp\GenreA\GenreA.m3u
C:\temp\GenreA\Artist1\Artist1.m3u
C:\temp\GenreA\Artist1\AlbumA\FileA.txt
C:\temp\GenreA\Artist1\AlbumA\FileB.txt
C:\temp\GenreA\Artist1\AlbumB\FileA.txt
C:\temp\GenreA\Artist1\AlbumB\FileB.txt
C:\temp\GenreA\Artist1\AlbumB\FileC.txt
C:\temp\GenreA\Artist2\Artist2.m3u
C:\temp\GenreA\Artist2\AlbumA\FileA.txt
C:\temp\GenreA\Artist2\AlbumA\FileB.txt
C:\temp\GenreA\Artist2\AlbumB\FileA.txt
C:\temp\GenreA\Artist2\AlbumB\FileB.txt
C:\temp\GenreA\Artist2\AlbumB\FileC.txt
"""
# M3U file (C:\temp\GenreA\GenreA.m3u)
"""
Artist1/AlbumA/FileA.txt
Artist1/AlbumA/FileB.txt
Artist1/AlbumB/FileA.txt
Artist1/AlbumB/FileB.txt
Artist1/AlbumB/FileC.txt
Artist2/...
"""
#M3U file (C:\temp\GenreA\Artist1\Artist1.m3u)
"""
AlbumA/FileA.txt
AlbumA/FileB.txt
AlbumB/FileA.txt
AlbumB/FileB.txt
AlbumB/FileC.txt
""" | import os
incomingDirectory = 'C:\\temp'
for subdir, dirs, files in os.walk(incomingDirectory):
for file in files:
#print os.path.join(subdir, file)
filepath = subdir + os.sep + file
print (filepath)
# File input/output
# https://www.digitalocean.com/community/tutorials/how-to-handle-plain-text-files-in-python-3
fileA = open(incomingDirectory + '/something.txt', 'w')
fileA.write("Some text")
fileA.close()
# Desired Output/Structure
"""
C:\temp\GenreA\GenreA.m3u
C:\temp\GenreA\Artist1\Artist1.m3u
C:\temp\GenreA\Artist1\AlbumA\FileA.txt
C:\temp\GenreA\Artist1\AlbumA\FileB.txt
C:\temp\GenreA\Artist1\AlbumB\FileA.txt
C:\temp\GenreA\Artist1\AlbumB\FileB.txt
C:\temp\GenreA\Artist1\AlbumB\FileC.txt
C:\temp\GenreA\Artist2\Artist2.m3u
C:\temp\GenreA\Artist2\AlbumA\FileA.txt
C:\temp\GenreA\Artist2\AlbumA\FileB.txt
C:\temp\GenreA\Artist2\AlbumB\FileA.txt
C:\temp\GenreA\Artist2\AlbumB\FileB.txt
C:\temp\GenreA\Artist2\AlbumB\FileC.txt
"""
# M3U file (C:\temp\GenreA\GenreA.m3u)
"""
Artist1/AlbumA/FileA.txt
Artist1/AlbumA/FileB.txt
Artist1/AlbumB/FileA.txt
Artist1/AlbumB/FileB.txt
Artist1/AlbumB/FileC.txt
Artist2/...
"""
#M3U file (C:\temp\GenreA\Artist1\Artist1.m3u)
"""
AlbumA/FileA.txt
AlbumA/FileB.txt
AlbumB/FileA.txt
AlbumB/FileB.txt
AlbumB/FileC.txt
""" | Python | 0.999971 |
0f1551db96cd27ed20e62545cac1540a405e8f1a | fix bug | FlaskWebProject/views.py | FlaskWebProject/views.py | """
Routes and views for the flask application.
"""
import os
from datetime import datetime
from flask import render_template, request
from FlaskWebProject import app
from generate_summary_json import generate_summary_json
@app.route('/')
@app.route('/home')
def home():
"""Renders the home page."""
return render_template(
'index.html',
title='Home Page',
year=datetime.now().year,
)
@app.route('/summarize', methods=['GET'])
def summarize():
access_token = os.getenv('TEST_TEAM_SLACK_ACCESS_TOKEN')
member_id = request.args.get('user_id')
channel_id = request.args.get('channel_id')
channel_name = request.args.get('channel_name')
num_messages = request.args.get('text')
summary_json = generate_summary_json(member_id, channel_id, channel_name, num_messages, access_token)
return {'text': channel_name, 'private': True}
if __name__ == '__main__':
app.run(debug=True) | """
Routes and views for the flask application.
"""
import os
from datetime import datetime
from flask import render_template, request
from FlaskWebProject import app
from generate_summary_json import generate_summary_json
@app.route('/')
@app.route('/home')
def home():
"""Renders the home page."""
return render_template(
'index.html',
title='Home Page',
year=datetime.now().year,
)
@app.route('/summarize', methods=['GET'])
def summarize():
access_token = os.getenv('TREEHACKS_SLACK_ACCESS_TOKEN')
member_id = request.args.get('user_id')
channel_id = request.args.get('channel_id')
channel_name = request.args.get('channel_name')
num_messages = request.args.get('text')
summary_json = generate_summary_json(member_id, channel_id, channel_name, num_messages, TEST_TEAM_SLACK_ACCESS_TOKEN)
return {'text': channel_name, 'private': True}
if __name__ == '__main__':
app.run(debug=True) | Python | 0.000001 |
529987bb17a05c041cdbf3bbe2a98edda72872fc | remove unneeded Todo | InvenTree/plugin/urls.py | InvenTree/plugin/urls.py | """
URL lookup for plugin app
"""
from django.conf.urls import url, include
from plugin import plugin_reg
PLUGIN_BASE = 'plugin' # Constant for links
def get_plugin_urls():
"""returns a urlpattern that can be integrated into the global urls"""
urls = []
for plugin in plugin_reg.plugins.values():
if plugin.mixin_enabled('urls'):
urls.append(plugin.urlpatterns)
return url(f'^{PLUGIN_BASE}/', include((urls, 'plugin')))
| """
URL lookup for plugin app
"""
from django.conf.urls import url, include
from plugin import plugin_reg
PLUGIN_BASE = 'plugin' # Constant for links
def get_plugin_urls():
"""returns a urlpattern that can be integrated into the global urls"""
urls = []
for plugin in plugin_reg.plugins.values():
if plugin.mixin_enabled('urls'):
urls.append(plugin.urlpatterns)
# TODO wrap everything in plugin_url_wrapper
return url(f'^{PLUGIN_BASE}/', include((urls, 'plugin')))
| Python | 0.000036 |
6137a6f00abbeb81b080f534481bb255f950dd83 | access oauth token securely through azure | FlaskWebProject/views.py | FlaskWebProject/views.py | """
Routes and views for the Flask application.
"""
import os
from flask import render_template, request
from FlaskWebProject import app
from generate_summary_json import generate_summary_json
ACCESS_TOKEN = os.getenv('TREEHACKS_SLACK_ACCESS_TOKEN')
@app.route('/')
@app.route('/home')
def home():
"""Renders the home page."""
return render_template(
'index.html',
title='Home Page'
)
# text is number of messages
@app.route('/summarize', methods=['GET'])
def summarize(ACCESS_TOKEN):
member_id = requests.args.get('user_id')
channel_id = requests.args.get('channel_id')
channel_name = requests.args.get('channel_name')
num_messages = requests.args.get('text')
summary_json = generate_summary_json(member_id, channel_id, channel_name, num_messages, TEST_TEAM_SLACK_ACCESS_TOKEN)
return {'text': channel_name, 'private': True}
if __name__ == '__main__':
app.run(debug=True) | """
Routes and views for the Flask application.
"""
from flask import render_template, request
from FlaskWebProject import app
from oauth_constants import TEST_TEAM_SLACK_ACCESS_TOKEN
from generate_summary_json import generate_summary_json
global TEST_TEAM_SLACK_ACCESS_TOKEN
@app.route('/')
@app.route('/home')
def home():
"""Renders the home page."""
return render_template(
'index.html',
title='Home Page'
)
# text is number of messages
@app.route('/summarize', methods=['GET'])
def summarize():
member_id = requests.args.get('user_id')
channel_id = requests.args.get('channel_id')
channel_name = requests.args.get('channel_name')
num_messages = requests.args.get('text')
summary_json = generate_summary_json(member_id, channel_id, channel_name, num_messages, TEST_TEAM_SLACK_ACCESS_TOKEN)
return {'text': channel_name, 'private': True}
if __name__ == '__main__':
app.run(debug=True) | Python | 0 |
cd5f824a2d756c8770be6f47d946c7e39c85228e | Fix postcode importing all | molly/apps/places/providers/postcodes.py | molly/apps/places/providers/postcodes.py | import simplejson, urllib, random, csv, zipfile, tempfile, urllib2, os.path
from django.contrib.gis.geos import Point
from molly.apps.places.providers import BaseMapsProvider
from molly.apps.places.models import Entity, EntityType, Source
from molly.conf.settings import batch
class PostcodesMapsProvider(BaseMapsProvider):
def __init__(self, codepoint_path, import_areas=None):
self.codepoint_path = codepoint_path
self.import_areas = import_areas
@batch('%d 12 1 1 *' % random.randint(0, 59))
def import_data(self, metadata, output):
entity_type, source = self._get_entity_type(), self._get_source()
if not os.path.exists(self.codepoint_path):
archive_url = urllib2.urlopen('http://freepostcodes.org.uk/static/code-point-open/codepo_gb.zip')
archive_file = open(self.codepoint_path, 'w')
archive_file.write(archive_url.read())
archive_file.close()
archive = zipfile.ZipFile(self.codepoint_path)
if self.import_areas:
filenames = ['Code-Point Open/data/CSV/%s.csv' % code.lower() for code in self.import_areas]
else:
filenames = [path for path in archive.namelist() if re.match(r'Code\-Point Open\/data\/CSV\/[a-z]{1,2}.csv', path)]
for filename in filenames:
if hasattr(archive, 'open'):
f = archive.open(filename)
else:
f = tempfile.TemporaryFile()
f.write(archive.read(filename))
f.seek(0)
reader = csv.reader(f)
self._load_from_csv(reader, entity_type, source)
del f
def _load_from_csv(self, reader, entity_type, source):
j = 0
for i, line in enumerate(reader):
postcode_abbrev, (easting, northing) = line[0], line[10:12]
if postcode_abbrev[-4] != ' ':
postcode = '%s %s' % (postcode_abbrev[:-3], postcode_abbrev[-3:])
else:
postcode = postcode_abbrev
postcode_abbrev = postcode_abbrev.replace(' ', '')
try:
easting, northing = int(easting), int(northing)
except ValueError:
continue
j += 1
try:
entity = Entity.objects.get(source=source, _identifiers__scheme='postcode', _identifiers__value=postcode_abbrev)
except Entity.DoesNotExist:
entity = Entity(source=source)
entity.title = postcode
entity.location = Point(easting, northing, srid=27700)
entity.geometry = entity.location
entity.primary_type = entity_type
identifiers = {
'postcode': postcode_abbrev,
'postcode-canonical': postcode,
}
entity.save(identifiers=identifiers)
entity.all_types.add(entity_type)
entity.update_all_types_completion()
def _get_entity_type(self):
try:
return EntityType.objects.get(slug='post-code')
except EntityType.DoesNotExist:
entity_type = EntityType(
slug = 'post-code',
article = 'a',
verbose_name = 'postcode',
verbose_name_plural = 'postcodes',
show_in_nearby_list = False,
show_in_category_list = False,
)
entity_type.save()
return entity_type
def _get_source(self):
try:
source = Source.objects.get(module_name="molly.providers.apps.maps.postcodes")
except Source.DoesNotExist:
source = Source(module_name="molly.providers.apps.maps.postcodes")
source.name = "Postcodes"
source.save()
return source
| import simplejson, urllib, random, csv, zipfile, tempfile, urllib2, os.path
from django.contrib.gis.geos import Point
from molly.apps.places.providers import BaseMapsProvider
from molly.apps.places.models import Entity, EntityType, Source
from molly.conf.settings import batch
class PostcodesMapsProvider(BaseMapsProvider):
def __init__(self, codepoint_path, import_areas=None):
self.codepoint_path = codepoint_path
self.import_areas = import_areas
@batch('%d 12 1 1 *' % random.randint(0, 59))
def import_data(self, metadata, output):
entity_type, source = self._get_entity_type(), self._get_source()
if not os.path.exists(self.codepoint_path):
archive_url = urllib2.urlopen('http://freepostcodes.org.uk/static/code-point-open/codepo_gb.zip')
archive_file = open(self.codepoint_path, 'w')
archive_file.write(archive_url.read())
archive_file.close()
archive = zipfile.ZipFile(self.codepoint_path)
if self.import_areas:
filenames = ['Code-Point Open/data/CSV/%s.csv' % code.lower() for code in self.import_areas]
else:
filenames = [path for path in archive.listnames() if re.match(r'Code\-Point Open\/data\/CSV\/[a-z]{1,2}.csv', path)]
for filename in filenames:
if hasattr(archive, 'open'):
f = archive.open(filename)
else:
f = tempfile.TemporaryFile()
f.write(archive.read(filename))
f.seek(0)
reader = csv.reader(f)
self._load_from_csv(reader, entity_type, source)
del f
def _load_from_csv(self, reader, entity_type, source):
j = 0
for i, line in enumerate(reader):
postcode_abbrev, (easting, northing) = line[0], line[10:12]
if postcode_abbrev[-4] != ' ':
postcode = '%s %s' % (postcode_abbrev[:-3], postcode_abbrev[-3:])
else:
postcode = postcode_abbrev
postcode_abbrev = postcode_abbrev.replace(' ', '')
try:
easting, northing = int(easting), int(northing)
except ValueError:
continue
j += 1
try:
entity = Entity.objects.get(source=source, _identifiers__scheme='postcode', _identifiers__value=postcode_abbrev)
except Entity.DoesNotExist:
entity = Entity(source=source)
entity.title = postcode
entity.location = Point(easting, northing, srid=27700)
entity.geometry = entity.location
entity.primary_type = entity_type
identifiers = {
'postcode': postcode_abbrev,
'postcode-canonical': postcode,
}
entity.save(identifiers=identifiers)
entity.all_types.add(entity_type)
entity.update_all_types_completion()
def _get_entity_type(self):
try:
return EntityType.objects.get(slug='post-code')
except EntityType.DoesNotExist:
entity_type = EntityType(
slug = 'post-code',
article = 'a',
verbose_name = 'postcode',
verbose_name_plural = 'postcodes',
show_in_nearby_list = False,
show_in_category_list = False,
)
entity_type.save()
return entity_type
def _get_source(self):
try:
source = Source.objects.get(module_name="molly.providers.apps.maps.postcodes")
except Source.DoesNotExist:
source = Source(module_name="molly.providers.apps.maps.postcodes")
source.name = "Postcodes"
source.save()
return source
| Python | 0 |
e5cc051bc7be854e253853d85b1de8b3037170be | always convert to floats | nbgrader/preprocessors/overwritecells.py | nbgrader/preprocessors/overwritecells.py | from IPython.nbformat.v4.nbbase import validate
from nbgrader import utils
from nbgrader.api import Gradebook
from nbgrader.preprocessors import NbGraderPreprocessor
class OverwriteCells(NbGraderPreprocessor):
"""A preprocessor to overwrite information about grade and solution cells."""
def preprocess(self, nb, resources):
# pull information from the resources
self.notebook_id = resources['nbgrader']['notebook']
self.assignment_id = resources['nbgrader']['assignment']
self.db_url = resources['nbgrader']['db_url']
# connect to the database
self.gradebook = Gradebook(self.db_url)
nb, resources = super(OverwriteCells, self).preprocess(nb, resources)
return nb, resources
def update_cell_type(self, cell, cell_type):
if cell.cell_type == cell_type:
return
elif cell_type == 'code':
cell.cell_type = 'code'
cell.outputs = []
cell.execution_count = None
validate(cell, 'code_cell')
elif cell_type == 'markdown':
cell.cell_type = 'markdown'
if 'outputs' in cell:
del cell['outputs']
if 'execution_count' in cell:
del cell['execution_count']
validate(cell, 'markdown_cell')
def report_change(self, name, attr, old, new):
self.log.warning(
"Attribute '%s' for cell %s has changed! (should be: %s, got: %s)", attr, name, old, new)
def preprocess_cell(self, cell, resources, cell_index):
grade_id = cell.metadata.get('nbgrader', {}).get('grade_id', None)
if grade_id is None:
return cell, resources
source_cell = self.gradebook.find_source_cell(
grade_id,
self.notebook_id,
self.assignment_id)
# check that the cell type hasn't changed
if cell.cell_type != source_cell.cell_type:
self.report_change(grade_id, "cell_type", source_cell.cell_type, cell.cell_type)
self.update_cell_type(cell, source_cell.cell_type)
# check that the locked status hasn't changed
if utils.is_locked(cell) != source_cell.locked:
self.report_change(grade_id, "locked", source_cell.locked, utils.is_locked(cell))
cell.metadata.nbgrader["locked"] = source_cell.locked
# if it's a grade cell, check that the max score hasn't changed
if utils.is_grade(cell):
grade_cell = self.gradebook.find_grade_cell(
grade_id,
self.notebook_id,
self.assignment_id)
old_points = float(grade_cell.max_score)
new_points = float(cell.metadata.nbgrader["points"])
if old_points != new_points:
self.report_change(grade_id, "points", old_points, new_points)
cell.metadata.nbgrader["points"] = old_points
# always update the checksum, just in case
cell.metadata.nbgrader["checksum"] = source_cell.checksum
# if it's locked, check that the checksum hasn't changed
if source_cell.locked:
old_checksum = source_cell.checksum
new_checksum = utils.compute_checksum(cell)
if old_checksum != new_checksum:
self.report_change(grade_id, "checksum", old_checksum, new_checksum)
cell.source = source_cell.source
# double check the the checksum is correct now
if utils.compute_checksum(cell) != source_cell.checksum:
raise RuntimeError("Inconsistent checksums for cell {}".format(source_cell.name))
return cell, resources
| from IPython.nbformat.v4.nbbase import validate
from nbgrader import utils
from nbgrader.api import Gradebook
from nbgrader.preprocessors import NbGraderPreprocessor
class OverwriteCells(NbGraderPreprocessor):
"""A preprocessor to overwrite information about grade and solution cells."""
def preprocess(self, nb, resources):
# pull information from the resources
self.notebook_id = resources['nbgrader']['notebook']
self.assignment_id = resources['nbgrader']['assignment']
self.db_url = resources['nbgrader']['db_url']
# connect to the database
self.gradebook = Gradebook(self.db_url)
nb, resources = super(OverwriteCells, self).preprocess(nb, resources)
return nb, resources
def update_cell_type(self, cell, cell_type):
if cell.cell_type == cell_type:
return
elif cell_type == 'code':
cell.cell_type = 'code'
cell.outputs = []
cell.execution_count = None
validate(cell, 'code_cell')
elif cell_type == 'markdown':
cell.cell_type = 'markdown'
if 'outputs' in cell:
del cell['outputs']
if 'execution_count' in cell:
del cell['execution_count']
validate(cell, 'markdown_cell')
def report_change(self, name, attr, old, new):
self.log.warning(
"Attribute '%s' for cell %s has changed! (should be: %s, got: %s)", attr, name, old, new)
def preprocess_cell(self, cell, resources, cell_index):
grade_id = cell.metadata.get('nbgrader', {}).get('grade_id', None)
if grade_id is None:
return cell, resources
source_cell = self.gradebook.find_source_cell(
grade_id,
self.notebook_id,
self.assignment_id)
# check that the cell type hasn't changed
if cell.cell_type != source_cell.cell_type:
self.report_change(grade_id, "cell_type", source_cell.cell_type, cell.cell_type)
self.update_cell_type(cell, source_cell.cell_type)
# check that the locked status hasn't changed
if utils.is_locked(cell) != source_cell.locked:
self.report_change(grade_id, "locked", source_cell.locked, utils.is_locked(cell))
cell.metadata.nbgrader["locked"] = source_cell.locked
# if it's a grade cell, check that the max score hasn't changed
if utils.is_grade(cell):
grade_cell = self.gradebook.find_grade_cell(
grade_id,
self.notebook_id,
self.assignment_id)
old_points = grade_cell.max_score
new_points = cell.metadata.nbgrader["points"]
if type(old_points)!=type(new_points):
new_points=float(new_points)
old_points=float(old_points)
if old_points != new_points:
self.report_change(grade_id, "points", old_points, new_points)
cell.metadata.nbgrader["points"] = old_points
# always update the checksum, just in case
cell.metadata.nbgrader["checksum"] = source_cell.checksum
# if it's locked, check that the checksum hasn't changed
if source_cell.locked:
old_checksum = source_cell.checksum
new_checksum = utils.compute_checksum(cell)
if old_checksum != new_checksum:
self.report_change(grade_id, "checksum", old_checksum, new_checksum)
cell.source = source_cell.source
# double check the the checksum is correct now
if utils.compute_checksum(cell) != source_cell.checksum:
raise RuntimeError("Inconsistent checksums for cell {}".format(source_cell.name))
return cell, resources
| Python | 0.999827 |
6565e5bd88ebe5fde8d65664041a9e8f571ca7d7 | switch to requests | IMGURdl/downloadIMGUR.py | IMGURdl/downloadIMGUR.py | # example from:
# https://www.toptal.com/python/beginners-guide-to-concurrency-and-parallelism-in-python
import json
import logging
import os
from pathlib import Path
from urllib.request import urlopen, Request
import requests
logger = logging.getLogger(__name__)
def get_links(client_id):
headers = {'Authorization': 'Client-ID {}'.format(client_id)}
url = 'https://api.imgur.com/3/gallery/random/random/'
resp = requests.get(url, headers=headers)
resp.raise_for_status()
data = resp.json()
# req = Request('https://api.imgur.com/3/gallery/random/random/', headers=headers, method='GET')
# with urlopen(req) as resp:
# data = json.loads(resp.read().decode('utf-8'))
return map(lambda item: item['link'], data['data'])
def download_link(directory, link):
logger.info('Downloading %s', link)
download_path = directory / os.path.basename(link)
with urlopen(link) as image, download_path.open('wb') as f:
f.write(image.read())
def setup_download_dir():
download_dir = Path('images')
if not download_dir.exists():
download_dir.mkdir()
return download_dir
| # example from:
# https://www.toptal.com/python/beginners-guide-to-concurrency-and-parallelism-in-python
import json
import logging
import os
from pathlib import Path
from urllib.request import urlopen, Request
# import requests
logger = logging.getLogger(__name__)
def get_links(client_id):
headers = {'Authorization': 'Client-ID {}'.format(client_id)}
url = 'https://api.imgur.com/3/gallery/random/random/'
resp = requests.get(url, headers=headers)
resp.raise_for_status()
data = resp.json()
# req = Request('https://api.imgur.com/3/gallery/random/random/', headers=headers, method='GET')
# with urlopen(req) as resp:
# data = json.loads(resp.read().decode('utf-8'))
return map(lambda item: item['link'], data['data'])
def download_link(directory, link):
logger.info('Downloading %s', link)
download_path = directory / os.path.basename(link)
with urlopen(link) as image, download_path.open('wb') as f:
f.write(image.read())
def setup_download_dir():
download_dir = Path('images')
if not download_dir.exists():
download_dir.mkdir()
return download_dir
| Python | 0.000001 |
59b3d8b5bce596583f5901f1b3b79a883b7b8e55 | Fix stocktake export | InvenTree/stock/admin.py | InvenTree/stock/admin.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from import_export.admin import ImportExportModelAdmin
from import_export.resources import ModelResource
from import_export.fields import Field
import import_export.widgets as widgets
from .models import StockLocation, StockItem, StockItemAttachment
from .models import StockItemTracking
from .models import StockItemTestResult
from build.models import Build
from company.models import Company, SupplierPart
from order.models import PurchaseOrder, SalesOrder
from part.models import Part
class LocationResource(ModelResource):
""" Class for managing StockLocation data import/export """
parent = Field(attribute='parent', widget=widgets.ForeignKeyWidget(StockLocation))
parent_name = Field(attribute='parent__name', readonly=True)
class Meta:
model = StockLocation
skip_unchanged = True
report_skipped = False
clean_model_instances = True
exclude = [
# Exclude MPTT internal model fields
'lft', 'rght', 'tree_id', 'level',
]
def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
super().after_import(dataset, result, using_transactions, dry_run, **kwargs)
# Rebuild the StockLocation tree(s)
StockLocation.objects.rebuild()
class LocationAdmin(ImportExportModelAdmin):
resource_class = LocationResource
list_display = ('name', 'pathstring', 'description')
search_fields = ('name', 'description')
class StockItemResource(ModelResource):
""" Class for managing StockItem data import/export """
# Custom manaegrs for ForeignKey fields
part = Field(attribute='part', widget=widgets.ForeignKeyWidget(Part))
part_name = Field(attribute='part__full_name', readonly=True)
supplier_part = Field(attribute='supplier_part', widget=widgets.ForeignKeyWidget(SupplierPart))
supplier = Field(attribute='supplier_part__supplier__id', readonly=True)
customer = Field(attribute='customer', widget=widgets.ForeignKeyWidget(Company))
supplier_name = Field(attribute='supplier_part__supplier__name', readonly=True)
status_label = Field(attribute='status_label', readonly=True)
location = Field(attribute='location', widget=widgets.ForeignKeyWidget(StockLocation))
location_name = Field(attribute='location__name', readonly=True)
belongs_to = Field(attribute='belongs_to', widget=widgets.ForeignKeyWidget(StockItem))
build = Field(attribute='build', widget=widgets.ForeignKeyWidget(Build))
parent = Field(attribute='parent', widget=widgets.ForeignKeyWidget(StockItem))
sales_order = Field(attribute='sales_order', widget=widgets.ForeignKeyWidget(SalesOrder))
build_order = Field(attribute='build_order', widget=widgets.ForeignKeyWidget(Build))
purchase_order = Field(attribute='purchase_order', widget=widgets.ForeignKeyWidget(PurchaseOrder))
# Date management
updated = Field(attribute='updated', widget=widgets.DateWidget())
stocktake_date = Field(attribute='stocktake_date', widget=widgets.DateWidget())
def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
super().after_import(dataset, result, using_transactions, dry_run, **kwargs)
# Rebuild the StockItem tree(s)
StockItem.objects.rebuild()
class Meta:
model = StockItem
skip_unchanged = True
report_skipped = False
clean_model_instance = True
exclude = [
# Exclude MPTT internal model fields
'lft', 'rght', 'tree_id', 'level',
]
class StockItemAdmin(ImportExportModelAdmin):
resource_class = StockItemResource
list_display = ('part', 'quantity', 'location', 'status', 'updated')
class StockAttachmentAdmin(admin.ModelAdmin):
list_display = ('stock_item', 'attachment', 'comment')
class StockTrackingAdmin(ImportExportModelAdmin):
list_display = ('item', 'date', 'title')
class StockItemTestResultAdmin(admin.ModelAdmin):
list_display = ('stock_item', 'test', 'result', 'value')
admin.site.register(StockLocation, LocationAdmin)
admin.site.register(StockItem, StockItemAdmin)
admin.site.register(StockItemTracking, StockTrackingAdmin)
admin.site.register(StockItemAttachment, StockAttachmentAdmin)
admin.site.register(StockItemTestResult, StockItemTestResultAdmin)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from import_export.admin import ImportExportModelAdmin
from import_export.resources import ModelResource
from import_export.fields import Field
import import_export.widgets as widgets
from .models import StockLocation, StockItem, StockItemAttachment
from .models import StockItemTracking
from .models import StockItemTestResult
from build.models import Build
from company.models import SupplierPart
from order.models import PurchaseOrder, SalesOrder
from part.models import Part
class LocationResource(ModelResource):
""" Class for managing StockLocation data import/export """
parent = Field(attribute='parent', widget=widgets.ForeignKeyWidget(StockLocation))
parent_name = Field(attribute='parent__name', readonly=True)
class Meta:
model = StockLocation
skip_unchanged = True
report_skipped = False
clean_model_instances = True
exclude = [
# Exclude MPTT internal model fields
'lft', 'rght', 'tree_id', 'level',
]
def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
super().after_import(dataset, result, using_transactions, dry_run, **kwargs)
# Rebuild the StockLocation tree(s)
StockLocation.objects.rebuild()
class LocationAdmin(ImportExportModelAdmin):
resource_class = LocationResource
list_display = ('name', 'pathstring', 'description')
search_fields = ('name', 'description')
class StockItemResource(ModelResource):
""" Class for managing StockItem data import/export """
# Custom manaegrs for ForeignKey fields
part = Field(attribute='part', widget=widgets.ForeignKeyWidget(Part))
part_name = Field(attribute='part__full_ame', readonly=True)
supplier_part = Field(attribute='supplier_part', widget=widgets.ForeignKeyWidget(SupplierPart))
supplier = Field(attribute='supplier_part__supplier__id', readonly=True)
supplier_name = Field(attribute='supplier_part__supplier__name', readonly=True)
status_label = Field(attribute='status_label', readonly=True)
location = Field(attribute='location', widget=widgets.ForeignKeyWidget(StockLocation))
location_name = Field(attribute='location__name', readonly=True)
belongs_to = Field(attribute='belongs_to', widget=widgets.ForeignKeyWidget(StockItem))
build = Field(attribute='build', widget=widgets.ForeignKeyWidget(Build))
sales_order = Field(attribute='sales_order', widget=widgets.ForeignKeyWidget(SalesOrder))
build_order = Field(attribute='build_order', widget=widgets.ForeignKeyWidget(Build))
purchase_order = Field(attribute='purchase_order', widget=widgets.ForeignKeyWidget(PurchaseOrder))
# Date management
updated = Field(attribute='updated', widget=widgets.DateWidget())
stocktake_date = Field(attribute='stocktake_date', widget=widgets.DateWidget())
def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
super().after_import(dataset, result, using_transactions, dry_run, **kwargs)
# Rebuild the StockItem tree(s)
StockItem.objects.rebuild()
class Meta:
model = StockItem
skip_unchanged = True
report_skipped = False
clean_model_instance = True
class StockItemAdmin(ImportExportModelAdmin):
resource_class = StockItemResource
list_display = ('part', 'quantity', 'location', 'status', 'updated')
class StockAttachmentAdmin(admin.ModelAdmin):
list_display = ('stock_item', 'attachment', 'comment')
class StockTrackingAdmin(ImportExportModelAdmin):
list_display = ('item', 'date', 'title')
class StockItemTestResultAdmin(admin.ModelAdmin):
list_display = ('stock_item', 'test', 'result', 'value')
admin.site.register(StockLocation, LocationAdmin)
admin.site.register(StockItem, StockItemAdmin)
admin.site.register(StockItemTracking, StockTrackingAdmin)
admin.site.register(StockItemAttachment, StockAttachmentAdmin)
admin.site.register(StockItemTestResult, StockItemTestResultAdmin)
| Python | 0 |
1323154dfbc453959f3d64fef439288004f6461e | add test for SyntaxError on def f(a): global a | Lib/test/test_compile.py | Lib/test/test_compile.py | from test_support import verbose, TestFailed
if verbose:
print 'Running tests on argument handling'
try:
exec('def f(a, a): pass')
raise TestFailed, "duplicate arguments"
except SyntaxError:
pass
try:
exec('def f(a = 0, a = 1): pass')
raise TestFailed, "duplicate keyword arguments"
except SyntaxError:
pass
try:
exec('def f(a): global a; a = 1')
raise TestFailed, "variable is global and local"
except SyntaxError:
pass
| from test_support import verbose, TestFailed
if verbose:
print 'Running test on duplicate arguments'
try:
exec('def f(a, a): pass')
raise TestFailed, "duplicate arguments"
except SyntaxError:
pass
try:
exec('def f(a = 0, a = 1): pass')
raise TestFailed, "duplicate keyword arguments"
except SyntaxError:
pass
| Python | 0.000193 |
69a735cd134723e4d47c02d21f4ff85a65d28148 | enable test_main.py | Lib/test/test_lib2to3.py | Lib/test/test_lib2to3.py | # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import (test_fixers, test_pytree, test_util, test_refactor,
test_parser, test_main as test_main_)
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers, test_pytree,test_util, test_refactor, test_parser,
test_main_):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import (test_fixers, test_pytree, test_util, test_refactor,
test_parser)
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers, test_pytree,test_util, test_refactor, test_parser):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| Python | 0.000004 |
92c123820be466ad76078537b457bb596b86c338 | Put some meaningful standard includes in the sample configuration | dwight_chroot/config.py | dwight_chroot/config.py | import copy
import os
from .exceptions import (
CannotLoadConfiguration,
InvalidConfiguration,
NotRootException,
UnknownConfigurationOptions,
)
from .include import Include
_USER_CONFIG_FILE_PATH = os.path.expanduser("~/.dwightrc")
_USER_CONFIG_FILE_TEMPLATE = """# AUTOGENERATED DEFAULT CONFIG
# ROOT_IMAGE = "/some/path/here"
# INCLUDES = [Include("/proc", "/proc/"),
# Include("/dev", "/dev/"),
# Include("/dev/pts", "/dev/pts"),
# Include("/run", "/run"),
# Include("/sys", "/sys"),
# Include("/home", "/home/"),
# Include("/etc/passwd", "/etc/passwd"),
# Include("/etc/group", "/etc/group")]
# ENVIRON = {}
# UID = None # None means taking the uid from SUDO_UID
# PWD = os.path.abspath(".")
# NUM_LOOP_DEVICES = 64 # The number of loop to ensure that exist before chrooting
"""
class DwightConfiguration(object):
def __init__(self):
super(DwightConfiguration, self).__init__()
self._config = dict(
ROOT_IMAGE = None,
INCLUDES = [],
ENVIRON = {},
GID = None,
UID = None,
PWD = os.path.abspath("."),
NUM_LOOP_DEVICES = None,
)
self._known_keys = set(self._config)
def __getitem__(self, key):
return self._config[key]
def __setitem__(self, key, value):
if key not in self._known_keys:
raise UnknownConfigurationOptions("Unknown configuration option: {0!r}".format(key))
self._config[key] = value
def process_user_config_file(self, user_config_file_path=_USER_CONFIG_FILE_PATH):
if not os.path.isfile(user_config_file_path):
self._ensure_user_config_file(user_config_file_path)
with open(user_config_file_path) as user_config_file:
self.load_from_string(user_config_file)
def _ensure_user_config_file(self, user_config_file_path):
if not os.path.isdir(os.path.dirname(user_config_file_path)):
os.makedirs(os.path.dirname(user_config_file_path))
with open(user_config_file_path, "w") as user_config_file:
user_config_file.write(_USER_CONFIG_FILE_TEMPLATE)
def _update_config(self, config):
append_keys = ['INCLUDES']
appended_items = dict((append_key, config[append_key] + self._config[append_key])
for append_key in append_keys)
self._config.update(config)
self._config.update(appended_items)
def load_from_string(self, s):
d = copy.deepcopy(self._config)
try:
exec(s, {"Include" : Include}, d)
except Exception as e:
raise CannotLoadConfiguration("Cannot load configuration ({0})".format(e))
for key in list(d):
if key.startswith("_") or not key[0].isupper():
d.pop(key)
self._check_unknown_parameters(d)
self._update_config(d)
def _check_unknown_parameters(self, d):
unknown = set(d) - self._known_keys
if unknown:
raise UnknownConfigurationOptions("Unknown configuration options: {0}".format(", ".join(map(repr, unknown))))
def check(self):
if self._config.get("ROOT_IMAGE", None) is None:
raise InvalidConfiguration("ROOT_IMAGE option is not set")
| import copy
import os
from .exceptions import (
CannotLoadConfiguration,
InvalidConfiguration,
NotRootException,
UnknownConfigurationOptions,
)
from .include import Include
_USER_CONFIG_FILE_PATH = os.path.expanduser("~/.dwightrc")
_USER_CONFIG_FILE_TEMPLATE = """# AUTOGENERATED DEFAULT CONFIG
# ROOT_IMAGE = "/some/path/here"
# INCLUDES = [Include("/dest/path/in/chroot", "/host/path")]
# ENVIRON = {}
# UID = None # None means taking the uid from SUDO_UID
# PWD = os.path.abspath(".")
"""
class DwightConfiguration(object):
def __init__(self):
super(DwightConfiguration, self).__init__()
self._config = dict(
ROOT_IMAGE = None,
INCLUDES = [],
ENVIRON = {},
GID = None,
UID = None,
PWD = os.path.abspath("."),
NUM_LOOP_DEVICES = None,
)
self._known_keys = set(self._config)
def __getitem__(self, key):
return self._config[key]
def __setitem__(self, key, value):
if key not in self._known_keys:
raise UnknownConfigurationOptions("Unknown configuration option: {0!r}".format(key))
self._config[key] = value
def process_user_config_file(self, user_config_file_path=_USER_CONFIG_FILE_PATH):
if not os.path.isfile(user_config_file_path):
self._ensure_user_config_file(user_config_file_path)
with open(user_config_file_path) as user_config_file:
self.load_from_string(user_config_file)
def _ensure_user_config_file(self, user_config_file_path):
if not os.path.isdir(os.path.dirname(user_config_file_path)):
os.makedirs(os.path.dirname(user_config_file_path))
with open(user_config_file_path, "w") as user_config_file:
user_config_file.write(_USER_CONFIG_FILE_TEMPLATE)
def _update_config(self, config):
append_keys = ['INCLUDES']
appended_items = dict((append_key, config[append_key] + self._config[append_key])
for append_key in append_keys)
self._config.update(config)
self._config.update(appended_items)
def load_from_string(self, s):
d = copy.deepcopy(self._config)
try:
exec(s, {"Include" : Include}, d)
except Exception as e:
raise CannotLoadConfiguration("Cannot load configuration ({0})".format(e))
for key in list(d):
if key.startswith("_") or not key[0].isupper():
d.pop(key)
self._check_unknown_parameters(d)
self._update_config(d)
def _check_unknown_parameters(self, d):
unknown = set(d) - self._known_keys
if unknown:
raise UnknownConfigurationOptions("Unknown configuration options: {0}".format(", ".join(map(repr, unknown))))
def check(self):
if self._config.get("ROOT_IMAGE", None) is None:
raise InvalidConfiguration("ROOT_IMAGE option is not set")
| Python | 0 |
ae23c81ee18726755ed770d1d3654e50d28fb028 | Update views.py | chat/views.py | chat/views.py | from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.contrib import auth
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.views.decorators.csrf import csrf_exempt
import json
from chat.models import Message, ChatUser
from django.contrib.auth.models import User
import datetime
from django.utils.timezone import now as utcnow
def index(request):
if request.method == 'POST':
print request.POST
logged_users = []
if request.user.username and request.user.profile.is_chat_user:
context = {'logged_users':logged_users}
return render(request, 'djangoChat/index.html', context)
else:
return HttpResponseRedirect(reverse('login'))
def login(request):
if request.user.username and request.user.profile.is_chat_user:
return HttpResponseRedirect(reverse('index'))
context = {'error':''}
if request.method == 'POST':
username = request.POST.get('username','') #retunr '' if no username
password = request.POST.get('password','')
user = auth.authenticate(username=username,password=password)
if user is not None:
auth.login(request,user)
cu = request.user.profile
cu.is_chat_user = True
cu.last_accessed = utcnow()
cu.save()
return HttpResponseRedirect(reverse('index'))
else:
context['error'] = ' wrong credentials try again'
return render(request,'djangoChat/login.html',context)
context.update(csrf(request))
return render(request,'djangoChat/login.html',context)
def logout(request):
cu = request.user.profile
cu.is_chat_user = False
cu.save()
return HttpResponse('succesfully logged out of chat')
@csrf_exempt
def chat_api(request):
if request.method == 'POST':
d = json.loads(request.body)
msg = d.get('msg')
user = request.user.username
gravatar = request.user.profile.gravatar_url
m = Message(user=user,message=msg,gravatar=gravatar)
m.save()
res = {'id':m.id,'msg':m.message,'user':m.user,'time':m.time.strftime('%I:%M:%S %p').lstrip('0'),'gravatar':m.gravatar}
data = json.dumps(res)
return HttpResponse(data,content_type="application/json")
# get request
r = Message.objects.order_by('-time')[:70]
res = []
for msgs in reversed(r) :
res.append({'id':msgs.id,'user':msgs.user,'msg':msgs.message,'time':msgs.time.strftime('%I:%M:%S %p').lstrip('0'),'gravatar':msgs.gravatar})
data = json.dumps(res)
return HttpResponse(data,content_type="application/json")
def logged_chat_users(request):
u = ChatUser.objects.filter(is_chat_user=True)
for j in u:
elapsed = utcnow() - j.last_accessed
if elapsed > datetime.timedelta(seconds=35):
j.is_chat_user = False
j.save()
uu = ChatUser.objects.filter(is_chat_user=True)
d = []
for i in uu:
d.append({'username': i.username,'gravatar':i.gravatar_url,'id':i.userID})
data = json.dumps(d)
return HttpResponse(data,content_type="application/json")
def update_time(request):
if request.user.username:
u = request.user.profile
u.last_accessed = utcnow()
u.is_chat_user = True
u.save()
return HttpResponse('updated')
return HttpResponse('who are you?')
| from django.shortcuts import render
# Create your views here.
| Python | 0 |
3bfd3ea70980acc02bf35b1654b6c616f4af45ec | update error page | chat/views.py | chat/views.py | import random
import string
from django.db import transaction
from django.shortcuts import render, redirect
import haikunator
from .models import Room
def about(request):
return render(request, "chat/about.html")
def home(request):
return render(request, "chat/about.html")
# def new_room(request):
# """
# Randomly create a new room, and redirect to it.
# """
# new_room = None
# while not new_room:
# with transaction.atomic():
# label = haikunator.haikunate()
# if Room.objects.filter(label=label).exists():
# continue
# new_room = Room.objects.create(label=label)
# return redirect(chat_room, label=label)
def create_room(request):
#Create a new room for lang ren sha
#
if request.method == 'GET':
return render(request, "chat/create_room.html", {})
else:
label = request.POST['id']
if Room.objects.filter(label=label).exists():
return render(request, "chat/error.html", {'messages' : 'this name has been used'})
playNumber = 0
roleList = request.POST['cunmin'] + ',' + request.POST['langren']
playNumber = playNumber + int(request.POST['cunmin']) + int(request.POST['langren'])
if request.POST.get('nvwu', False):
roleList = roleList + ',' + '1'
playNumber = playNumber + 1
else:
roleList = roleList + ',' + '0'
if request.POST.get('nvwu', False):
roleList = roleList + ',' + '1'
playNumber = playNumber + 1
else:
roleList = roleList + ',' + '0'
if request.POST.get('nvwu', False):
roleList = roleList + ',' + '1'
playNumber = playNumber + 1
else:
roleList = roleList + ',' + '0'
if request.POST.get('nvwu', False):
roleList = roleList + ',' + '1'
playNumber = playNumber + 1
else:
roleList = roleList + ',' + '0'
gameStart = 0
new_room = Room.objects.create(label=label, gameStart=gameStart, playerNumber=playNumber, roleList=roleList)
return redirect(chat_room, label=label)
def join_room(request):
#Create a new room for lang ren sha
#
if request.method == 'GET':
return render(request, "chat/join_room.html", {})
label = request.POST['label']
return redirect(chat_room, label=label)
def chat_room(request, label):
"""
Room view - show the room, with latest messages.
The template for this view has the WebSocket business to send and stream
messages, so see the template for where the magic happens.
"""
# If the room with the given label doesn't exist, automatically create it
# upon first visit (a la etherpad).
room = Room.objects.filter(label=label).first()
# We want to show the last 50 messages, ordered most-recent-last
messages = reversed(room.messages.order_by('-timestamp')[:50])
return render(request, "chat/room.html", {
'room': room,
'messages': messages,
})
| import random
import string
from django.db import transaction
from django.shortcuts import render, redirect
import haikunator
from .models import Room
def about(request):
return render(request, "chat/about.html")
def home(request):
return render(request, "chat/about.html")
# def new_room(request):
# """
# Randomly create a new room, and redirect to it.
# """
# new_room = None
# while not new_room:
# with transaction.atomic():
# label = haikunator.haikunate()
# if Room.objects.filter(label=label).exists():
# continue
# new_room = Room.objects.create(label=label)
# return redirect(chat_room, label=label)
def create_room(request):
#Create a new room for lang ren sha
#
if request.method == 'GET':
return render(request, "chat/create_room.html", {})
else:
label = request.POST['id']
if Room.objects.filter(label=label).exists():
return render(request, "chat/error.html", {'messages' : 'this name has been used'})
playNumber = 0
roleList = request.POST['cunmin'] + ',' + request.POST['langren']
playNumber = playNumber + int(request.POST['cunmin']) + int(request.POST['langren'])
if request.POST.get('nvwu', False):
roleList = roleList + ',' + '1'
playNumber = playNumber + 1
else:
roleList = roleList + ',' + '0'
if request.POST.get('nvwu', False):
roleList = roleList + ',' + '1'
playNumber = playNumber + 1
else:
roleList = roleList + ',' + '0'
if request.POST.get('nvwu', False):
roleList = roleList + ',' + '1'
playNumber = playNumber + 1
else:
roleList = roleList + ',' + '0'
if request.POST.get('nvwu', False):
roleList = roleList + ',' + '1'
playNumber = playNumber + 1
else:
roleList = roleList + ',' + '0'
gameStart = 0
new_room = Room.objects.create(label=label, gameStart=gameStart, playerNumber=playNumber, roleList=roleList)
return redirect(chat_room, label=label)
def join_room(request):
#Create a new room for lang ren sha
#
label = request.POST['label']
return redirect(chat_room, label=label)
def chat_room(request, label):
"""
Room view - show the room, with latest messages.
The template for this view has the WebSocket business to send and stream
messages, so see the template for where the magic happens.
"""
# If the room with the given label doesn't exist, automatically create it
# upon first visit (a la etherpad).
room = Room.objects.filter(label=label).first()
# We want to show the last 50 messages, ordered most-recent-last
messages = reversed(room.messages.order_by('-timestamp')[:50])
return render(request, "chat/room.html", {
'room': room,
'messages': messages,
})
| Python | 0.000001 |
e5a3a49cfe6953160e6e3fbdf1ce9f55dafb2b40 | Change data-checker messages to use Python logger | check_data.py | check_data.py | #!/usr/bin/env python
import argparse
import logging
import os
import sys
from six import string_types
import yaml
# ABCs moved in Python 3, but six doesn't keep track of them.
try:
from collections.abc import Sequence
except ImportError:
from collections import Sequence
REPO_ROOT = os.path.dirname(__file__)
parser = argparse.ArgumentParser(
description='Verify the format of a '
'Welcome to Night Vale episode data file')
parser.add_argument(
'--data_file', '-d',
type=argparse.FileType('r'),
default=os.path.join(REPO_ROOT, 'episode_info.yaml'),
help='YAML file with episode segment information')
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(message)s')
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
def check_overall_data_type(all_episode_data):
"""
The file should describe a list or other sequence.
"""
ok = (
isinstance(all_episode_data, Sequence) and
not isinstance(all_episode_data, string_types))
if not ok:
raise TypeError('Top-level data structure is not a list')
def check_required_segment_data(segment):
"""
Make sure the segment has all required fields.
"""
try:
title = segment['title']
except KeyError:
raise KeyError('Segment is missing its title')
if not isinstance(title, string_types):
raise TypeError('Segment title must be a string')
try:
start = segment['start']
except KeyError:
raise KeyError('Segment is missing its start time')
if not isinstance(start, Sequence):
raise TypeError('Segment start time must be a list of length 2')
if len(start) < 2:
raise TypeError('Segment start time must have two elements')
try:
start_minutes = float(start[0])
except ValueError:
raise TypeError('Segment start minute must be castable to float')
if start_minutes < 0:
raise ValueError('Segment start minute must not be negative')
try:
start_seconds = float(start[1])
except ValueError:
raise TypeError('Segment start second must be castable to float')
if start_seconds < 0:
raise ValueError('Segment start second must not be negative')
def check_required_episode_data(episode):
"""
Make sure the episode has all required fields.
"""
try:
episode_number = episode['episode_number']
except KeyError:
raise KeyError('Episode is missing its episode number')
if not (
isinstance(episode_number, int) or
isinstance(episode_number, string_types)):
raise TypeError('Episode number must be a string or an integer')
try:
title = episode['title']
except KeyError:
raise KeyError('Episode is missing its title')
if not isinstance(title, string_types):
raise TypeError('Episode title must be a string')
try:
mp3_url = episode['mp3_url']
except KeyError:
raise KeyError('Episode is missing its MP3 URL')
if not isinstance(mp3_url, string_types):
raise TypeError('Episode MP3 URL must be a string')
try:
segments = episode['segments']
except KeyError:
raise KeyError('Episode is missing its segments')
if not isinstance(segments, Sequence):
raise TypeError('Episode MP3 URL must be a list')
if not segments:
raise ValueError('Episode must have at least one segment')
for segment in segments:
check_required_segment_data(segment)
logger.info(' Segment data OK for "{title}"'.format(**segment))
def main(*args):
script_args = parser.parse_args(args)
all_episode_data = yaml.safe_load(script_args.data_file)
check_overall_data_type(all_episode_data)
logger.info('Overall data type OK\n')
for episode in all_episode_data:
check_required_episode_data(episode)
logger.info('Episode data OK for "{title}"\n'.format(**episode))
logger.info('All OK!')
if __name__ == '__main__':
main(*sys.argv[1:])
| #!/usr/bin/env python
from __future__ import print_function
import argparse
import os
import sys
from six import string_types
import yaml
# ABCs moved in Python 3, but six doesn't keep track of them.
try:
from collections.abc import Sequence
except ImportError:
from collections import Sequence
REPO_ROOT = os.path.dirname(__file__)
parser = argparse.ArgumentParser(
description='Verify the format of a '
'Welcome to Night Vale episode data file')
parser.add_argument(
'--data_file', '-d',
type=argparse.FileType('r'),
default=os.path.join(REPO_ROOT, 'episode_info.yaml'),
help='YAML file with episode segment information')
def check_overall_data_type(all_episode_data):
"""
The file should describe a list or other sequence.
"""
ok = (
isinstance(all_episode_data, Sequence) and
not isinstance(all_episode_data, string_types))
if not ok:
raise TypeError('Top-level data structure is not a list')
def check_required_segment_data(segment):
"""
Make sure the segment has all required fields.
"""
try:
title = segment['title']
except KeyError:
raise KeyError('Segment is missing its title')
if not isinstance(title, string_types):
raise TypeError('Segment title must be a string')
try:
start = segment['start']
except KeyError:
raise KeyError('Segment is missing its start time')
if not isinstance(start, Sequence):
raise TypeError('Segment start time must be a list of length 2')
if len(start) < 2:
raise TypeError('Segment start time must have two elements')
try:
start_minutes = float(start[0])
except ValueError:
raise TypeError('Segment start minute must be castable to float')
if start_minutes < 0:
raise ValueError('Segment start minute must not be negative')
try:
start_seconds = float(start[1])
except ValueError:
raise TypeError('Segment start second must be castable to float')
if start_seconds < 0:
raise ValueError('Segment start second must not be negative')
def check_required_episode_data(episode):
"""
Make sure the episode has all required fields.
"""
try:
episode_number = episode['episode_number']
except KeyError:
raise KeyError('Episode is missing its episode number')
if not (
isinstance(episode_number, int) or
isinstance(episode_number, string_types)):
raise TypeError('Episode number must be a string or an integer')
try:
title = episode['title']
except KeyError:
raise KeyError('Episode is missing its title')
if not isinstance(title, string_types):
raise TypeError('Episode title must be a string')
try:
mp3_url = episode['mp3_url']
except KeyError:
raise KeyError('Episode is missing its MP3 URL')
if not isinstance(mp3_url, string_types):
raise TypeError('Episode MP3 URL must be a string')
try:
segments = episode['segments']
except KeyError:
raise KeyError('Episode is missing its segments')
if not isinstance(segments, Sequence):
raise TypeError('Episode MP3 URL must be a list')
if not segments:
raise ValueError('Episode must have at least one segment')
for segment in segments:
check_required_segment_data(segment)
print(' Segment data OK for "{title}"'.format(**segment))
def main(*args):
script_args = parser.parse_args(args)
all_episode_data = yaml.safe_load(script_args.data_file)
check_overall_data_type(all_episode_data)
print('Overall data type OK\n')
for episode in all_episode_data:
check_required_episode_data(episode)
print('Episode data OK for "{title}"\n'.format(**episode))
print('All OK!')
if __name__ == '__main__':
main(*sys.argv[1:])
| Python | 0 |
a6acf8a68ee5b2ef185f279b6169a34c2b70896d | Increase feature version | acmd/__init__.py | acmd/__init__.py | # coding: utf-8
""" aem-cmd main module. """
__version__ = '0.12.0b'
# Standard error codes that can be returned from any tool.
OK = 0
UNCHANGED = 1
USER_ERROR = 4711
CONFIG_ERROR = 4712
SERVER_ERROR = 4713
INTERNAL_ERROR = 4714
import acmd.logger
init_log = acmd.logger.init_log
log = acmd.logger.log
warning = acmd.logger.warning
error = acmd.logger.error
import acmd.server
Server = acmd.server.Server
import acmd.config
read_config = acmd.config.read_config
get_rcfilename = acmd.config.get_rcfilename
import acmd.deploy
setup_rcfile = acmd.deploy.setup_rcfile
deploy_bash_completion = acmd.deploy.deploy_bash_completion
get_current_version = acmd.deploy.get_current_version
import acmd.props
parse_properties = acmd.props.parse_properties
import acmd.repo
tool_repo = acmd.repo.tool_repo
tool = acmd.repo.tool
import_projects = acmd.repo.import_projects
| # coding: utf-8
""" aem-cmd main module. """
__version__ = '0.11.1b'
# Standard error codes that can be returned from any tool.
OK = 0
UNCHANGED = 1
USER_ERROR = 4711
CONFIG_ERROR = 4712
SERVER_ERROR = 4713
INTERNAL_ERROR = 4714
import acmd.logger
init_log = acmd.logger.init_log
log = acmd.logger.log
warning = acmd.logger.warning
error = acmd.logger.error
import acmd.server
Server = acmd.server.Server
import acmd.config
read_config = acmd.config.read_config
get_rcfilename = acmd.config.get_rcfilename
import acmd.deploy
setup_rcfile = acmd.deploy.setup_rcfile
deploy_bash_completion = acmd.deploy.deploy_bash_completion
get_current_version = acmd.deploy.get_current_version
import acmd.props
parse_properties = acmd.props.parse_properties
import acmd.repo
tool_repo = acmd.repo.tool_repo
tool = acmd.repo.tool
import_projects = acmd.repo.import_projects
| Python | 0 |
61c9d4f6798d81d6ae6d2e5641a8432121de52fa | Implement function: get_network_adapters | cloudbaseinit/osutils/freebsd.py | cloudbaseinit/osutils/freebsd.py | from cloudbaseinit.osutils import base
import subprocess
class FreeBSDUtils(base.BaseOSUtils):
def reboot(self):
if ( os.system('reboot') != 0 ):
raise Exception('Reboot failed')
def user_exists(self, username):
try:
subprocess.check_output(["id", username])
except CalledProcessError:
return False
return True
# not completed
def create_user(self, username, password, invite_group, password_expires=False):
"""
invite_group must be a list of string.
"""
home_dir = '/home/' + username
user_shell = '/bin/tcsh'
user_comment = 'Created by bsdcloud-init'
grouplist = ''
assert isinstance(invite_group, list), "invite_group must be a list."
assert invite_group, "invite_group cannot be empty."
for i in invite_group:
grouplist += i+','
grouplist = grouplist[:-1]
pw_cmd = "echo " + password + " | pw useradd -n " + username + " -c '" + user_comment + "' -d '" + user_shell + "' -s /bin/tcsh -h 0 -G " + grouplist
subprocess.check_call(pw_cmd, shell=True)
subprocess.check_call("mkdir %s" % (home_dir), shell=True)
subprocess.check_call("chown -R %s:%s %s" % (username, username, home_dir), shell=True)
def set_host_name(self, new_host_name):
try:
subprocess.check_output(["hostname", new_host_name])
cmd_newhost = "[ -z `egrep '^hostname' /etc/rc.conf` ] && { echo 'hostname=\"%s\"' >> /etc/rc.conf } || { sed -e 's/^hostname=.*$/hostname=\"%s\"/' -I '' /etc/rc.conf }" % (new_host_name, new_host_name)
subprocess.check_output(cmd_newhost, shell=True)
return False
except CalledProcessError:
raise Exception(CalledProcessError.output)
def sanitize_shell_input(self, value):
pass
def set_user_password(self, username, password):
pw_cmd = "echo " + password + " | pw usermod -n " + username + " -h 0"
subprocess.check_call(pw_cmd, shell=True)
def add_user_to_local_group(self, username, groupname):
pass
def get_user_home(self, username):
pass
def get_network_adapters(self):
"""
This fucntion will return a list of interface.
"""
if_list = subprocess.check_output(['ifconfig', '-l']).split(' ')
return if_list
def set_static_network_config(self, adapter_name, address, netmask,
broadcast, gateway, dnsdomain,
dnsnameservers):
pass
def set_config_value(self, name, value, section=None):
pass
def get_config_value(self, name, section=None):
pass
def wait_for_boot_completion(self):
pass
def terminate(self):
pass
def get_default_gateway(self):
"""
We cannot handle mutiple default gateway.
"""
interface = subprocess.check_output("route get default | grep interface", shell=True).split()[1]
gateway_ip = subprocess.check_output("route get default | grep gateway", shell=True).split()[1]
return (interface, gateway_ip)
def check_static_route_exists(self, destination):
pass
def add_static_route(self, destination, mask, next_hop, interface_index,
metric):
pass
def get_os_version(self):
pass
def get_volume_label(self, drive):
pass
| from cloudbaseinit.osutils import base
import subprocess
class FreeBSDUtils(base.BaseOSUtils):
def reboot(self):
if ( os.system('reboot') != 0 ):
raise Exception('Reboot failed')
def user_exists(self, username):
try:
subprocess.check_output(["id", username])
except CalledProcessError:
return False
return True
# not completed
def create_user(self, username, password, invite_group, password_expires=False):
"""
invite_group must be a list of string.
"""
home_dir = '/home/' + username
user_shell = '/bin/tcsh'
user_comment = 'Created by bsdcloud-init'
grouplist = ''
assert isinstance(invite_group, list), "invite_group must be a list."
assert invite_group, "invite_group cannot be empty."
for i in invite_group:
grouplist += i+','
grouplist = grouplist[:-1]
pw_cmd = "echo " + password + " | pw useradd -n " + username + " -c '" + user_comment + "' -d '" + user_shell + "' -s /bin/tcsh -h 0 -G " + grouplist
subprocess.check_call(pw_cmd, shell=True)
subprocess.check_call("mkdir %s" % (home_dir), shell=True)
subprocess.check_call("chown -R %s:%s %s" % (username, username, home_dir), shell=True)
def set_host_name(self, new_host_name):
try:
subprocess.check_output(["hostname", new_host_name])
cmd_newhost = "[ -z `egrep '^hostname' /etc/rc.conf` ] && { echo 'hostname=\"%s\"' >> /etc/rc.conf } || { sed -e 's/^hostname=.*$/hostname=\"%s\"/' -I '' /etc/rc.conf }" % (new_host_name, new_host_name)
subprocess.check_output(cmd_newhost, shell=True)
return False
except CalledProcessError:
raise Exception(CalledProcessError.output)
def sanitize_shell_input(self, value):
pass
def set_user_password(self, username, password):
pw_cmd = "echo " + password + " | pw usermod -n " + username + " -h 0"
subprocess.check_call(pw_cmd, shell=True)
def add_user_to_local_group(self, username, groupname):
pass
def get_user_home(self, username):
pass
def get_network_adapters(self):
pass
def set_static_network_config(self, adapter_name, address, netmask,
broadcast, gateway, dnsdomain,
dnsnameservers):
pass
def set_config_value(self, name, value, section=None):
pass
def get_config_value(self, name, section=None):
pass
def wait_for_boot_completion(self):
pass
def terminate(self):
pass
def get_default_gateway(self):
"""
We cannot handle mutiple default gateway.
"""
interface = subprocess.check_output("route get default | grep interface", shell=True).split()[1]
gateway_ip = subprocess.check_output("route get default | grep gateway", shell=True).split()[1]
return (interface, gateway_ip)
def check_static_route_exists(self, destination):
pass
def add_static_route(self, destination, mask, next_hop, interface_index,
metric):
pass
def get_os_version(self):
pass
def get_volume_label(self, drive):
pass
| Python | 0.999999 |
deb96907dc9c96e0ff8772d14cad765cc5e47602 | improve crawler | crawler/pagecrawler/pagecrawler/spiders/article_spider.py | crawler/pagecrawler/pagecrawler/spiders/article_spider.py | import scrapy
from pagecrawler.items import ArticlecrawlerItem
from pagecrawler.model_article import Articles
class ArticleSpider(scrapy.Spider):
name = "articlespider"
filename = "delicious_article_dataset.dat"
# load url in bookmarks from dataset
start_urls = []
crawled_urls = {}
# url_count = 100
counter = 0
written_counter = 0
with open(filename, 'r') as f:
for row in f:
# url_count -= 1
# if url_count <= 0:
# break
fields = row.split("\t")
if not fields[3].startswith("http"):
continue
start_urls.append(fields[3])
print "field:" + fields[3]
print start_urls
def parse(self, response):
items = []
return self.parse_article(response)
def parse_article(self, response):
self.log("==========Scraping:========= " + response.url)
item = ArticlecrawlerItem()
ArticleSpider.counter += 1
item['link'] = response.url
item['title'] = response.xpath('//title/text()').extract()
item['summary'] = response.xpath('//meta[@name="description"]/@content').extract()
item['keywords'] = response.xpath('//meta[@name="news_keywords"]/@content').extract()
item['text'] = response.xpath('//body//p//text()').extract()
self.log("=========filled in item for:========" + response.url)
# e.g. "indexing function", link = item.[]('link')
if len(item['title']) == 0:
return
title = item['title'][0]
link = response.url.lower()
if link.startswith("https://www.youtube.com/"):
return
if link in ArticleSpider.crawled_urls:
return
else:
ArticleSpider.crawled_urls[link] = True
if len(item['summary']) == 0:
return
summary = item['summary'][0].rstrip('\r\n')
if len(summary) == 0:
return
keywords = ""
if len(item['keywords']) > 0:
keywords = ', '.join(item['keywords'])
if len(item['text']) == 0:
return
text = ' '.join(item['text'])
if len(text) < 10:
return
print "createing article"
ArticleSpider.written_counter += 1
article = Articles.create(title=title, link=link, summary=summary, keywords=keywords, text=text)
print "########################### " + str(ArticleSpider.counter) + "/" + str(len(ArticleSpider.start_urls)) + " written " + str(ArticleSpider.written_counter) + " #################################"
# yield item
| import scrapy
from pagecrawler.items import ArticlecrawlerItem
from pagecrawler.model_article import Articles
class ArticleSpider(scrapy.Spider):
name = "articlespider"
filename = "delicious_article_dataset.dat"
# load url in bookmarks from dataset
start_urls = []
crawled_urls = {}
# url_count = 100
counter = 0
with open(filename, 'r') as f:
for row in f:
# url_count -= 1
# if url_count <= 0:
# break
fields = row.split("\t")
if not fields[3].startswith("http"):
continue
start_urls.append(fields[3])
print "field:" + fields[3]
print start_urls
def parse(self, response):
items = []
return self.parse_article(response)
def parse_article(self, response):
self.log("==========Scraping:========= " + response.url)
item = ArticlecrawlerItem()
ArticleSpider.counter += 1
item['link'] = response.url
item['title'] = response.xpath('//title/text()').extract()
item['summary'] = response.xpath('//meta[@name="description"]/@content').extract()
item['keywords'] = response.xpath('//meta[@name="news_keywords"]/@content').extract()
item['text'] = response.xpath('//body//p//text()').extract()
self.log("=========filled in item for:========" + response.url)
# e.g. "indexing function", link = item.[]('link')
if len(item['title']) == 0:
return
title = item['title'][0]
link = response.url.lower()
if link.startswith("https://www.youtube.com/"):
return
if link in ArticleSpider.crawled_urls:
return
else:
ArticleSpider.crawled_urls[link] = True
if len(item['summary']) == 0:
return
summary = item['summary'][0].rstrip('\r\n')
if len(summary) == 0:
return
keywords = ""
if len(item['keywords']) > 0:
keywords = ', '.join(item['keywords'])
if len(item['text']) == 0:
return
text = ' '.join(item['text'])
if len(text) < 10:
return
print "createing article"
article = Articles.create(title=title, link=link, summary=summary, keywords=keywords, text=text)
print "#################################" + str(ArticleSpider.counter) + "/" + str(len(ArticleSpider.start_urls)) + "###########################"
# yield item
| Python | 0.000129 |
39cd42fa27b87b9b1604635236f8860759a4a8db | Set Dialog's orientation to vertical | ELiDE/ELiDE/dialog.py | ELiDE/ELiDE/dialog.py | """Generic dialog boxes and menus, for in front of a Board
Mostly these will be added as children of KvLayoutFront but you
could use them independently if you wanted.
"""
from kivy.properties import DictProperty, ListProperty, StringProperty, NumericProperty, VariableListProperty
from kivy.core.text import DEFAULT_FONT
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.widget import Widget
from kivy.lang import Builder
class Box(Widget):
padding = VariableListProperty([6, 6, 6, 6])
border = ListProperty([4, 4, 4, 4])
font_size = StringProperty('15sp')
font_name = StringProperty(DEFAULT_FONT)
background = StringProperty(
'atlas://data/images/defaulttheme/textinput')
background_color = ListProperty([1, 1, 1, 1])
foreground_color = ListProperty([0, 0, 0, 1])
class MessageBox(Box):
"""Looks like a TextInput but doesn't accept any input.
Does support styled text with BBcode.
"""
line_spacing = NumericProperty(0)
text = StringProperty()
class DialogMenu(Box, BoxLayout):
"""Some buttons that make the game do things."""
options = ListProperty()
"""List of pairs of (button_text, partial)"""
funcs = DictProperty({})
"""Dict of functions to be used in place of string partials in the options"""
def on_options(self, *args):
self.clear_widgets()
for txt, part in self.options:
if not callable(part):
part = self.funcs[part]
self.add_widget(Button(text=txt, on_press=part))
class Dialog(BoxLayout):
"""MessageBox with a DialogMenu beneath it"""
message_kwargs = DictProperty({})
menu_kwargs = DictProperty({})
def on_message_kwargs(self, *args):
for k, v in self.message_kwargs.items():
setattr(self.ids.msg, k, v)
def on_menu_kwargs(self, *args):
for k, v in self.menu_kwargs.items():
setattr(self.ids.menu, k, v)
Builder.load_string("""
<MessageBox>:
canvas.before:
Color:
rgba: self.background_color
BorderImage:
border: self.border
pos: self.pos
size: self.size
source: self.background
ScrollView:
id: sv
do_scroll_x: False
size: root.width - root.border[1] - root.border[3], root.height - root.border[0] - root.border[2]
Label:
markup: True
text: root.text
font_name: root.font_name
font_size: root.font_size
line_spacing: root.line_spacing
width: sv.width
size_hint_y: None
text_size: self.size
<DialogMenu>:
canvas.before:
Color:
rgba: self.background_color
BorderImage:
border: self.border
pos: self.pos
size: self.size
source: self.background
orientation: 'vertical'
<Dialog>:
orientation: 'vertical'
MessageBox:
id: msg
ScrollView:
DialogMenu:
size_hint_y: None
id: menu
""")
if __name__ == "__main__":
from kivy.base import runTouchApp
dia = Dialog(
message_kwargs={'text': 'I am a dialog'},
menu_kwargs={'options': [('one', lambda: None), ('two', lambda: None)]}
) | """Generic dialog boxes and menus, for in front of a Board
Mostly these will be added as children of KvLayoutFront but you
could use them independently if you wanted.
"""
from kivy.properties import DictProperty, ListProperty, StringProperty, NumericProperty, VariableListProperty
from kivy.core.text import DEFAULT_FONT
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.widget import Widget
from kivy.lang import Builder
class Box(Widget):
padding = VariableListProperty([6, 6, 6, 6])
border = ListProperty([4, 4, 4, 4])
font_size = StringProperty('15sp')
font_name = StringProperty(DEFAULT_FONT)
background = StringProperty(
'atlas://data/images/defaulttheme/textinput')
background_color = ListProperty([1, 1, 1, 1])
foreground_color = ListProperty([0, 0, 0, 1])
class MessageBox(Box):
"""Looks like a TextInput but doesn't accept any input.
Does support styled text with BBcode.
"""
line_spacing = NumericProperty(0)
text = StringProperty()
class DialogMenu(Box, BoxLayout):
"""Some buttons that make the game do things."""
options = ListProperty()
"""List of pairs of (button_text, partial)"""
funcs = DictProperty({})
"""Dict of functions to be used in place of string partials in the options"""
def on_options(self, *args):
self.clear_widgets()
for txt, part in self.options:
if not callable(part):
part = self.funcs[part]
self.add_widget(Button(text=txt, on_press=part))
class Dialog(BoxLayout):
"""MessageBox with a DialogMenu beneath it"""
message_kwargs = DictProperty({})
menu_kwargs = DictProperty({})
def on_message_kwargs(self, *args):
for k, v in self.message_kwargs.items():
setattr(self.ids.msg, k, v)
def on_menu_kwargs(self, *args):
for k, v in self.menu_kwargs.items():
setattr(self.ids.menu, k, v)
Builder.load_string("""
<MessageBox>:
canvas.before:
Color:
rgba: self.background_color
BorderImage:
border: self.border
pos: self.pos
size: self.size
source: self.background
ScrollView:
id: sv
do_scroll_x: False
size: root.width - root.border[1] - root.border[3], root.height - root.border[0] - root.border[2]
Label:
markup: True
text: root.text
font_name: root.font_name
font_size: root.font_size
line_spacing: root.line_spacing
width: sv.width
size_hint_y: None
text_size: self.size
<DialogMenu>:
canvas.before:
Color:
rgba: self.background_color
BorderImage:
border: self.border
pos: self.pos
size: self.size
source: self.background
orientation: 'vertical'
<Dialog>:
MessageBox:
id: msg
ScrollView:
DialogMenu:
size_hint_y: None
id: menu
""")
if __name__ == "__main__":
from kivy.base import runTouchApp
dia = Dialog(
message_kwargs={'text': 'I am a dialog'},
menu_kwargs={'options': [('one', lambda: None), ('two', lambda: None)]}
) | Python | 0 |
d1c77a8ce0b7c4b1957e443a88bb797450e6f1df | Revert "Al fer el canvi d'id a id_attachment m'havia deixat una cosa" | filtres/filtre.py | filtres/filtre.py | import base64
import logging
logger = logging.getLogger(__name__)
class Filtre(object):
def __init__(self,msg=None,tickets=None,identitat=None):
self.msg=msg
self.tickets=tickets
self.identitat=identitat
def set_mail(self,msg):
self.msg=msg
def set_tickets(self,tickets):
self.tickets=tickets
def set_identitat(self,identitat):
self.identitat=identitat
def es_aplicable(self):
return False
def filtrar(self):
return
def get_uid(self):
uid=self.identitat.obtenir_uid(self.msg.get_from())
if uid != None:
return uid
if self.msg.get_reply_to() != None:
return self.identitat.obtenir_uid(self.msg.get_reply_to())
return None
def codificar_base_64_si_cal(self,attachment):
if attachment['Content-Transfer-Encoding']=='base64':
return attachment.get_payload()
else:
return base64.b64encode(attachment.get_payload())
def afegir_attachments_canviant_body(self,ticket_id,username,body):
(cids,ids)=self.afegir_attachments(ticket_id,username)
body=self.tractar_attachments_inline(body,cids)
body=self.afegir_links_attachments(body,ids)
return body
def afegir_attachments(self,ticket_id,username):
logger.info("Tractem attachments del ticket %s" % ticket_id)
i=0;
cids={}
ids={}
for a in self.msg.get_attachments():
ctype=a.get_content_type()
fitxer=a.get_filename()
cid=a.get('Content-ID')
i+=1
if fitxer==None:
fitxer='attach%d.%s' % (i,ctype.split("/")[1])
logger.info("Afegim attachment: %s / %s" % (fitxer,cid))
codi_annex=self.tickets.annexar_fitxer_tiquet(ticket_id,username,fitxer, self.codificar_base_64_si_cal(a))
if cid!=None:
cids[cid[1:-1]]=codi_annex
else:
ids[codi_annex]=a
return (cids,ids)
def tractar_attachments_inline(self,html,cids):
for cid in cids:
id_attachment=cids[cid]
html=html.replace("cid:"+cid,self.url_attachment(id_attachment))
return html
def afegir_links_attachments(self,html,ids):
if len(ids)==0:
return html
html+="<br><br>Attachments:<ul>"
for id_attachment in ids:
a=ids[id]
url=self.url_attachment(id_attachment)
html+="<li><a href=\"%s\">%s (%s)</a>" % (url,a.get_filename(),a.get_content_type())
html+="</ul>"
return html
def url_attachment(self,id_attachment):
return "/tiquetsusuaris/control/file?fileId=%s" % id_attachment | import base64
import logging
logger = logging.getLogger(__name__)
class Filtre(object):
def __init__(self,msg=None,tickets=None,identitat=None):
self.msg=msg
self.tickets=tickets
self.identitat=identitat
def set_mail(self,msg):
self.msg=msg
def set_tickets(self,tickets):
self.tickets=tickets
def set_identitat(self,identitat):
self.identitat=identitat
def es_aplicable(self):
return False
def filtrar(self):
return
def get_uid(self):
uid=self.identitat.obtenir_uid(self.msg.get_from())
if uid != None:
return uid
if self.msg.get_reply_to() != None:
return self.identitat.obtenir_uid(self.msg.get_reply_to())
return None
def codificar_base_64_si_cal(self,attachment):
if attachment['Content-Transfer-Encoding']=='base64':
return attachment.get_payload()
else:
return base64.b64encode(attachment.get_payload())
def afegir_attachments_canviant_body(self,ticket_id,username,body):
(cids,ids)=self.afegir_attachments(ticket_id,username)
body=self.tractar_attachments_inline(body,cids)
body=self.afegir_links_attachments(body,ids)
return body
def afegir_attachments(self,ticket_id,username):
logger.info("Tractem attachments del ticket %s" % ticket_id)
i=0;
cids={}
ids={}
for a in self.msg.get_attachments():
ctype=a.get_content_type()
fitxer=a.get_filename()
cid=a.get('Content-ID')
i+=1
if fitxer==None:
fitxer='attach%d.%s' % (i,ctype.split("/")[1])
logger.info("Afegim attachment: %s / %s" % (fitxer,cid))
codi_annex=self.tickets.annexar_fitxer_tiquet(ticket_id,username,fitxer, self.codificar_base_64_si_cal(a))
if cid!=None:
cids[cid[1:-1]]=codi_annex
else:
ids[codi_annex]=a
return (cids,ids)
def tractar_attachments_inline(self,html,cids):
for cid in cids:
id_attachment=cids[cid]
html=html.replace("cid:"+cid,self.url_attachment(id_attachment))
return html
def afegir_links_attachments(self,html,ids):
if len(ids)==0:
return html
html+="<br><br>Attachments:<ul>"
for id_attachment in ids:
a=ids[id_attachment]
url=self.url_attachment(id_attachment)
html+="<li><a href=\"%s\">%s (%s)</a>" % (url,a.get_filename(),a.get_content_type())
html+="</ul>"
return html
def url_attachment(self,id_attachment):
return "/tiquetsusuaris/control/file?fileId=%s" % id_attachment | Python | 0 |
69c590d7cf2d328b9e6ef63ddf49933e67df9614 | fix typo | statsd/gauge.py | statsd/gauge.py | import statsd
class Gauge(statsd.Client):
'''Class to implement a statsd gauge
'''
def send(self, subname, value):
'''Send the data to statsd via self.connection
:keyword subname: The subname to report the data to (appended to the
client name)
:keyword value: The gauge value to send
'''
name = self._get_name(self.name, subname)
self.logger.info('%s: %d', name, value)
return statsd.Client._send(self, {name: '%d|g' % value})
| import statsd
class Gauge(statsd.Client):
'''Class to implement a statd gauge
'''
def send(self, subname, value):
'''Send the data to statsd via self.connection
:keyword subname: The subname to report the data to (appended to the
client name)
:keyword value: The gauge value to send
'''
name = self._get_name(self.name, subname)
self.logger.info('%s: %d', name, value)
return statsd.Client._send(self, {name: '%d|g' % value})
| Python | 0.999991 |
b97edcc911419197099338085f0f2937286dead0 | Bump version | galaxy/__init__.py | galaxy/__init__.py | # (c) 2012-2014, Ansible, Inc. <support@ansible.com>
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os.path
import sys
import warnings
__version__ = '2.0.1'
__all__ = ['__version__']
def find_commands(management_dir):
# Modified version of function from django/core/management/__init__.py.
command_dir = os.path.join(management_dir, 'commands')
commands = []
try:
for f in os.listdir(command_dir):
if f.startswith('_'):
continue
elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3])
elif f.endswith('.pyc') and f[:-4] not in commands:
commands.append(f[:-4])
except OSError:
pass
return commands
def prepare_env():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'galaxy.settings')
local_site_packages = os.path.join(os.path.dirname(__file__), 'lib', 'site-packages')
sys.path.insert(0, local_site_packages)
from django.conf import settings
if not settings.DEBUG:
warnings.simplefilter('ignore', DeprecationWarning)
import django.utils
settings.version = __version__
def manage():
# Prepare the galaxy environment.
prepare_env()
# Now run the command (or display the version).
from django.core.management import execute_from_command_line
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):
sys.stdout.write('galaxy-%s\n' % __version__)
else:
execute_from_command_line(sys.argv)
| # (c) 2012-2014, Ansible, Inc. <support@ansible.com>
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os.path
import sys
import warnings
__version__ = '2.0.0'
__all__ = ['__version__']
def find_commands(management_dir):
# Modified version of function from django/core/management/__init__.py.
command_dir = os.path.join(management_dir, 'commands')
commands = []
try:
for f in os.listdir(command_dir):
if f.startswith('_'):
continue
elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3])
elif f.endswith('.pyc') and f[:-4] not in commands:
commands.append(f[:-4])
except OSError:
pass
return commands
def prepare_env():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'galaxy.settings')
local_site_packages = os.path.join(os.path.dirname(__file__), 'lib', 'site-packages')
sys.path.insert(0, local_site_packages)
from django.conf import settings
if not settings.DEBUG:
warnings.simplefilter('ignore', DeprecationWarning)
import django.utils
settings.version = __version__
def manage():
# Prepare the galaxy environment.
prepare_env()
# Now run the command (or display the version).
from django.core.management import execute_from_command_line
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):
sys.stdout.write('galaxy-%s\n' % __version__)
else:
execute_from_command_line(sys.argv)
| Python | 0 |
b99de7f7d91c1be99ee7ea04da997d48126fd08b | fix model finance | finance/models.py | finance/models.py | from django.db import models
from datetime import datetime
class TypeLaunch(models.Model):
type_name = models.CharField(max_length=100, unique=True)
class Provider(models.Model):
description = models.CharField(max_length=100, unique=True)
type_launch = models.ForeignKey(TypeLaunch, blank=True, null=True)
date_last_purchase = models.DateField('date last purchase')
value_total = models.DecimalField(max_digits=5, decimal_places=2)
class Extract(models.Model):
date_launch = models.DateField('date launch')
launch = models.CharField(max_length=100)
date_purchase = models.DateField('date purchase')
value_debit = models.DecimalField(max_digits=5, decimal_places=2)
value_credit = models.DecimalField(max_digits=5, decimal_places=2)
value_balance = models.DecimalField(max_digits=5, decimal_places=2)
cancelled = models.BooleanField(default=True, db_index=True)
provider = models.ForeignKey(Provider, blank=True, null=True)
def str_to_date(self, date_launch, launch=''):
date = date_launch.replace('/','-')
if not launch is '' and launch.strip()[-3] == '/':
year = datetime.strptime(date, '%d-%m-%Y').date().year
date = launch.strip()[-5:].replace('/','-') + '-' + str(year)
return datetime.strptime(date, '%d-%m-%Y').date()
def str_to_float(self, value):
return float(value.replace(',','.'))
def importer(self, path):
with open(path, 'r') as ff:
contents = ff.readlines()
line = 0
extract = Extract()
while line <= len(contents):
date_launch, launch, value = contents[line].split(';')
extract.date_launch = extract.str_to_date(date_launch)
extract.launch = launch.strip() #.split('-')[:-1]
extract.date_purchase = extract.str_to_date(date_launch, launch)
if extract.str_to_float(value) < 0:
extract.value_debit = extract.str_to_float(value)
extract.value_credit = 0
else:
extract.value_debit = 0
extract.value_credit = extract.str_to_float(value)
extract.value_balance = 0
import pdb; pdb.set_trace()
extract.save()
line += 1
ff.close() | from django.db import models
from datetime import datetime
class TypeLaunch(models.Model):
type_name = models.CharField(max_length=100, unique=True)
class Provider(models.Model):
description = models.CharField(max_length=100, unique=True)
type_launch = models.ForeignKey(TypeLaunch, blank=True, null=True)
date_last_purchase = models.DateTimeField('date last purchase')
value_total = models.DecimalField(max_digits=5, decimal_places=2)
class Extract(models.Model):
date_launch = models.DateTimeField('date launch')
launch = models.CharField(max_length=100)
date_purchase = models.DateTimeField('date purchase')
value_debit = models.DecimalField(max_digits=5, decimal_places=2)
value_credit = models.DecimalField(max_digits=5, decimal_places=2)
value_balance = models.DecimalField(max_digits=5, decimal_places=2)
cancelled = models.BooleanField(default=True, db_index=True)
provider = models.ForeignKey(Provider, blank=True, null=True)
def str_to_date(self, date_launch, launch, year):
#import pdb; pdb.set_trace()
if launch.strip()[-3] == '/':
date = launch.split('-')[-1].strip()
date = date.replace('/','-') + '-' + str(year)
return datetime.strptime(date, '%d-%m-%Y').date()
def str_to_float(self, value):
return float(value.replace(',','.'))
def importer(self, path):
with open(path, 'r') as ff:
contents = ff.readlines()
line = 0
extract = Extract()
while line <= len(contents):
date_launch, launch, value = contents[line].split(';')
extract.date_launch = datetime.strptime(date_launch, '%d-%m-%Y').date()
extract.launch = launch.strip() #.split('-')[:-1]
year = extract.str_to_date(date_launch).year
extract.date_purchase = extract.str_to_date(date_launch, launch, year)
if extract.str_to_float(value) < 0:
extract.value_debit = extract.str_to_float(value)
extract.value_credit = 0
else:
extract.value_debit = 0
extract.value_credit = extract.str_to_float(value)
extract.value_balance = 0
extract.save()
line += 1
ff.close() | Python | 0.000001 |
747fa98c7a9ec7906dfba44e4860d300825eee39 | Drop Py2 and six on tests/integration/modules/test_key.py | tests/integration/modules/test_key.py | tests/integration/modules/test_key.py | import re
import pytest
from tests.support.case import ModuleCase
from tests.support.helpers import slowTest
@pytest.mark.windows_whitelisted
class KeyModuleTest(ModuleCase):
@slowTest
def test_key_finger(self):
"""
test key.finger to ensure we receive a valid fingerprint
"""
out = self.run_function("key.finger")
match = re.match("([0-9a-z]{2}:){15,}[0-9a-z]{2}$", out)
self.assertTrue(match)
@slowTest
def test_key_finger_master(self):
"""
test key.finger_master to ensure we receive a valid fingerprint
"""
out = self.run_function("key.finger_master")
match = re.match("([0-9a-z]{2}:){15,}[0-9a-z]{2}$", out)
self.assertTrue(match)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import re
import pytest
from tests.support.case import ModuleCase
from tests.support.helpers import slowTest
@pytest.mark.windows_whitelisted
class KeyModuleTest(ModuleCase):
@slowTest
def test_key_finger(self):
"""
test key.finger to ensure we receive a valid fingerprint
"""
out = self.run_function("key.finger")
match = re.match("([0-9a-z]{2}:){15,}[0-9a-z]{2}$", out)
self.assertTrue(match)
@slowTest
def test_key_finger_master(self):
"""
test key.finger_master to ensure we receive a valid fingerprint
"""
out = self.run_function("key.finger_master")
match = re.match("([0-9a-z]{2}:){15,}[0-9a-z]{2}$", out)
self.assertTrue(match)
| Python | 0 |
6140507068c7a42a988bad951c1a6f120de741fb | Update cam_timeLapse_Threaded_upload.py | camera/timelapse/cam_timeLapse_Threaded_upload.py | camera/timelapse/cam_timeLapse_Threaded_upload.py | #!/usr/bin/env python2.7
import time
import os
from subprocess import call
import sys
class Logger(object):
def __init__(self):
self.terminal = sys.stdout
self.log = open("logfile.log", "a")
def write(self, message):
self.terminal.write(message)
self.log.write(message)
sys.stdout = Logger()
UPLOAD_INTERVAL = 60
def upload_file(inpath, outpath):
uploadCmd = "/home/pi/Dropbox-Uploader/dropbox_uploader.sh upload %s %s" % (inpath, outpath)
call ([uploadCmd], shell=True)
while True:
# record start_time
start_time = time.time()
# initiate the upload process
inpath = "/home/pi/timelapse/latest/latest.jpg"
outpath = "latest.jpg"
if os.path.exists(inpath):
upload_file(inpath,outpath)
print "uploadThread: uploaded %s to %s" % (inpath,outpath)
else:
print "uploadThread: file %s does not exist, skipping" % (inpath)
inpath = "/home/pi/timelapse/latest/latest.mp4"
outpath = "latest.mp4"
if os.path.exists(inpath):
upload_file(inpath,outpath)
print "uploadThread: uploaded %s to %s" % (inpath,outpath)
else:
print "uploadThread: file %s does not exist, skipping" % (inpath)
# record end_time
end_time = time.time()
# determine elapsed time
elapsed_time = end_time - start_time
# determine how long to sleep
sleep_time = UPLOAD_INTERVAL - elapsed_time
# check for negative sleep request!
if (sleep_time < 1):
print "uploadThread: sleep_time < 1!!! (%s)" % sleep_time
sleep_time = 1
# sleep
print "uploadThread: sleeping for %s seconds" % sleep_time
time.sleep(sleep_time)
| #!/usr/bin/env python2.7
import time
import os
from subprocess import call
UPLOAD_INTERVAL = 60
def upload_file(inpath, outpath):
uploadCmd = "/home/pi/Dropbox-Uploader/dropbox_uploader.sh upload %s %s" % (inpath, outpath)
call ([uploadCmd], shell=True)
while True:
# record start_time
start_time = time.time()
# initiate the upload process
inpath = "/home/pi/timelapse/latest/latest.jpg"
outpath = "latest.jpg"
if os.path.exists(inpath):
upload_file(inpath,outpath)
print "uploadThread: uploaded %s to %s" % (inpath,outpath)
else:
print "uploadThread: file %s does not exist, skipping" % (inpath)
inpath = "/home/pi/timelapse/latest/latest.mp4"
outpath = "latest.mp4"
if os.path.exists(inpath):
upload_file(inpath,outpath)
print "uploadThread: uploaded %s to %s" % (inpath,outpath)
else:
print "uploadThread: file %s does not exist, skipping" % (inpath)
# record end_time
end_time = time.time()
# determine elapsed time
elapsed_time = end_time - start_time
# determine how long to sleep
sleep_time = UPLOAD_INTERVAL - elapsed_time
# check for negative sleep request!
if (sleep_time < 1):
print "uploadThread: sleep_time < 1!!! (%s)" % sleep_time
sleep_time = 1
# sleep
print "uploadThread: sleeping for %s seconds" % sleep_time
time.sleep(sleep_time)
| Python | 0 |
16fca36c2032929589a718507a74c87bee52c161 | move planarAxiPotential to top-level | galpy/potential.py | galpy/potential.py | from galpy.potential_src import Potential
# Public galpy.potential namespace: re-export the functions and classes
# implemented in the galpy.potential_src modules.
from galpy.potential_src import planarPotential
from galpy.potential_src import linearPotential
from galpy.potential_src import verticalPotential
from galpy.potential_src import MiyamotoNagaiPotential
from galpy.potential_src import LogarithmicHaloPotential
from galpy.potential_src import DoubleExponentialDiskPotential
from galpy.potential_src import PowerSphericalPotential
from galpy.potential_src import TwoPowerSphericalPotential
from galpy.potential_src import plotRotcurve
from galpy.potential_src import plotEscapecurve
from galpy.potential_src import KGPotential
from galpy.potential_src import interpRZPotential
#
# Functions
#
evaluatePotentials= Potential.evaluatePotentials
evaluateDensities= Potential.evaluateDensities
evaluateRforces= Potential.evaluateRforces
evaluatephiforces= Potential.evaluatephiforces
evaluatezforces= Potential.evaluatezforces
RZToplanarPotential= planarPotential.RZToplanarPotential
RZToverticalPotential= verticalPotential.RZToverticalPotential
plotPotentials= Potential.plotPotentials
plotRotcurve= plotRotcurve.plotRotcurve
plotEscapecurve= plotEscapecurve.plotEscapecurve
#
# Classes
#
# NOTE: each line below rebinds an imported module name to the class of the
# same name, so order matters: planarAxiPotential must be read from the
# planarPotential module before that module name is rebound on the next line.
Potential= Potential.Potential
planarAxiPotential= planarPotential.planarAxiPotential
planarPotential= planarPotential.planarPotential
linearPotential= linearPotential.linearPotential
MiyamotoNagaiPotential= MiyamotoNagaiPotential.MiyamotoNagaiPotential
DoubleExponentialDiskPotential= DoubleExponentialDiskPotential.DoubleExponentialDiskPotential
LogarithmicHaloPotential= LogarithmicHaloPotential.LogarithmicHaloPotential
KeplerPotential= PowerSphericalPotential.KeplerPotential
PowerSphericalPotential= PowerSphericalPotential.PowerSphericalPotential
NFWPotential= TwoPowerSphericalPotential.NFWPotential
JaffePotential= TwoPowerSphericalPotential.JaffePotential
HernquistPotential= TwoPowerSphericalPotential.HernquistPotential
TwoPowerSphericalPotential= TwoPowerSphericalPotential.TwoPowerSphericalPotential
KGPotential= KGPotential.KGPotential
interpRZPotential= interpRZPotential.interpRZPotential
| from galpy.potential_src import Potential
# Public galpy.potential namespace: re-export the functions and classes
# implemented in the galpy.potential_src modules.
from galpy.potential_src import planarPotential
from galpy.potential_src import linearPotential
from galpy.potential_src import verticalPotential
from galpy.potential_src import MiyamotoNagaiPotential
from galpy.potential_src import LogarithmicHaloPotential
from galpy.potential_src import DoubleExponentialDiskPotential
from galpy.potential_src import PowerSphericalPotential
from galpy.potential_src import TwoPowerSphericalPotential
from galpy.potential_src import plotRotcurve
from galpy.potential_src import plotEscapecurve
from galpy.potential_src import KGPotential
from galpy.potential_src import interpRZPotential
#
# Functions
#
evaluatePotentials= Potential.evaluatePotentials
evaluateDensities= Potential.evaluateDensities
evaluateRforces= Potential.evaluateRforces
evaluatephiforces= Potential.evaluatephiforces
evaluatezforces= Potential.evaluatezforces
RZToplanarPotential= planarPotential.RZToplanarPotential
RZToverticalPotential= verticalPotential.RZToverticalPotential
plotPotentials= Potential.plotPotentials
plotRotcurve= plotRotcurve.plotRotcurve
plotEscapecurve= plotEscapecurve.plotEscapecurve
#
# Classes
#
# NOTE: each line below rebinds an imported module name to the class of the
# same name, so the modules are no longer reachable afterwards.
Potential= Potential.Potential
planarPotential= planarPotential.planarPotential
linearPotential= linearPotential.linearPotential
MiyamotoNagaiPotential= MiyamotoNagaiPotential.MiyamotoNagaiPotential
DoubleExponentialDiskPotential= DoubleExponentialDiskPotential.DoubleExponentialDiskPotential
LogarithmicHaloPotential= LogarithmicHaloPotential.LogarithmicHaloPotential
KeplerPotential= PowerSphericalPotential.KeplerPotential
PowerSphericalPotential= PowerSphericalPotential.PowerSphericalPotential
NFWPotential= TwoPowerSphericalPotential.NFWPotential
JaffePotential= TwoPowerSphericalPotential.JaffePotential
HernquistPotential= TwoPowerSphericalPotential.HernquistPotential
TwoPowerSphericalPotential= TwoPowerSphericalPotential.TwoPowerSphericalPotential
KGPotential= KGPotential.KGPotential
interpRZPotential= interpRZPotential.interpRZPotential
| Python | 0 |
0a4265282f240dc52acac4347636417a14274ada | update dict list in mixfeatures | code/python/seizures/features/MixFeatures.py | code/python/seizures/features/MixFeatures.py | import numpy as np
from seizures.features.FeatureExtractBase import FeatureExtractBase
from seizures.features.ARFeatures import ARFeatures
from seizures.features.FFTFeatures import FFTFeatures
from seizures.features.PLVFeatures import PLVFeatures
from seizures.features.RandomFeatures import RandomFeatures
from seizures.features.XCHUHFeatures import XCHUHFeatures
from seizures.features.SEFeatures import SEFeatures
from seizures.features.LyapunovFeatures import LyapunovFeatures
class MixFeatures(FeatureExtractBase):
    """
    Class to concatenate output of individual feature classes.
    @author V&J
    """

    def __init__(self, features_list):
        """
        Wittawat: features_list is a list L of dictionaries D's where
        D is of the form {'name': 'Name of a class extending FeatureExtractBase',
        'args': 'arguments (a kwargs dictionary) to class constructor'}. ?
        """
        self.features_list = features_list

    def extract(self, instance):
        # Registry mapping feature names (strings) to their classes; only
        # these can be instantiated through MixFeatures.
        feature_class_dict = {"ARFeatures":ARFeatures,
                              "FFTFeatures":FFTFeatures,
                              "PLVFeatures":PLVFeatures,
                              "RandomFeatures":RandomFeatures,
                              "SEFeatures":SEFeatures,
                              "LyapunovFeatures":LyapunovFeatures}
        extracted_features_list = []
        for feature_string in self.features_list:
            if feature_string['name'] in feature_class_dict:
                kwargs = feature_string['args']
                # Instantiate the extractor with its kwargs, then flatten
                # whatever it returns into a 1-D vector.
                feature_object = feature_class_dict[feature_string['name']](**kwargs)
                extracted_features_list.append(np.hstack(feature_object.extract(instance))) #flattened
            else:
                # Unknown names are skipped with a console warning (Python 2 print).
                print "feature not in list !!!"
        # Concatenate all per-extractor vectors into one feature vector.
        return np.hstack(extracted_features_list)
#------- end of MixFeatures ---------------
class StackFeatures(FeatureExtractBase):
    """
    A meta feature generator which stacks features generated from other
    FeatureExtractBase's. Semantically this feature generator is the same as
    MixFeatures but directly takes in objects of subclass of
    FeatureExtractBase, unlike MixFeatures.

    (I am just not comfortable passing class handle and bunch of arguments)

    @author Wittawat
    """
    def __init__(self, *feature_generators):
        """
        Input:
        feature_generators: a list of objects of subclass of FeatureExtractBase
        """
        self.feature_generators = feature_generators

    def extract(self, instance):
        # Flatten each generator's feature vector, then concatenate them all.
        stacked = []
        for gen in self.feature_generators:
            # a feature vector
            assert(isinstance(gen, FeatureExtractBase))
            stacked.append(np.hstack(gen.extract(instance)))
        return np.hstack(stacked)

    def __str__(self):
        inner = ', '.join(str(gen) for gen in self.feature_generators)
        return 'Stack' + '(%s)' % (inner,)
| import numpy as np
from seizures.features.FeatureExtractBase import FeatureExtractBase
from seizures.features.ARFeatures import ARFeatures
from seizures.features.FFTFeatures import FFTFeatures
from seizures.features.PLVFeatures import PLVFeatures
from seizures.features.RandomFeatures import RandomFeatures
from seizures.features.XCHUHFeatures import XCHUHFeatures
class MixFeatures(FeatureExtractBase):
    """
    Class to concatenate output of individual feature classes.
    @author V&J
    """

    def __init__(self, features_list):
        """
        Wittawat: features_list is a list L of dictionaries D's where
        D is of the form {'name': 'Name of a class extending FeatureExtractBase',
        'args': 'arguments (a kwargs dictionary) to class constructor'}. ?
        """
        self.features_list = features_list

    def extract(self, instance):
        # Registry of instantiable feature classes, keyed by class name.
        feature_class_dict = {"ARFeatures":ARFeatures,
                              "FFTFeatures":FFTFeatures,
                              "PLVFeatures":PLVFeatures,
                              "RandomFeatures":RandomFeatures}
        extracted_features_list = []
        for feature_string in self.features_list:
            if feature_string['name'] in feature_class_dict:
                kwargs = feature_string['args']
                # Build the extractor and flatten its output to 1-D.
                feature_object = feature_class_dict[feature_string['name']](**kwargs)
                extracted_features_list.append(np.hstack(feature_object.extract(instance))) #flattened
            else:
                # Unknown feature names are skipped with a warning (Python 2 print).
                print "feature not in list !!!"
        return np.hstack(extracted_features_list)
#------- end of MixFeatures ---------------
class StackFeatures(FeatureExtractBase):
    """
    A meta feature generator which stacks features generated from other
    FeatureExtractBase's. Semantically this feature generator is the same as
    MixFeatures but directly takes in objects of subclass of
    FeatureExtractBase, unlike MixFeatures.

    (I am just not comfortable passing class handle and bunch of arguments)

    @author Wittawat
    """
    def __init__(self, *feature_generators):
        """
        Input:
        feature_generators: a list of objects of subclass of FeatureExtractBase
        """
        self.feature_generators = feature_generators

    def extract(self, instance):
        pieces = []
        for fg in self.feature_generators:
            # a feature vector
            assert(isinstance(fg, FeatureExtractBase))
            # Flatten this generator's feature vector before stacking.
            pieces.append(np.hstack(fg.extract(instance)))
        return np.hstack(pieces)

    def __str__(self):
        return 'Stack' + '(%s)' % (', '.join([str(fg) for fg in self.feature_generators]))
| Python | 0 |
198e5256063d43006b5c245866604f5bd746cfcd | Allow the plugin to be loaded from a query | plugins/Success/plugin.py | plugins/Success/plugin.py | ###
# Copyright (c) 2005, Daniel DiPaolo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.conf as conf
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
from supybot.i18n import PluginInternationalization, internationalizeDocstring
_ = PluginInternationalization('Success')
class Success(plugins.ChannelIdDatabasePlugin):
    """This plugin was written initially to work with MoobotFactoids, the two
    of them to provide a similar-to-moobot-and-blootbot interface for factoids.
    Basically, it replaces the standard 'The operation succeeded.' messages
    with messages kept in a database, able to give more personable
    responses."""

    def __init__(self, irc):
        self.__parent = super(Success, self)
        self.__parent.__init__(irc)
        self.target = None
        pluginSelf = self
        # Remember the stock reply class so it can be restored in die() and
        # used as a fallback when the database has no custom message.
        self.originalClass = conf.supybot.replies.success.__class__

        class MySuccessClass(self.originalClass):
            def __call__(self):
                # Pick a random stored success message for the channel.
                # NOTE(review): `dynamic.msg` relies on supybot's dynamic
                # scoping to reach the message currently being replied to;
                # confirm args[0] is the intended channel key.
                ret = pluginSelf.db.random(dynamic.msg.args[0])
                if ret is None:
                    # No custom message: temporarily swap back to the stock
                    # class to produce the default reply, then swap forward.
                    try:
                        self.__class__ = pluginSelf.originalClass
                        ret = self()
                    finally:
                        self.__class__ = MySuccessClass
                else:
                    ret = ret.text
                return ret

            def get(self, attr):
                # Channel-scoped lookup: remember the channel, return self.
                if ircutils.isChannel(attr):
                    pluginSelf.target = attr
                return self

        # Monkey-patch the global success-reply value's class.
        conf.supybot.replies.success.__class__ = MySuccessClass

    def die(self):
        # Restore the stock reply class when the plugin unloads.
        self.__parent.die()
        conf.supybot.replies.success.__class__ = self.originalClass

Success = internationalizeDocstring(Success)

Class = Success

# vim:set shiftwidth=4 softtabstop=8 expandtab textwidth=78:
| ###
# Copyright (c) 2005, Daniel DiPaolo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.conf as conf
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
from supybot.i18n import PluginInternationalization, internationalizeDocstring
_ = PluginInternationalization('Success')
class Success(plugins.ChannelIdDatabasePlugin):
    """This plugin was written initially to work with MoobotFactoids, the two
    of them to provide a similar-to-moobot-and-blootbot interface for factoids.
    Basically, it replaces the standard 'The operation succeeded.' messages
    with messages kept in a database, able to give more personable
    responses."""

    def __init__(self, irc):
        self.__parent = super(Success, self)
        self.__parent.__init__(irc)
        self.target = None
        pluginSelf = self
        # Remember the stock reply class so it can be restored in die() and
        # used as a fallback when the database has no custom message.
        self.originalClass = conf.supybot.replies.success.__class__

        class MySuccessClass(self.originalClass):
            def __call__(self):
                # Use the remembered target; fall back to the 'private_query'
                # bucket when no channel has been recorded.
                ret = pluginSelf.db.random(pluginSelf.target or 'private_query')
                if ret is None:
                    # No custom message: temporarily swap back to the stock
                    # class to produce the default reply, then swap forward.
                    try:
                        self.__class__ = pluginSelf.originalClass
                        ret = self()
                    finally:
                        self.__class__ = MySuccessClass
                else:
                    ret = ret.text
                return ret

            def get(self, attr):
                # Channel-scoped lookup: remember the channel, return self.
                if ircutils.isChannel(attr):
                    pluginSelf.target = attr
                return self

        # Monkey-patch the global success-reply value's class.
        conf.supybot.replies.success.__class__ = MySuccessClass

    def die(self):
        # Restore the stock reply class when the plugin unloads.
        self.__parent.die()
        conf.supybot.replies.success.__class__ = self.originalClass

    def inFilter(self, irc, msg):
        # We need the target, but we need it before Owner.doPrivmsg is called,
        # so this seems like the only way to do it.
        self.target = msg.args[0]
        return msg

Success = internationalizeDocstring(Success)

Class = Success

# vim:set shiftwidth=4 softtabstop=8 expandtab textwidth=78:
| Python | 0 |
4c084313d2e27a620f194e6282a51aa1e94f7a35 | Change chunk so it only takes an int | node/floor_divide.py | node/floor_divide.py | #!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
    """Node "f": floor division for numbers, partition for strings, and
    chunking for indexables (the runtime dispatches on argument types)."""
    char = "f"
    args = 2
    results = 1

    @Node.test_func([3,2], [1])
    @Node.test_func([6,-3], [-2])
    def func(self, a:Node.number,b:Node.number):
        """a/b. Rounds down, returns an int."""
        return a//b

    @Node.test_func(["test", "e"], [["t", "e", "st"]])
    def partition(self, string:str, sep:str):
        """Split the string at the first occurrence of sep,
        return a 3-list containing the part before the separator,
        the separator itself, and the part after the separator.
        If the separator is not found,
        return a 3-list containing the string itself,
        followed by two empty strings."""
        return [list(string.partition(sep))]

    @Node.test_func(["134", 1], [["134"]])
    @Node.test_func(["1234", 2], [["12", "34"]])
    @Node.test_func(["1234", 3], [["1", "2", "34"]])
    @Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
    def chunk(self, inp:Node.indexable, num:int):
        """Return inp seperated into num groups"""
        # NOTE(review): size == 0 when num > len(inp) makes range() raise
        # ValueError, and num == 0 raises ZeroDivisionError — confirm the
        # interpreter guarantees 0 < num <= len(inp).
        rtn = []
        last = 0
        size = len(inp)//num
        for i in range(size, len(inp), size):
            rtn.append(inp[last:i])
            last = i
        # Fewer groups than requested: the remainder becomes the last group;
        # otherwise fold the remainder into the existing last group.
        if len(rtn) != num:
            rtn.append(inp[last:])
        else:
            rtn[-1] += inp[last:]
        if len(rtn):
            # Restore the input's concrete type for the final group.
            if isinstance(inp, str):
                rtn[-1] = "".join(rtn[-1])
            else:
                rtn[-1] = type(inp)(rtn[-1])
return [rtn] | #!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
    """Node "f": floor division for numbers, partition for strings, and
    chunking for indexables (the runtime dispatches on argument types)."""
    char = "f"
    args = 2
    results = 1

    @Node.test_func([3,2], [1])
    @Node.test_func([6,-3], [-2])
    def func(self, a:Node.number,b:Node.number):
        """a/b. Rounds down, returns an int."""
        return a//b

    @Node.test_func(["test", "e"], [["t", "e", "st"]])
    def partition(self, string:str, sep:str):
        """Split the string at the first occurrence of sep,
        return a 3-list containing the part before the separator,
        the separator itself, and the part after the separator.
        If the separator is not found,
        return a 3-list containing the string itself,
        followed by two empty strings."""
        return [list(string.partition(sep))]

    @Node.test_func(["134", 1], [["134"]])
    @Node.test_func(["1234", 2], [["12", "34"]])
    @Node.test_func(["1234", 3], [["1", "2", "34"]])
    @Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
    def chunk(self, inp:Node.indexable, num:Node.number):
        """Return inp seperated into num groups"""
        # NOTE(review): size == 0 when num > len(inp) makes range() raise
        # ValueError, and num == 0 raises ZeroDivisionError — confirm the
        # interpreter guarantees 0 < num <= len(inp).
        rtn = []
        last = 0
        size = len(inp)//num
        for i in range(size, len(inp), size):
            rtn.append(inp[last:i])
            last = i
        # Fewer groups than requested: the remainder becomes the last group;
        # otherwise fold the remainder into the existing last group.
        if len(rtn) != num:
            rtn.append(inp[last:])
        else:
            rtn[-1] += inp[last:]
        if len(rtn):
            # Restore the input's concrete type for the final group.
            if isinstance(inp, str):
                rtn[-1] = "".join(rtn[-1])
            else:
                rtn[-1] = type(inp)(rtn[-1])
return [rtn] | Python | 0.000002 |
942e3b183859623d2f2a6bf874f8d763e960ea5b | Print AST during integration test | tests/integration/test_integration.py | tests/integration/test_integration.py | import collections
import io
import json
import os
import pytest
import glob
import subprocess
import thinglang
from thinglang import run, utils
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
SEARCH_PATTERN = os.path.join(BASE_PATH, '**/*.thing')
TestCase = collections.namedtuple('TestCase', ['code', 'metadata', 'name', 'bytecode_target'])
def collect_tests():
    """Yield a TestCase for every .thing program under the test directory.

    Each program embeds a JSON metadata header inside its leading /* ... */
    block; the program code proper follows the closing */.
    """
    for path in glob.glob(SEARCH_PATTERN, recursive=True):
        with open(path, 'r') as f:
            contents = f.read()
        metadata_start = contents.index('/*') + 2
        metadata_end = contents.index('*/')
        metadata = json.loads(contents[metadata_start:metadata_end])
        yield TestCase(
            contents[metadata_end + 2:],
            metadata,
            # Fallback test name: "<parent_dir>.<file_name>" sans extension.
            metadata.get('test_name') or '.'.join(path.replace('.thing', '').split(os.sep)[-2:]),
            path + 'c'
        )
def split_lines(param):
    # Drop carriage returns first so CRLF and LF inputs split identically.
    cleaned = param.replace('\r', '')
    return cleaned.split('\n')
@pytest.mark.parametrize('test_file', collect_tests(), ids=lambda x: x.name)
def test_thing_program(test_file):
    """Compile a .thing program, run it on both the native VM and the Python
    execution engine, and compare both outputs to the expected output."""
    expected_output = test_file.metadata['expected_output']

    utils.print_header('Parsed AST')
    ast = thinglang.compiler(test_file.code)
    print(ast.tree())

    utils.print_header("Bytecode generation")
    bytecode = ast.compile().finalize()
    print(bytecode)

    utils.print_header('VM execution')
    with open(test_file.bytecode_target, 'wb') as f:
        f.write(bytecode)

    vm = subprocess.Popen(["thinglang", test_file.bytecode_target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = (stream.decode('utf-8').strip() for stream in vm.communicate())
    print(stderr)

    utils.print_header('VM output')
    print(stdout)

    # Reference run on the Python execution engine.
    local = thinglang.run(test_file.code).output

    # Non-string expectations are line lists; normalize both outputs.
    if not isinstance(expected_output, str):
        stdout = split_lines(stdout)
        local = split_lines(local)

    assert vm.returncode == 0, 'VM process crashed'
    assert local == expected_output, 'Execution engine output did not match expected output'
    assert stdout == expected_output, 'VM output did not match expected output'
| import collections
import io
import json
import os
import pytest
import glob
import subprocess
import thinglang
from thinglang import run, utils
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
SEARCH_PATTERN = os.path.join(BASE_PATH, '**/*.thing')
TestCase = collections.namedtuple('TestCase', ['code', 'metadata', 'name', 'bytecode_target'])
def collect_tests():
    """Yield a TestCase for every .thing program under the test directory.

    Each program embeds a JSON metadata header inside its leading /* ... */
    block; the program code proper follows the closing */.
    """
    for path in glob.glob(SEARCH_PATTERN, recursive=True):
        with open(path, 'r') as f:
            contents = f.read()
        metadata_start = contents.index('/*') + 2
        metadata_end = contents.index('*/')
        metadata = json.loads(contents[metadata_start:metadata_end])
        yield TestCase(
            contents[metadata_end + 2:],
            metadata,
            # Fallback test name: "<parent_dir>.<file_name>" sans extension.
            metadata.get('test_name') or '.'.join(path.replace('.thing', '').split(os.sep)[-2:]),
            path + 'c'
        )
def split_lines(param):
    """Normalize Windows line endings, then split on newlines."""
    without_cr = ''.join(param.split('\r'))
    return without_cr.split('\n')
@pytest.mark.parametrize('test_file', collect_tests(), ids=lambda x: x.name)
def test_thing_program(test_file):
    """Compile a .thing program, run the bytecode on the native VM and the
    Python execution engine, and compare both outputs to the expected one."""
    expected_output = test_file.metadata['expected_output']

    utils.print_header("Bytecode generation")
    bytecode = thinglang.compiler(test_file.code).compile().finalize()
    print(bytecode)

    utils.print_header('VM execution')
    with open(test_file.bytecode_target, 'wb') as f:
        f.write(bytecode)

    vm = subprocess.Popen(["thinglang", test_file.bytecode_target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = (stream.decode('utf-8').strip() for stream in vm.communicate())
    print(stderr)

    utils.print_header('VM output')
    print(stdout)

    # Reference run on the Python execution engine.
    local = thinglang.run(test_file.code).output

    # Non-string expectations are line lists; normalize both outputs.
    if not isinstance(expected_output, str):
        stdout = split_lines(stdout)
        local = split_lines(local)

    assert vm.returncode == 0, 'VM process crashed'
    assert local == expected_output, 'Execution engine output did not match expected output'
    assert stdout == expected_output, 'VM output did not match expected output'
| Python | 0.000001 |
4fc0162c73178678281c0e09cf32ffefa4b7b923 | Handle unavailable server | flasque/client.py | flasque/client.py | # -*- coding: utf8 -*-
import json
import time
import Queue
import requests
import threading
class ThreadQueue(threading.Thread):
def __init__(self, api, qname, *args, **kwargs):
super(ThreadQueue, self).__init__(*args, **kwargs)
self.api = api
self.qname = qname
self.q = Queue.Queue()
self.daemon = True
self._stop = threading.Event()
def run(self):
while True:
self.loop()
@staticmethod
def make_request(func, *args, **kwargs):
while True:
try:
return func(*args, **kwargs)
except requests.exceptions.RequestException:
time.sleep(1)
def get(self, *args, **kwargs):
return self.q.get(*args, **kwargs)
def put(self, *args, **kwargs):
return self.q.put(*args, **kwargs)
def task_done(self):
return self.q.task_done()
def stop(self):
self._stop.set()
def close(self):
self._stop()
self.join()
class Producer(ThreadQueue):
    """Drains the local queue and POSTs each payload to the remote queue."""

    def loop(self):
        try:
            # Wake at least once per second so a stop() request is noticed
            # even when no data is queued.
            data = self.get(timeout=1)
        except Queue.Empty:
            pass
        else:
            # Retries internally until the server accepts the payload.
            self.make_request(
                requests.post,
                self.api + "/queue/" + self.qname,
                data=data,
            )
        if self._stop.is_set():
            return
class Consumer(ThreadQueue):
    """Streams messages from the server into the local queue and acknowledges
    (DELETEs) each message once the caller marks it done."""

    def loop(self):
        # Long-poll the server; the response streams one JSON message per line.
        res = self.make_request(
            requests.get,
            self.api + "/queue/",
            params={"q": self.qname},
            stream=True,
        )
        for line in res.iter_lines(chunk_size=1):
            if self._stop.is_set():
                return
            # Rebinds `res` to the decoded message dict (shadows the HTTP
            # response; the line iterator is unaffected).
            res = json.loads(line)
            self.q.put(res["data"])
            # Block until the caller calls task_done(), then ack the message
            # so the server can discard it.
            self.q.join()
            self.make_request(
                requests.delete,
                self.api + "/queue/" + res["q"],
                params={"msgid": res["msgid"]},
            )
class Connection(object):
    """Factory and owner of Producer/Consumer threads for one flasque server.

    Usable as a context manager; exiting stops and joins all threads.
    """

    def __init__(self, api="http://localhost:5000"):
        self.api = api
        self.threads = []  # every thread created through this connection
        super(Connection, self).__init__()

    def Producer(self, qname):
        # Create, start and track a producer for the named queue.
        producer = Producer(self.api, qname)
        producer.start()
        self.threads.append(producer)
        return producer

    def Consumer(self, *qname):
        # Accepts several queue names; passed through to Consumer as a tuple.
        consumer = Consumer(self.api, qname)
        consumer.start()
        self.threads.append(consumer)
        return consumer

    def close(self):
        # Signal every thread first, then join, so shutdowns overlap.
        for th in self.threads:
            th.stop()
        for th in self.threads:
            th.join()
        self.threads = []

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()
| # -*- coding: utf8 -*-
import json
import Queue
import requests
import threading
class ThreadQueue(threading.Thread):
def __init__(self, api, qname, *args, **kwargs):
super(ThreadQueue, self).__init__(*args, **kwargs)
self.api = api
self.qname = qname
self.q = Queue.Queue()
self.daemon = True
self._stop = threading.Event()
def run(self):
raise NotImplementedError
def get(self, *args, **kwargs):
return self.q.get(*args, **kwargs)
def put(self, *args, **kwargs):
return self.q.put(*args, **kwargs)
def task_done(self):
return self.q.task_done()
def stop(self):
self._stop.set()
def close(self):
self._stop()
self.join()
class Producer(ThreadQueue):
    """Drains the local queue and POSTs each payload to the remote queue."""

    def run(self):
        while True:
            try:
                # 1s timeout so a stop() request is noticed when idle.
                data = self.get(timeout=1)
            except Queue.Empty:
                pass
            else:
                requests.post(self.api + "/queue/" + self.qname, data=data)
            if self._stop.is_set():
                return
class Consumer(ThreadQueue):
    """Streams messages from the server into the local queue and acknowledges
    (DELETEs) each message once the caller marks it done."""

    def run(self):
        while True:
            # Long-poll; the response streams one JSON message per line.
            res = requests.get(
                self.api + "/queue/",
                params={"q": self.qname},
                stream=True,
            )
            for line in res.iter_lines(chunk_size=1):
                if self._stop.is_set():
                    return
                # Rebinds `res` to the decoded message dict (shadows the
                # HTTP response; the line iterator is unaffected).
                res = json.loads(line)
                self.q.put(res["data"])
                # Wait for task_done() from the caller before acking.
                self.q.join()
                requests.delete(
                    self.api + "/queue/" + res["q"],
                    params={"msgid": res["msgid"]},
                )
class Connection(object):
    """Factory and owner of Producer/Consumer threads for one flasque server.

    Usable as a context manager; exiting stops and joins all threads.
    """

    def __init__(self, api="http://localhost:5000"):
        self.api = api
        self.threads = []  # every thread created through this connection
        super(Connection, self).__init__()

    def Producer(self, qname):
        producer = Producer(self.api, qname)
        producer.start()
        self.threads.append(producer)
        return producer

    def Consumer(self, *qname):
        # Accepts several queue names; passed through to Consumer as a tuple.
        consumer = Consumer(self.api, qname)
        consumer.start()
        self.threads.append(consumer)
        return consumer

    def close(self):
        # Signal every thread first, then join, so shutdowns overlap.
        for th in self.threads:
            th.stop()
        for th in self.threads:
            th.join()
        self.threads = []

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()
| Python | 0.000001 |
7dd10d88b89da4a10db45d6393fd05d0d2dc718e | Change get_check_by_name optional argument | pingdombackup/PingdomBackup.py | pingdombackup/PingdomBackup.py | from calendar import timegm
from datetime import datetime, timedelta
from .Pingdom import Pingdom
from .Database import Database
from .log import log
class PingdomBackup:
    """Mirrors Pingdom probes, checks and results into a local database."""

    # Largest from/to window (seconds) accepted by the Pingdom results API.
    MAX_INTERVAL = 2764800

    def __init__(self, email, password, app_key, database):
        self.pingdom = Pingdom(email, password, app_key)
        self.database = Database(database)

    def update_probes(self):
        # get the probe list
        log.info('Updating probe records.')
        resp_json = self.pingdom.api('GET', 'probes', params={'includedeleted': True})
        probes = resp_json['probes']
        for probe in probes:
            self.database.upsert_record('probes', probe)
        log.info('{0} {1} updated.'.format(len(probes), 'probe was' if len(probes) == 1 else 'probes were'))

    def update_checks(self):
        # get the checks list
        log.info('Updating check records.')
        resp_json = self.pingdom.api('GET', 'checks')
        checks = resp_json['checks']
        for check in checks:
            # 'tags' is nested data the flat table schema cannot store.
            del check['tags']
            self.database.upsert_record('checks', check)
        log.info('{0} {1} updated.'.format(len(checks), 'check was' if len(checks) == 1 else 'checks were'))

    def get_check_by_name(self, name):
        # Look the check up in the local database only (no API refresh).
        return self.database.get_record('checks', where='name = ?', parameters=(name, ))

    def update_results(self, check):
        """Download and store all results for `check` newer than the newest stored one."""
        log.info('Checking for new results.')

        # get the most recent result time from the database
        # NOTE(review): this query is not filtered by checkid, so with several
        # checks the newest time of ANY check is used — confirm intended.
        results = self.database.get_records('results', order_by='time DESC', limit=1)
        if len(results) == 0:
            min_from_t = 0
        else:
            # + 1 because we don't want to include the previous result
            min_from_t = results[0]['time'] + 1

        # Upper bound two days in the future to tolerate clock differences.
        to_t = timegm((datetime.now() + timedelta(days=2)).timetuple())
        limit = 1000
        last_count = limit
        all_results = []
        # Page backwards through time; a full page means more may remain.
        while last_count == limit:
            # calculate the minimum bound
            from_t = max(to_t - self.MAX_INTERVAL, min_from_t)

            # get the next page
            resp_json = self.pingdom.api('GET', 'results/{0}'.format(check['id']), params={
                'to': to_t,
                'from': from_t,
                'limit': limit
            })
            results = resp_json['results']
            last_count = len(results)

            # inspect each row
            for result in results:
                result['id'] = None  # let the database assign the primary key
                result['checkid'] = check['id']

                # update the to_timestamp so the next page starts earlier
                if result['time'] < to_t:
                    to_t = result['time']
            all_results.extend(results)

        # bulk insert, oldest first
        all_results = sorted(all_results, key=lambda r: r['time'])
        log.info('{0} new {1} been found.'.format(len(all_results), 'result has' if len(all_results) == 1 else 'results have'))
        self.database.insert_records('results', all_results)
| from calendar import timegm
from datetime import datetime, timedelta
from .Pingdom import Pingdom
from .Database import Database
from .log import log
class PingdomBackup:
    """Mirrors Pingdom probes, checks and results into a local database."""

    # Largest from/to window (seconds) accepted by the Pingdom results API.
    MAX_INTERVAL = 2764800

    def __init__(self, email, password, app_key, database):
        self.pingdom = Pingdom(email, password, app_key)
        self.database = Database(database)

    def update_probes(self):
        # get the probe list
        log.info('Updating probe records.')
        resp_json = self.pingdom.api('GET', 'probes', params={'includedeleted': True})
        probes = resp_json['probes']
        for probe in probes:
            self.database.upsert_record('probes', probe)
        log.info('{0} {1} updated.'.format(len(probes), 'probe was' if len(probes) == 1 else 'probes were'))

    def update_checks(self):
        # get the checks list
        log.info('Updating check records.')
        resp_json = self.pingdom.api('GET', 'checks')
        checks = resp_json['checks']
        for check in checks:
            # 'tags' is nested data the flat table schema cannot store.
            del check['tags']
            self.database.upsert_record('checks', check)
        log.info('{0} {1} updated.'.format(len(checks), 'check was' if len(checks) == 1 else 'checks were'))

    def get_check_by_name(self, name, from_api=True):
        # Optionally refresh the local check table from the API first.
        if from_api:
            self.update_checks()
        return self.database.get_record('checks', where='name = ?', parameters=(name, ))

    def update_results(self, check):
        """Download and store all results for `check` newer than the newest stored one."""
        log.info('Checking for new results.')

        # get the most recent result time from the database
        # NOTE(review): not filtered by checkid; with several checks this uses
        # the newest time of ANY check — confirm intended.
        results = self.database.get_records('results', order_by='time DESC', limit=1)
        if len(results) == 0:
            min_from_t = 0
        else:
            # + 1 because we don't want to include the previous result
            min_from_t = results[0]['time'] + 1

        # Upper bound two days in the future to tolerate clock differences.
        to_t = timegm((datetime.now() + timedelta(days=2)).timetuple())
        limit = 1000
        last_count = limit
        all_results = []
        # Page backwards through time; a full page means more may remain.
        while last_count == limit:
            # calculate the minimum bound
            from_t = max(to_t - self.MAX_INTERVAL, min_from_t)

            # get the next page
            resp_json = self.pingdom.api('GET', 'results/{0}'.format(check['id']), params={
                'to': to_t,
                'from': from_t,
                'limit': limit
            })
            results = resp_json['results']
            last_count = len(results)

            # inspect each row
            for result in results:
                result['id'] = None  # let the database assign the primary key
                result['checkid'] = check['id']

                # update the to_timestamp so the next page starts earlier
                if result['time'] < to_t:
                    to_t = result['time']
            all_results.extend(results)

        # bulk insert, oldest first
        all_results = sorted(all_results, key=lambda r: r['time'])
        log.info('{0} new {1} been found.'.format(len(all_results), 'record has' if len(all_results) == 1 else 'records have'))
        self.database.insert_records('results', all_results)
| Python | 0.000003 |
0dd41b65aaa0798a7a72a0d61d746bfa29bc3aad | Allow POST of fly and worm donors | src/encoded/types/donor.py | src/encoded/types/donor.py | from ..schema_utils import (
load_schema,
)
from ..contentbase import (
location,
)
from .base import (
ACCESSION_KEYS,
ALIAS_KEYS,
Collection,
paths_filtered_by_status,
)
class DonorItem(Collection.Item):
    """Shared item behaviour for all donor collections."""
    # Extra 'donor' base type lets all donor kinds be queried together.
    base_types = ['donor'] + Collection.Item.base_types
    embedded = set(['organism'])
    name_key = 'accession'
    keys = ACCESSION_KEYS + ALIAS_KEYS
    # Reverse link: donor_characterization items point here via 'characterizes'.
    rev = {
        'characterizations': ('donor_characterization', 'characterizes'),
    }
    template = {
        'characterizations': (
            # Only characterizations whose status passes the filter are listed.
            lambda root, characterizations: paths_filtered_by_status(root, characterizations)
        ),
    }
@location('mouse-donors')
class MouseDonor(Collection):
    """Mouse donor collection; the empty __acl__ removes collection-level
    permissions."""
    item_type = 'mouse_donor'
    schema = load_schema('mouse_donor.json')
    __acl__ = []
    properties = {
        'title': 'Mouse donors',
        'description': 'Listing Biosample Donors',
    }

    class Item(DonorItem):
        def __ac_local_roles__(self):
            # Disallow lab submitter edits
            return {}
@location('fly-donors')
class FlyDonor(Collection):
    """Fly donor collection; no __acl__ override, so the default Collection
    ACL applies."""
    item_type = 'fly_donor'
    schema = load_schema('fly_donor.json')
    properties = {
        'title': 'Fly donors',
        'description': 'Listing Biosample Donors',
    }

    class Item(DonorItem):
        # Also embed genetic constructs and their targets in item views.
        embedded = set(['organism', 'constructs', 'constructs.target'])
@location('worm-donors')
class WormDonor(Collection):
    """Worm donor collection; no __acl__ override, so the default Collection
    ACL applies."""
    item_type = 'worm_donor'
    schema = load_schema('worm_donor.json')
    properties = {
        'title': 'Worm donors',
        'description': 'Listing Biosample Donors',
    }

    class Item(DonorItem):
        # Also embed genetic constructs and their targets in item views.
        embedded = set(['organism', 'constructs', 'constructs.target'])
@location('human-donors')
class HumanDonor(Collection):
    """Human donor collection."""
    item_type = 'human_donor'
    schema = load_schema('human_donor.json')
    properties = {
        'title': 'Human donors',
        'description': 'Listing Biosample Donors',
    }

    class Item(DonorItem):
        pass
| from ..schema_utils import (
load_schema,
)
from ..contentbase import (
location,
)
from .base import (
ACCESSION_KEYS,
ALIAS_KEYS,
Collection,
paths_filtered_by_status,
)
class DonorItem(Collection.Item):
base_types = ['donor'] + Collection.Item.base_types
embedded = set(['organism'])
name_key = 'accession'
keys = ACCESSION_KEYS + ALIAS_KEYS
rev = {
'characterizations': ('donor_characterization', 'characterizes'),
}
template = {
'characterizations': (
lambda root, characterizations: paths_filtered_by_status(root, characterizations)
),
}
@location('mouse-donors')
class MouseDonor(Collection):
item_type = 'mouse_donor'
schema = load_schema('mouse_donor.json')
__acl__ = []
properties = {
'title': 'Mouse donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
def __ac_local_roles__(self):
# Disallow lab submitter edits
return {}
@location('fly-donors')
class FlyDonor(Collection):
item_type = 'fly_donor'
schema = load_schema('fly_donor.json')
__acl__ = []
properties = {
'title': 'Fly donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
embedded = set(['organism', 'constructs', 'constructs.target'])
@location('worm-donors')
class WormDonor(Collection):
item_type = 'worm_donor'
schema = load_schema('worm_donor.json')
__acl__ = []
properties = {
'title': 'Worm donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
embedded = set(['organism', 'constructs', 'constructs.target'])
@location('human-donors')
class HumanDonor(Collection):
item_type = 'human_donor'
schema = load_schema('human_donor.json')
properties = {
'title': 'Human donors',
'description': 'Listing Biosample Donors',
}
class Item(DonorItem):
pass
| Python | 0 |
b9133e2fe7444b4449ab67f4d726c20ce5e21cd8 | clean ups in presentation of names | gazetteer/admin.py | gazetteer/admin.py | from django.contrib import admin
from django import forms
from gazetteer.models import *
from skosxl.models import Notation
from .settings import TARGET_NAMESPACE_FT
# Register your models here.
# works for Dango > 1.6
class NameInline(admin.TabularInline):
model = LocationName
readonly_fields = ['nameUsed', 'namespace']
extra = 0
class LocationTypeInlineForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(LocationTypeInlineForm, self).__init__(*args, **kwargs)
self.fields['locationType'].queryset = Notation.objects.filter(concept__scheme__uri = TARGET_NAMESPACE_FT[0:-1] )
class LocationTypeInline(admin.StackedInline) :
model = Notation
form = LocationTypeInlineForm
class LocationAdmin(admin.ModelAdmin):
search_fields = ['locationType__term','locationname__name']
inlines = [
NameInline,
]
class NameFieldConfigInline(admin.TabularInline):
model = NameFieldConfig
extra = 1
class CodeFieldConfigInline(admin.TabularInline):
model = CodeFieldConfig
extra = 1
class LocationTypeFieldInline(admin.TabularInline):
model = LocationTypeField
class GazSourceConfigAdmin(admin.ModelAdmin):
model = GazSourceConfig
inlines = [
LocationTypeFieldInline, NameFieldConfigInline, CodeFieldConfigInline
]
admin.site.register(GazSource);
admin.site.register(GazSourceConfig,GazSourceConfigAdmin);
admin.site.register(Location, LocationAdmin);
admin.site.register(LocationName);
admin.site.register(LinkSet);
| from django.contrib import admin
from django import forms
from gazetteer.models import *
from skosxl.models import Notation
from .settings import TARGET_NAMESPACE_FT
# Register your models here.
# works for Dango > 1.6
class NameInline(admin.TabularInline):
model = LocationName
class LocationTypeInlineForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(LocationTypeInlineForm, self).__init__(*args, **kwargs)
self.fields['locationType'].queryset = Notation.objects.filter(concept__scheme__uri = TARGET_NAMESPACE_FT[0:-1] )
class LocationTypeInline(admin.StackedInline) :
model = Notation
form = LocationTypeInlineForm
class LocationAdmin(admin.ModelAdmin):
search_fields = ['locationType__term','locationname__name']
inlines = [
NameInline,
]
class NameFieldConfigInline(admin.TabularInline):
model = NameFieldConfig
extra = 1
class CodeFieldConfigInline(admin.TabularInline):
model = CodeFieldConfig
extra = 1
class LocationTypeFieldInline(admin.TabularInline):
model = LocationTypeField
class GazSourceConfigAdmin(admin.ModelAdmin):
model = GazSourceConfig
inlines = [
LocationTypeFieldInline, NameFieldConfigInline, CodeFieldConfigInline
]
admin.site.register(GazSource);
admin.site.register(GazSourceConfig,GazSourceConfigAdmin);
admin.site.register(Location, LocationAdmin);
admin.site.register(LocationName);
admin.site.register(LinkSet);
| Python | 0.000007 |
59da9b84c491fd5ca4f4c7add5891d5e9ee4b405 | Make it work with astor 0.5 for now | flaws/asttools.py | flaws/asttools.py | import ast
try:
from ast import arg as ast_arg
except ImportError:
ast_arg = type('arg', (ast.AST,), {})
from funcy.py3 import lmap
def is_write(node):
return isinstance(node, (ast.Import, ast.ImportFrom, ast.ExceptHandler,
ast.FunctionDef, ast.ClassDef, ast.arguments, ast_arg)) \
or isinstance(node.ctx, (ast.Store, ast.Del, ast.Param))
def is_read(node):
return isinstance(node, ast.Name) and isinstance(node.ctx, ast.Load)
def is_use(node):
return isinstance(node, ast.Name) \
and isinstance(node.ctx, (ast.Load, ast.Del))
def is_constant(node):
return isinstance(node, ast.Name) and node.id.isupper()
def is_param(node):
return isinstance(node, ast.Name) and isinstance(node.ctx, ast.Param) \
or isinstance(node, (ast.arguments, ast_arg))
def is_import(node):
return isinstance(node, (ast.Import, ast.ImportFrom))
def is_name(node, name):
return isinstance(node, ast.Name) and node.id == name
def ast_eval(node):
if isinstance(node, (ast.List, ast.Tuple)):
return lmap(ast_eval, node.elts)
elif isinstance(node, ast.Str):
return node.s
elif isinstance(node, ast.Num):
return node.n
else:
raise ValueError("Don't know how to eval %s" % node.__class__.__name__)
def name_class(node):
if isinstance(node, (ast.Import, ast.ImportFrom)):
return 'import'
elif isinstance(node, ast.FunctionDef):
return 'function'
elif isinstance(node, ast.ClassDef):
return 'class'
elif is_param(node):
return 'param'
else:
return 'variable'
def node_str(node):
return '%s at %d:%d' % (name_class(node), node.lineno, node.col_offset)
def nodes_str(nodes):
return '[%s]' % ', '.join(map(node_str, nodes))
# Parse to AST
import sys
import inspect
import textwrap
def get_body_ast(func):
return get_ast(func).body[0].body
def get_ast(func):
# Get function source
source = inspect.getsource(func)
source = textwrap.dedent(source)
# Preserve line numbers
source = '\n' * (func.__code__.co_firstlineno - 1) + source
return ast.parse(source, func_file(func), 'single')
def func_file(func):
return getattr(sys.modules[func.__module__], '__file__', '<nofile>')
# Code generation
try:
from astor.code_gen import SourceGenerator
except ImportError:
from astor.codegen import SourceGenerator
from termcolor import colored
def to_source(node, indent_with=' ' * 4, add_line_information=False):
"""
A modified to_source() function from astor.
"""
generator = AnnotatedSourceGenerator(indent_with, add_line_information)
generator.visit(node)
return ''.join(str(s) for s in generator.result)
class AnnotatedSourceGenerator(SourceGenerator):
def visit(self, node):
SourceGenerator.visit(self, node)
if not isinstance(node, (ast.Num, ast.Str)) and hasattr(node, 'val'):
self.write(colored(' (%s)' % node.val, 'green'))
| import ast
try:
from ast import arg as ast_arg
except ImportError:
ast_arg = type('arg', (ast.AST,), {})
from funcy.py3 import lmap
def is_write(node):
return isinstance(node, (ast.Import, ast.ImportFrom, ast.ExceptHandler,
ast.FunctionDef, ast.ClassDef, ast.arguments, ast_arg)) \
or isinstance(node.ctx, (ast.Store, ast.Del, ast.Param))
def is_read(node):
return isinstance(node, ast.Name) and isinstance(node.ctx, ast.Load)
def is_use(node):
return isinstance(node, ast.Name) \
and isinstance(node.ctx, (ast.Load, ast.Del))
def is_constant(node):
return isinstance(node, ast.Name) and node.id.isupper()
def is_param(node):
return isinstance(node, ast.Name) and isinstance(node.ctx, ast.Param) \
or isinstance(node, (ast.arguments, ast_arg))
def is_import(node):
return isinstance(node, (ast.Import, ast.ImportFrom))
def is_name(node, name):
return isinstance(node, ast.Name) and node.id == name
def ast_eval(node):
if isinstance(node, (ast.List, ast.Tuple)):
return lmap(ast_eval, node.elts)
elif isinstance(node, ast.Str):
return node.s
elif isinstance(node, ast.Num):
return node.n
else:
raise ValueError("Don't know how to eval %s" % node.__class__.__name__)
def name_class(node):
if isinstance(node, (ast.Import, ast.ImportFrom)):
return 'import'
elif isinstance(node, ast.FunctionDef):
return 'function'
elif isinstance(node, ast.ClassDef):
return 'class'
elif is_param(node):
return 'param'
else:
return 'variable'
def node_str(node):
return '%s at %d:%d' % (name_class(node), node.lineno, node.col_offset)
def nodes_str(nodes):
return '[%s]' % ', '.join(map(node_str, nodes))
# Parse to AST
import sys
import inspect
import textwrap
def get_body_ast(func):
return get_ast(func).body[0].body
def get_ast(func):
# Get function source
source = inspect.getsource(func)
source = textwrap.dedent(source)
# Preserve line numbers
source = '\n' * (func.__code__.co_firstlineno - 1) + source
return ast.parse(source, func_file(func), 'single')
def func_file(func):
return getattr(sys.modules[func.__module__], '__file__', '<nofile>')
# Code generation
from astor.code_gen import SourceGenerator
from termcolor import colored
def to_source(node, indent_with=' ' * 4, add_line_information=False):
"""
A modified to_source() function from astor.
"""
generator = AnnotatedSourceGenerator(indent_with, add_line_information)
generator.visit(node)
return ''.join(str(s) for s in generator.result)
class AnnotatedSourceGenerator(SourceGenerator):
def visit(self, node):
SourceGenerator.visit(self, node)
if not isinstance(node, (ast.Num, ast.Str)) and hasattr(node, 'val'):
self.write(colored(' (%s)' % node.val, 'green'))
| Python | 0 |
4ac7e5d15d3fba11ae37e5826ca6c7181539804b | Disable nested types tests affected by IMPALA-2295 | tests/query_test/test_nested_types.py | tests/query_test/test_nested_types.py | #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
import pytest
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestNestedTypes(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestNestedTypes, cls).add_test_dimensions()
cls.TestMatrix.add_constraint(lambda v:
v.get_value('table_format').file_format == 'parquet')
def test_scanner_basic(self, vector):
"""Queries that do not materialize arrays."""
self.run_test_case('QueryTest/nested-types-scanner-basic', vector)
def test_scanner_array_materialization(self, vector):
"""Queries that materialize arrays."""
self.run_test_case('QueryTest/nested-types-scanner-array-materialization', vector)
def test_scanner_multiple_materialization(self, vector):
"""Queries that materialize the same array multiple times."""
self.run_test_case('QueryTest/nested-types-scanner-multiple-materialization', vector)
def test_scanner_position(self, vector):
"""Queries that materialize the artifical position element."""
self.run_test_case('QueryTest/nested-types-scanner-position', vector)
def test_scanner_map(self, vector):
"""Queries that materialize maps. (Maps looks like arrays of key/value structs, so
most map functionality is already tested by the array tests.)"""
self.run_test_case('QueryTest/nested-types-scanner-maps', vector)
def test_runtime(self, vector):
"""Queries that send collections through the execution runtime."""
pytest.skip("IMPALA-2295")
self.run_test_case('QueryTest/nested-types-runtime', vector)
def test_tpch(self, vector):
"""Queries over the larger nested TPCH dataset."""
pytest.skip("IMPALA-2295")
# This test takes a long time (minutes), only run in exhaustive
if self.exploration_strategy() != 'exhaustive': pytest.skip()
self.run_test_case('QueryTest/nested-types-tpch', vector)
| #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
import pytest
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestNestedTypes(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestNestedTypes, cls).add_test_dimensions()
cls.TestMatrix.add_constraint(lambda v:
v.get_value('table_format').file_format == 'parquet')
def test_scanner_basic(self, vector):
"""Queries that do not materialize arrays."""
self.run_test_case('QueryTest/nested-types-scanner-basic', vector)
def test_scanner_array_materialization(self, vector):
"""Queries that materialize arrays."""
self.run_test_case('QueryTest/nested-types-scanner-array-materialization', vector)
def test_scanner_multiple_materialization(self, vector):
"""Queries that materialize the same array multiple times."""
self.run_test_case('QueryTest/nested-types-scanner-multiple-materialization', vector)
def test_scanner_position(self, vector):
"""Queries that materialize the artifical position element."""
self.run_test_case('QueryTest/nested-types-scanner-position', vector)
def test_scanner_map(self, vector):
"""Queries that materialize maps. (Maps looks like arrays of key/value structs, so
most map functionality is already tested by the array tests.)"""
self.run_test_case('QueryTest/nested-types-scanner-maps', vector)
def test_runtime(self, vector):
"""Queries that send collections through the execution runtime."""
self.run_test_case('QueryTest/nested-types-runtime', vector)
def test_tpch(self, vector):
"""Queries over the larger nested TPCH dataset."""
# This test takes a long time (minutes), only run in exhaustive
if self.exploration_strategy() != 'exhaustive': pytest.skip()
self.run_test_case('QueryTest/nested-types-tpch', vector)
| Python | 0 |
f5f0cc6998f28bee7ccdaf304d3bc5e7e45ab9a6 | save memory allocation using kwarg `out`. | chainer/optimizer_hooks/gradient_hard_clipping.py | chainer/optimizer_hooks/gradient_hard_clipping.py | import chainer
from chainer import backend
class GradientHardClipping(object):
"""Optimizer/UpdateRule hook function for gradient clipping.
This hook function clips all gradient arrays to be within a lower and upper
bound.
Args:
lower_bound (float): The lower bound of the gradient value.
upper_bound (float): The upper bound of the gradient value.
Attributes:
~optimizer_hooks.GradientHardClipping.lower_bound (float): The
lower bound of the gradient value.
~optimizer_hooks.GradientHardClipping.upper_bound (float): The
upper bound of the gradient value.
~optimizer_hooks.GradientHardClipping.timing (string): Specifies
when this hook should be called by the
Optimizer/UpdateRule. Valid values are 'pre'
(before any updates) and 'post'
(after any updates).
~optimizer_hooks.GradientHardClipping.call_for_each_param (bool): \
Specifies if this hook is called for each parameter
(``True``) or only once (``False``) by an optimizer to
which this hook is registered. This function does
not expect users to switch the value from default one,
which is `True`.
.. versionadded:: 4.0.0
The *timing* parameter.
"""
name = 'GradientHardClipping'
call_for_each_param = True
timing = 'pre'
def __init__(self, lower_bound, upper_bound):
self.lower_bound = lower_bound
self.upper_bound = upper_bound
def __call__(self, rule, param):
grad = param.grad
if grad is None:
return
with chainer.using_device(param.device):
xp = param.device.xp
if xp == backend.chainerx \
or isinstance(param.grad, backend.intel64.mdarray):
param.grad = grad.clip(self.lower_bound, self.upper_bound)
else:
# Save on new object allocation when using numpy and cupy
# using kwarg `out`
xp.clip(grad, self.lower_bound, self.upper_bound, out=grad)
| import chainer
class GradientHardClipping(object):
"""Optimizer/UpdateRule hook function for gradient clipping.
This hook function clips all gradient arrays to be within a lower and upper
bound.
Args:
lower_bound (float): The lower bound of the gradient value.
upper_bound (float): The upper bound of the gradient value.
Attributes:
~optimizer_hooks.GradientHardClipping.lower_bound (float): The
lower bound of the gradient value.
~optimizer_hooks.GradientHardClipping.upper_bound (float): The
upper bound of the gradient value.
~optimizer_hooks.GradientHardClipping.timing (string): Specifies
when this hook should be called by the
Optimizer/UpdateRule. Valid values are 'pre'
(before any updates) and 'post'
(after any updates).
~optimizer_hooks.GradientHardClipping.call_for_each_param (bool): \
Specifies if this hook is called for each parameter
(``True``) or only once (``False``) by an optimizer to
which this hook is registered. This function does
not expect users to switch the value from default one,
which is `True`.
.. versionadded:: 4.0.0
The *timing* parameter.
"""
name = 'GradientHardClipping'
call_for_each_param = True
timing = 'pre'
def __init__(self, lower_bound, upper_bound):
self.lower_bound = lower_bound
self.upper_bound = upper_bound
def __call__(self, rule, param):
grad = param.grad
if grad is None:
return
with chainer.using_device(param.device):
param.grad = param.grad.clip(self.lower_bound, self.upper_bound)
| Python | 0 |
8c17d2076d54864094c3cd8ee51d514bc806c913 | bump version | flexx/__init__.py | flexx/__init__.py | """
`Flexx <https://flexx.readthedocs.io>`_ is a pure Python toolkit for
creating graphical user interfaces (GUI's), that uses web technology
for its rendering. Apps are written purely in Python; The
`PScript <https://pscript.readthedocs.io>`_ transpiler generates the
necessary JavaScript on the fly.
You can use Flexx to create (cross platform) desktop applications, web
applications, and export an app to a standalone HTML document. It also
works in the Jupyter notebook.
The docs are on `Readthedocs <http://flexx.readthedocs.io>`_,
the code is on `Github <http://github.com/flexxui/flexx>`_,
and there is a `demo server <http://demo.flexx.app>`_.
Once you've got started, the most important page is probably the
:doc:`Widget reference <ui/api>`.
----
For more information, see http://flexx.readthedocs.io.
"""
# NOTES ON DOCS:
# There are 2 places that define the short summary of Flexx: the
# __init__.py and the README.md. Their summaries should be kept equal.
# The index.rst for the docs uses the summary from __init__.py (the
# part after the "----" is stripped. The long-description for Pypi is
# obtained by converting README.md to RST.
__version__ = '0.8.0'
# Assert compatibility
import sys
if sys.version_info < (3, 5): # pragma: no cover
raise RuntimeError('Flexx needs at least Python 3.5')
# Import config object
from ._config import config # noqa
from .util.logging import set_log_level # noqa
set_log_level(config.log_level)
del sys
| """
`Flexx <https://flexx.readthedocs.io>`_ is a pure Python toolkit for
creating graphical user interfaces (GUI's), that uses web technology
for its rendering. Apps are written purely in Python; The
`PScript <https://pscript.readthedocs.io>`_ transpiler generates the
necessary JavaScript on the fly.
You can use Flexx to create (cross platform) desktop applications, web
applications, and export an app to a standalone HTML document. It also
works in the Jupyter notebook.
The docs are on `Readthedocs <http://flexx.readthedocs.io>`_,
the code is on `Github <http://github.com/flexxui/flexx>`_,
and there is a `demo server <http://demo.flexx.app>`_.
Once you've got started, the most important page is probably the
:doc:`Widget reference <ui/api>`.
----
For more information, see http://flexx.readthedocs.io.
"""
# NOTES ON DOCS:
# There are 2 places that define the short summary of Flexx: the
# __init__.py and the README.md. Their summaries should be kept equal.
# The index.rst for the docs uses the summary from __init__.py (the
# part after the "----" is stripped. The long-description for Pypi is
# obtained by converting README.md to RST.
__version__ = '0.7.1'
# Assert compatibility
import sys
if sys.version_info < (3, 5): # pragma: no cover
raise RuntimeError('Flexx needs at least Python 3.5')
# Import config object
from ._config import config # noqa
from .util.logging import set_log_level # noqa
set_log_level(config.log_level)
del sys
| Python | 0 |