| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 2–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int32 2–1.05M |
# -*- coding: utf-8 -*-
import os
from dlstats.fetchers.bea import BEA as Fetcher
import httpretty
from dlstats.tests.base import RESOURCES_DIR as BASE_RESOURCES_DIR
from dlstats.tests.fetchers.base import BaseFetcherTestCase
import unittest
from unittest import mock
RESOURCES_DIR = os.path.abspath(os.path.join(BASE_RESOURCES_DIR, "bea"))
DATA_BEA_10101_An = {
"filepath": os.path.abspath(os.path.join(RESOURCES_DIR, "nipa-section1.xls.zip")),
"DSD": {
"provider": "BEA",
"filepath": None,
"dataset_code": "nipa-section1-10101-a",
"dsd_id": "nipa-section1-10101-a",
"is_completed": True,
"categories_key": "nipa-section1",
"categories_parents": ["national", "nipa"],
"categories_root": ["national", "nipa", "nipa-fa2004", "nipa-underlying"],
"concept_keys": ['concept', 'frequency'],
"codelist_keys": ['concept', 'frequency'],
"codelist_count": {
"concept": 25,
"frequency": 1
},
"dimension_keys": ['concept', 'frequency'],
"dimension_count": {
"concept": 25,
"frequency": 1
},
"attribute_keys": [],
"attribute_count": None,
},
"series_accept": 25,
"series_reject_frequency": 0,
"series_reject_empty": 0,
"series_all_values": 1175,
"series_key_first": "A191RL1-A",
"series_key_last": "A191RP1-A",
"series_sample": {
'provider_name': 'BEA',
'dataset_code': 'nipa-section1-10101-a',
'key': 'A191RL1-A',
'name': 'Gross domestic product - Annually',
'frequency': 'A',
'last_update': None,
'first_value': {
'value': '3.1',
'period': '1969',
'attributes': None,
},
'last_value': {
'value': '2.4',
'period': '2015',
'attributes': None,
},
'dimensions': {
'concept': 'a191rl1',
"frequency": 'a'
},
'attributes': None,
}
}
def _get_datasets_settings(self):
return {
"nipa-section1-10101-a": {
'dataset_code': 'nipa-section1-10101-a',
'name': 'Table 1.1.1. Percent Change From Preceding Period in Real Gross Domestic Product - Annually',
'last_update': None,
'metadata': {
'filename': 'nipa-section1.xls.zip',
'sheet_name': '10101 Ann',
'url': 'http://www.bea.gov/national/nipaweb/GetCSV.asp?GetWhat=SS_Data/Section1All_xls.zip&Section=2'
},
}
}
class FetcherTestCase(BaseFetcherTestCase):
# nosetests -s -v dlstats.tests.fetchers.test_bea:FetcherTestCase
FETCHER_KLASS = Fetcher
DATASETS = {
'nipa-section1-10101-a': DATA_BEA_10101_An
}
DATASET_FIRST = "nipa-fa2004-section1-101-a"
DATASET_LAST = "nipa-underlying-section9-90500U-a"
DEBUG_MODE = False
def _load_files(self, dataset_code):
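        # Register the BEA zip URL with httpretty so tests serve the local
        # fixture file instead of hitting bea.gov over the network.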
url = "http://www.bea.gov/national/nipaweb/GetCSV.asp?GetWhat=SS_Data/Section1All_xls.zip&Section=2"
self.register_url(url,
self.DATASETS[dataset_code]["filepath"])
@httpretty.activate
@unittest.skipUnless('FULL_TEST' in os.environ, "Skip - no full test")
def test_load_datasets_first(self):
dataset_code = "nipa-section1-10101-a"
self._load_files(dataset_code)
self.assertLoadDatasetsFirst([dataset_code])
@httpretty.activate
@unittest.skipUnless('FULL_TEST' in os.environ, "Skip - no full test")
def test_load_datasets_update(self):
dataset_code = "nipa-section1-10101-a"
self._load_files(dataset_code)
self.assertLoadDatasetsUpdate([dataset_code])
#@httpretty.activate
@unittest.skipIf(True, "TODO")
def test_build_data_tree(self):
dataset_code = "nipa-section1-10101-a"
self.assertDataTree(dataset_code)
@httpretty.activate
@mock.patch("dlstats.fetchers.bea.BEA._get_datasets_settings", _get_datasets_settings)
def test_upsert_dataset_10101(self):
# nosetests -s -v dlstats.tests.fetchers.test_bea:FetcherTestCase.test_upsert_dataset_10101
dataset_code = "nipa-section1-10101-a"
self._load_files(dataset_code)
self.assertProvider()
dataset = self.assertDataset(dataset_code)
names = {
'a191rl1': 'Gross domestic product',
'dpcerl1': 'Personal consumption expenditures',
'dgdsrl1': 'Personal consumption expenditures - Goods',
'ddurrl1': 'Personal consumption expenditures - Goods - Durable goods',
'dndgrl1': 'Personal consumption expenditures - Goods - Nondurable goods',
'dserrl1': 'Personal consumption expenditures - Services',
'a006rl1': 'Gross private domestic investment',
'a007rl1': 'Gross private domestic investment - Fixed investment',
'a008rl1': 'Gross private domestic investment - Fixed investment - Nonresidential',
'y033rl1': 'Gross private domestic investment - Fixed investment - Nonresidential - Equipment',
'a011rl1': 'Gross private domestic investment - Fixed investment - Residential',
'a020rl1': 'Net exports of goods and services - Exports',
'a191rp1': 'Addendum: - Gross domestic product, current dollars'
}
for k, v in names.items():
self.assertTrue(k in dataset["codelists"]["concept"])
            self.assertEqual(dataset["codelists"]["concept"][k], v)
series_list = self.assertSeries(dataset_code)
series_keys = {s["key"].lower(): s for s in series_list}
for k, v in names.items():
search_k = "%s-a" % k
search_name = "%s - Annually" % v
self.assertTrue(search_k in series_keys, "%s not in series_keys" % search_k)
            self.assertEqual(series_keys[search_k]["name"], search_name)
for series in series_list:
            self.assertEqual(series["last_update_ds"], dataset["last_update"])
| Widukind/dlstats | dlstats/tests/fetchers/test_bea.py | Python | agpl-3.0 | 6,159 |
# -*- coding: utf-8 -*-
import os
import shutil
import sys
import datetime
from invoke import task
from pelican.server import ComplexHTTPRequestHandler, RootedHTTPServer
CONFIG = {
    # Local path configuration (can be absolute or relative to tasks.py)
    'deploy_path': '..',
    # Remote server configuration for the rsync `publish` task
    # (placeholder values -- replace with your actual SSH host and path)
    'production': 'user@localhost',
    'dest_path': '/var/www',
    # Github Pages configuration
    'github_pages_branch': 'gh-pages',
    'commit_message': "'Publish site on {}'".format(datetime.date.today().isoformat()),
    # Port for `serve`
    'port': 8000,
}
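# Typical usage (assumes the `invoke` CLI is installed): invoke build serve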
@task
def clean(c):
"""Remove generated files"""
if os.path.isdir(CONFIG['deploy_path']):
shutil.rmtree(CONFIG['deploy_path'])
os.makedirs(CONFIG['deploy_path'])
@task
def build(c):
"""Build local version of site"""
c.run('pelican -s pelicanconf.py')
@task
def rebuild(c):
"""`build` with the delete switch"""
c.run('pelican -d -s pelicanconf.py')
@task
def regenerate(c):
"""Automatically regenerate site upon file modification"""
c.run('pelican -r -s pelicanconf.py')
@task
def serve(c):
"""Serve site at http://localhost:8000/"""
class AddressReuseTCPServer(RootedHTTPServer):
allow_reuse_address = True
server = AddressReuseTCPServer(
CONFIG['deploy_path'],
('', CONFIG['port']),
ComplexHTTPRequestHandler)
sys.stderr.write('Serving on port {port} ...\n'.format(**CONFIG))
server.serve_forever()
@task
def reserve(c):
"""`build`, then `serve`"""
build(c)
serve(c)
@task
def preview(c):
"""Build production version of site"""
c.run('pelican -s publishconf.py')
@task
def publish(c):
"""Publish to production via rsync"""
c.run('pelican -s publishconf.py')
c.run(
'rsync --delete --exclude ".DS_Store" -pthrvz -c '
'{} {production}:{dest_path}'.format(
CONFIG['deploy_path'].rstrip('/') + '/',
**CONFIG))
@task
def gh_pages(c):
"""Publish to GitHub Pages"""
preview(c)
c.run('ghp-import -b {github_pages_branch} '
'-m {commit_message} '
'{deploy_path} -p'.format(**CONFIG))
| webcamoid/webcamoid.github.io | internal/tasks.py | Python | agpl-3.0 | 2,105 |
#!/usr/bin/env python
from __future__ import print_function, division
import multiprocessing
import os
import csv
import logging
from datetime import datetime
import argparse
import shutil
import math
from glob import glob
import gzip
from shi7 import __version__
from shi7.shi7 import TRUE_FALSE_DICT, read_fastq, axe_adaptors_single_end, axe_adaptors_paired_end, flash_part1, \
flash_part2, split_fwd_rev, match_pairs, link_manicured_names
def make_arg_parser():
parser = argparse.ArgumentParser(description='This is the commandline interface for shi7_learning',
usage='shi7_learning v{version}\nshi7_learning.py -i <input> -o <output> ...'.format(version=__version__))
parser.add_argument('-i', '--input', help='Set the directory path of the fastq directory OR oligos.txt if splitting', required=True)
parser.add_argument('-o', '--output', help='Set the directory path of the output (default: cwd)', default=os.getcwd())
parser.add_argument('--debug', help='Retain all intermediate files (default: Disabled)', dest='debug', action='store_true')
parser.add_argument('-t', '--threads', help='Set the number of threads (default: %(default)s)',
default=min(multiprocessing.cpu_count(), 16))
parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + __version__)
parser.set_defaults()
return parser
def subsample_fastqs(path_fastqs, num_files=10, num_sequences=1000):
for i, path_fastq in enumerate(path_fastqs):
if i >= num_files:
return
with open(path_fastq) as fastq_inf:
fastq_gen = read_fastq(fastq_inf)
yield limit_fastq(fastq_gen, num_sequences=num_sequences)
def limit_fastq(fastq_gen, num_sequences=1000):
for i in range(num_sequences):
try:
yield next(fastq_gen)
except StopIteration:
return
def get_seq_length_qual_scores(path_fastqs, output_path, num_files=10, num_sequences=1000):
subsampled_fastqs = subsample_fastqs(path_fastqs, num_files=num_files, num_sequences=num_sequences)
sequence_len_sum = 0.
quality_sum = 0
num_sequences = 0.
for fastq_path, fastq_gen in zip(path_fastqs, subsampled_fastqs):
with open(os.path.join(output_path, os.path.basename(fastq_path)), 'w') as outf:
for header, sequence, quality in fastq_gen:
outf.write("@%s\n%s\n+\n%s\n" % (header, sequence, quality))
sequence_len_sum += len(sequence)
quality_sum += sum([ord(i) for i in quality])
num_sequences += 1.
# Return (average length of sequences, average quality score)
return sequence_len_sum/num_sequences, quality_sum/sequence_len_sum
def count_num_lines(path):
with open(path) as path_inf:
return sum(1 for line in path_inf)
def get_file_size(path):
return os.path.getsize(path)
def check_sequence_name(path_R1, path_R2):
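    # Paired FASTQ headers must be identical except at exactly one position,
    # where the read number differs by exactly 1 (e.g. ".../1" vs ".../2").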
with open(path_R1) as path_inf_R1, open(path_R2) as path_inf_R2:
fastq_gen_R1 = read_fastq(path_inf_R1)
fastq_gen_R2 = read_fastq(path_inf_R2)
for gen_R1, gen_R2 in zip(fastq_gen_R1,fastq_gen_R2):
title_R1, title_R2 = gen_R1[0], gen_R2[0]
if len(title_R1) != len(title_R2):
return False
diff_idx = [i for i in range(len(title_R1)) if title_R1[i] != title_R2[i]]
if len(diff_idx) != 1:
return False
if int(title_R2[diff_idx[0]]) - int(title_R1[diff_idx[0]]) != 1:
return False
return True
def detect_paired_end(path_fastqs):
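    # An odd number of FASTQ files can never be fully paired; otherwise defer
    # to match_pairs, whose second element is None when no R1/R2 pairing fits.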
path_fastqs = [f for f in path_fastqs if f.endswith('.fastq') or f.endswith('.fq') or f.endswith('.fastq.gz') or f.endswith('.fq.gz')]
if len(path_fastqs) % 2 == 1: return False, [path_fastqs, None, None, None]
pair_obj = match_pairs(path_fastqs, True)
path_fastqs = pair_obj[0]
    if pair_obj[1] is None: return False, pair_obj
return True, pair_obj
def get_directory_size(path):
return sum([get_file_size(os.path.join(path, fastq)) for fastq in os.listdir(path)])
def remove_directory_contents(path):
for f in os.listdir(path):
os.remove(os.path.join(path, f))
def choose_axe_adaptors(path_subsampled_fastqs, paired_end, output_path, threads):
adapters = ['TruSeq2', 'TruSeq3', 'TruSeq3-2', 'Nextera']
threads = min(threads, multiprocessing.cpu_count(), 16)
original_size = get_directory_size(os.path.dirname(path_subsampled_fastqs[0]))
logging.info('Original size of the subsampled_fastqs = ' + str(original_size))
best_size = original_size
best_adap = None
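    # Try each adapter preset on the subsample: the correct adapters shrink
    # the trimmed output the most. A winner is only accepted below if it
    # removes more than 0.5% of the original bytes.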
for adapter in adapters:
if paired_end:
axe_adaptors_paired_end(path_subsampled_fastqs, output_path, adapter, threads, shell=False)
else:
axe_adaptors_single_end(path_subsampled_fastqs, output_path, adapter, threads, shell=False)
fastqs_path_size = get_directory_size(output_path)
logging.info("Adapters: {adapter}\tFile Size: {filesize}".format(adapter=adapter, filesize=fastqs_path_size))
if fastqs_path_size <= best_size:
best_size = fastqs_path_size
best_adap = adapter
if best_size < 0.995*original_size:
# Actually write the best files again for use in later steps
logging.info("Best Adapters: {adapter}\tFile Size: {filesize}".format(adapter=best_adap, filesize=best_size))
if paired_end:
files = axe_adaptors_paired_end(path_subsampled_fastqs, output_path, best_adap, threads, shell=False)
else:
files = axe_adaptors_single_end(path_subsampled_fastqs, output_path, best_adap, threads, shell=False)
return best_adap, best_size, files
else:
return None, original_size, path_subsampled_fastqs
def flash_stitchable_and_check_outies(adapter_output_filenames, flash_output_path, threads):
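    # Heuristics: outie orientation is "allowed" if >=75% of files report
    # >=15% outie pairs; stitching is deemed viable if >=75% of files keep
    # more than 30% of their reads after FLASH.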
flash_output_str = flash_part1(adapter_output_filenames, flash_output_path, max_overlap=700, \
min_overlap=10, allow_outies=True, threads=threads, shell=False)
allow_outies_count = 0
for flash_out in flash_output_str:
flash_str_list = flash_out.strip().split('\n')
outies_info = flash_str_list[-8]
outies_percent = float(outies_info[outies_info.find('(')+1:outies_info.find('%')])
if outies_percent >= 15:
allow_outies_count += 1
path_flash_fqs = flash_part2(flash_output_str, flash_output_path)
path_R1_fastqs, _ = split_fwd_rev(adapter_output_filenames)
matched_count = 0
for original_fq, flash_fq in zip(path_R1_fastqs, path_flash_fqs):
if count_num_lines(flash_fq) > count_num_lines(original_fq)*0.3:
matched_count = matched_count + 1
return matched_count/len(path_flash_fqs) >= 0.75, allow_outies_count/len(flash_output_str) >= 0.75, path_flash_fqs
def flash_check_cv(flash_output_path):
hist_files = [os.path.join(flash_output_path, f) for f in os.listdir(flash_output_path) if f.endswith('.hist')]
total_cv = total_mean = 0
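    # Each .hist file maps stitched-read length -> count; compute the mean and
    # coefficient of variation per file via the one-pass identity
    # var = (sum(x^2*f) - (sum(x*f))^2 / n) / (n - 1), then average over files.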
for f in hist_files:
with open(f) as inf:
csv_inf = csv.reader(inf, delimiter="\t")
x2f = 0
            total = 0
            cnt = 0
            for row in csv_inf:
                row = [int(r) for r in row]
                cnt = cnt + row[1]
                total = total + row[0] * row[1]
                x2f = x2f + row[0] * row[0] * row[1]
        mean = total/cnt
        std = math.sqrt((x2f - total*total/cnt)/(cnt-1))
        cv = std/mean
        total_cv = total_cv + cv
        total_mean = total_mean + mean
total_files = len(hist_files)
return total_cv/total_files, total_mean/total_files
def trimmer_learning(flash_output_filenames):
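    # Learn quality cutoffs from observed Phred scores: filter_q is the mean
    # over all bases, while trim_q averages only the 10 leading and 10
    # trailing bases of each read (where trimming acts) and is floored at
    # filter_q - 3.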
filter_q_sum = 0
trim_q_sum = 0
totbases = 0
tottrim = 0
num = 0
for fq_path in flash_output_filenames:
with open(fq_path) as fq_inf:
fq_gen = read_fastq(fq_inf)
for gen in fq_gen:
num = num + 1
qualities = gen[2]
totbases = totbases + len(qualities)
qualities = [ord(qual)-33 for qual in qualities]
filter_q_sum = filter_q_sum + sum(qualities)
if (len(qualities) >= 20):
trim_q_sum = trim_q_sum + sum(qualities[:10]) + sum(qualities[-10:])
tottrim = tottrim + 20
logging.info('num seqs: %d' % num)
logging.info('filter_q_sum: %d' % filter_q_sum)
logging.info('trim_q_sum: %d' % trim_q_sum)
logging.info('total bases considered: %d (trim: %d)' % (totbases, tottrim))
logging.info('filter_q: %d' % (filter_q_sum/totbases))
logging.info('trim_q: %d' % (trim_q_sum/tottrim))
filter_q = math.floor(filter_q_sum/totbases)
trim_q = math.floor(trim_q_sum/tottrim)-1
trim_q = trim_q if trim_q > filter_q - 3 else filter_q - 3
return filter_q, trim_q
def template_input(input):
input = os.path.abspath(input)
# input, input_cmd
return "input\t{}".format(input), ["--input", input]
def template_paired_end(bool):
# bool, paired_end
if bool:
return "paired_end\t{}".format(str(bool)), None
else:
return "paired_end\t{}".format(str(bool)), ["-SE"]
def template_trim(filt_q, trim_q):
return "filt_q: %d, trim_q: %d" % (filt_q, trim_q), ["--filter_qual", str(filt_q), "--trim_qual", str(trim_q)]
def template_cv(minstitch, maxstitch):
return "minstitch: %d, maxstitch: %d" % (minstitch, maxstitch), ["--min_overlap", str(minstitch), "--max_overlap", str(maxstitch)]
def template_output(output):
# output, output_cmd
output = os.path.abspath(output)
return "output\t{}".format(output), ["--output", output]
def template_choose_axe_adaptors(best_adapt, best_size):
if best_adapt:
return "axe_adaptors\t" + best_adapt, ["--adaptor", best_adapt]
else:
return "axe_adaptors\tNA", ["--adaptor", "None"]
def template_flash(stitches, do_outies):
return "stitches: %s, outies: %s" % (stitches, do_outies), ["--flash", str(stitches), "--allow_outies", str(do_outies)]
def main():
start_time = datetime.now()
parser = make_arg_parser()
args = parser.parse_args()
learning_params = ["shi7.py"]
learning_pretty = ["SHI7 version", __version__]
input = os.path.abspath(args.input)
output = os.path.abspath(args.output)
# Make output folder
if not os.path.exists(output):
os.makedirs(output)
# Put in the logging file
logging.basicConfig(filename=os.path.join(output, 'shi7_learning.log'), filemode='w', level=logging.DEBUG, \
format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
# Make temp outfolder
if os.path.exists(os.path.join(args.output, 'temp')):
shutil.rmtree(os.path.join(args.output, 'temp'))
logging.info('Existing temp directory deleted.')
os.makedirs(os.path.join(args.output, 'temp'))
else:
os.makedirs(os.path.join(args.output, 'temp'))
path_fastqs = [os.path.join(input, f) for f in os.listdir(input) if f.endswith('fastq') or f.endswith('fq') or f.endswith('fq.gz') or f.endswith('fastq.gz')]
if len(path_fastqs) == 0:
msg = "No FASTQS found in input folder {}".format(input)
logging.critical(msg)
raise IOError(msg)
# Record the input
results, addon = template_input(input)
logging.info(results)
if addon:
learning_params.extend(addon)
# Write temp subsampled fastqs
subsampled_fastq_path = os.path.join(output, 'temp', 'subsampled')
os.makedirs(subsampled_fastq_path)
totbases = totseqs = 0
    for file in path_fastqs:
        basename = os.path.basename(file)
        if file.endswith('.fastq') or file.endswith('.fq'):
            fastq_inf = open(file)
        else:
            fastq_inf = gzip.open(file, 'rt')
        fastq_gen = read_fastq(fastq_inf)
        if basename.endswith('.gz'):
            basename = basename[:-3]
        with open(os.path.join(subsampled_fastq_path, basename), 'w') as outf:
            for header, seq, quality in limit_fastq(fastq_gen):
                outf.write("@{header}\n{seq}\n+\n{quality}\n".format(header=header, seq=seq, quality=quality))
                totbases += len(seq)
                totseqs += 1
        fastq_inf.close()
avlen = totbases/totseqs
path_fastqs = glob(os.path.join(subsampled_fastq_path , "*"))
# Detect if paired end
paired_end, pair_obj = detect_paired_end(path_fastqs)
path_fastqs = pair_obj[0]
link_outdir = os.path.join(output, 'temp', 'link')
os.makedirs(link_outdir)
snames = [os.path.basename(n) for n in path_fastqs]
path_fastqs = link_manicured_names(path_fastqs, snames, link_outdir, not paired_end, pair_obj[1:])
results, addon = template_paired_end(paired_end)
logging.info(results)
if addon: learning_params.extend(addon)
learning_pretty += ["Paired end",paired_end]
# Detect adapters
axe_adaptors_path = os.path.join(output, 'temp', 'axe_adaptors')
os.makedirs(axe_adaptors_path)
best_adap, best_size, fastq_paths = choose_axe_adaptors(path_fastqs, paired_end, axe_adaptors_path, int(args.threads))
results, addon = template_choose_axe_adaptors(best_adap, best_size)
logging.info(results)
if addon: learning_params.extend(addon)
learning_pretty += ["Detected adaptors",best_adap]
# Detect output folder
results, addon = template_output(output)
logging.info(results)
if addon: learning_params.extend(addon)
# Detect stitching
stitched_path = os.path.join(output, 'temp', 'flash')
os.makedirs(stitched_path)
if paired_end:
stitches, do_outies, fastq_paths = flash_stitchable_and_check_outies(fastq_paths, stitched_path, int(args.threads))
else: stitches, do_outies = False, False
results, addon = template_flash(stitches, do_outies)
logging.info(results)
if addon: learning_params.extend(addon)
if paired_end:
learning_pretty += ["Stitching",stitches]
if stitches: learning_pretty += ["Outies allowed",do_outies]
filt_q, trim_q = trimmer_learning(fastq_paths)
results, addon = template_trim(int(filt_q), int(trim_q))
logging.info(results)
if addon: learning_params.extend(addon)
learning_pretty += ["Filter quality",filt_q,"Trimming quality",trim_q]
# Check whether to implement stitching bounds
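    # A stitched-length CV below 0.1 suggests fixed-length amplicons; in that
    # case bound FLASH's allowed overlap to 2*avlen - mean +/- ceil(cv*mean),
    # clamped to a minimum overlap of 8.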
if stitches:
cv, mean = flash_check_cv(stitched_path)
if cv < 0.1:
learning_pretty += ["Amplicon mode",True]
logging.info("CV: %f, Mean: %f, Avlen: %f" % (cv, mean, avlen))
if avlen > mean: avlen = mean
mr = math.ceil(cv*mean)
logging.info("SD was: %d" % mr)
minstitch, maxstitch = int(2*avlen - mean-mr), int(2*avlen - mean+mr)
if minstitch < 8: minstitch = 8
logging.info("Amplicon mode: stitch range [%d, %d]" % (minstitch, maxstitch))
results, addon = template_cv(minstitch, maxstitch)
logging.info(results)
if addon: learning_params.extend(addon)
learning_pretty += ["Amplicon stitch minimum",minstitch]
learning_pretty += ["Amplicon stitch maximum",maxstitch]
else: learning_pretty += ["Amplicon mode",False]
#print(str(learning_params))
    with open(os.path.join(args.output, "shi7_cmd.sh"), "w") as cmd_outf:
        cmd = " ".join(learning_params)
        cmd_outf.write(cmd)
        print(cmd)
    with open(os.path.join(args.output, "learning_params.txt"), "w") as params_outf:
        for ix in range(0, len(learning_pretty), 2):
            params_outf.write(str(learning_pretty[ix]) + "\t" + str(learning_pretty[ix+1]) + "\n")
if not args.debug:
shutil.rmtree(os.path.join(args.output, 'temp'))
logging.info('Execution time: %s' % (datetime.now() - start_time))
if __name__ == "__main__":
main()
| knights-lab/shi7 | shi7/shi7_learning.py | Python | agpl-3.0 | 15,977 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: OpenDrive Ltda
# Copyright (c) 2013 Opendrive Ltda
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from openerp.osv import osv, fields
from openerp.tools.translate import _
class Partner(osv.osv):
_inherit = 'res.partner'
_columns = {
'legal_representative': fields.char(
'Legal Representative',
),
}
| kailIII/emaresa | rent.resp/partner.py | Python | agpl-3.0 | 1,548 |
# -*- coding: utf-8 -*-
import time
from datetime import timedelta
class CookieJar:
def __init__(self, pluginname, account=None):
self.cookies = {}
self.plugin = pluginname
self.account = account
def add_cookies(self, clist):
for c in clist:
name = c.split("\t")[5]
self.cookies[name] = c
def get_cookies(self):
return list(self.cookies.values())
def parse_cookie(self, name):
if name in self.cookies:
return self.cookies[name].split("\t")[6]
else:
return None
def get_cookie(self, name):
return self.parse_cookie(name)
    def set_cookie(
        self,
        domain,
        name,
        value,
        path="/",
        exp=None,
    ):
        if exp is None:
            #: 31 days retention, computed at call time
            exp = time.time() + timedelta(hours=744).total_seconds()
        self.cookies[
            name
        ] = f".{domain}\tTRUE\t{path}\tFALSE\t{exp}\t{name}\t{value}"
def clear(self):
self.cookies = {}
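
# Minimal usage sketch (hypothetical plugin name and values); entries are
# stored internally as Netscape-format, tab-separated cookie lines:
#   jar = CookieJar("myplugin")
#   jar.set_cookie("example.com", "session", "abc123")
#   assert jar.get_cookie("session") == "abc123"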
| vuolter/pyload | src/pyload/core/network/cookie_jar.py | Python | agpl-3.0 | 1,007 |
"""
Test scenarios for the review xblock.
"""
import ddt
import unittest
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from nose.plugins.attrib import attr
from lms.djangoapps.courseware.tests.factories import GlobalStaffFactory
from lms.djangoapps.courseware.tests.helpers import LoginEnrollmentTestCase
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from review import get_review_ids
import crum
class TestReviewXBlock(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Create the test environment with the review xblock.
"""
STUDENTS = [
{'email': 'learner@test.com', 'password': 'foo'},
]
XBLOCK_NAMES = ['review']
URL_BEGINNING = settings.LMS_ROOT_URL + \
'/xblock/block-v1:DillonX/DAD101x_review/3T2017+type@'
@classmethod
def setUpClass(cls):
# Nose runs setUpClass methods even if a class decorator says to skip
# the class: https://github.com/nose-devs/nose/issues/946
# So, skip the test class here if we are not in the LMS.
if settings.ROOT_URLCONF != 'lms.urls':
raise unittest.SkipTest('Test only valid in lms')
super(TestReviewXBlock, cls).setUpClass()
# Set up for the actual course
cls.course_actual = CourseFactory.create(
display_name='Review_Test_Course_ACTUAL',
org='DillonX',
number='DAD101x',
run='3T2017'
)
# There are multiple sections so the learner can load different
# problems, but should only be shown review problems from what they have loaded
with cls.store.bulk_operations(cls.course_actual.id, emit_signals=False):
cls.chapter_actual = ItemFactory.create(
parent=cls.course_actual, display_name='Overview'
)
cls.section1_actual = ItemFactory.create(
parent=cls.chapter_actual, display_name='Section 1'
)
cls.unit1_actual = ItemFactory.create(
parent=cls.section1_actual, display_name='New Unit 1'
)
cls.xblock1_actual = ItemFactory.create(
parent=cls.unit1_actual,
category='problem',
display_name='Problem 1'
)
cls.xblock2_actual = ItemFactory.create(
parent=cls.unit1_actual,
category='problem',
display_name='Problem 2'
)
cls.xblock3_actual = ItemFactory.create(
parent=cls.unit1_actual,
category='problem',
display_name='Problem 3'
)
cls.xblock4_actual = ItemFactory.create(
parent=cls.unit1_actual,
category='problem',
display_name='Problem 4'
)
cls.section2_actual = ItemFactory.create(
parent=cls.chapter_actual, display_name='Section 2'
)
cls.unit2_actual = ItemFactory.create(
parent=cls.section2_actual, display_name='New Unit 2'
)
cls.xblock5_actual = ItemFactory.create(
parent=cls.unit2_actual,
category='problem',
display_name='Problem 5'
)
cls.section3_actual = ItemFactory.create(
parent=cls.chapter_actual, display_name='Section 3'
)
cls.unit3_actual = ItemFactory.create(
parent=cls.section3_actual, display_name='New Unit 3'
)
cls.xblock6_actual = ItemFactory.create(
parent=cls.unit3_actual,
category='problem',
display_name='Problem 6'
)
cls.course_actual_url = reverse(
'courseware_section',
kwargs={
'course_id': unicode(cls.course_actual.id),
'chapter': 'Overview',
'section': 'Welcome',
}
)
# Set up for the review course where the review problems are hosted
cls.course_review = CourseFactory.create(
display_name='Review_Test_Course_REVIEW',
org='DillonX',
number='DAD101x_review',
run='3T2017'
)
with cls.store.bulk_operations(cls.course_review.id, emit_signals=True):
cls.chapter_review = ItemFactory.create(
parent=cls.course_review, display_name='Overview'
)
cls.section_review = ItemFactory.create(
parent=cls.chapter_review, display_name='Welcome'
)
cls.unit1_review = ItemFactory.create(
parent=cls.section_review, display_name='New Unit 1'
)
cls.xblock1_review = ItemFactory.create(
parent=cls.unit1_review,
category='problem',
display_name='Problem 1'
)
cls.xblock2_review = ItemFactory.create(
parent=cls.unit1_review,
category='problem',
display_name='Problem 2'
)
cls.xblock3_review = ItemFactory.create(
parent=cls.unit1_review,
category='problem',
display_name='Problem 3'
)
cls.xblock4_review = ItemFactory.create(
parent=cls.unit1_review,
category='problem',
display_name='Problem 4'
)
cls.unit2_review = ItemFactory.create(
parent=cls.section_review, display_name='New Unit 2'
)
cls.xblock5_review = ItemFactory.create(
parent=cls.unit2_review,
category='problem',
display_name='Problem 5'
)
cls.unit3_review = ItemFactory.create(
parent=cls.section_review, display_name='New Unit 3'
)
cls.xblock6_review = ItemFactory.create(
parent=cls.unit3_review,
category='problem',
display_name='Problem 6'
)
cls.course_review_url = reverse(
'courseware_section',
kwargs={
'course_id': unicode(cls.course_review.id),
'chapter': 'Overview',
'section': 'Welcome',
}
)
def setUp(self):
super(TestReviewXBlock, self).setUp()
for idx, student in enumerate(self.STUDENTS):
username = 'u{}'.format(idx)
self.create_account(username, student['email'], student['password'])
self.activate_user(student['email'])
self.staff_user = GlobalStaffFactory()
def enroll_student(self, email, password, course):
"""
Student login and enroll for the course
"""
self.login(email, password)
self.enroll(course, verify=True)
@attr(shard=1)
@ddt.ddt
class TestReviewFunctions(TestReviewXBlock):
"""
Check that the essential functions of the Review xBlock work as expected.
Tests cover the basic process of receiving a hint, adding a new hint,
and rating/reporting hints.
"""
def test_no_review_problems(self):
"""
If a user has not seen any problems, they should
receive a response to go out and try more problems so they have
material to review.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
with self.store.bulk_operations(self.course_actual.id, emit_signals=False):
review_section_actual = ItemFactory.create(
parent=self.chapter_actual, display_name='Review Subsection'
)
review_unit_actual = ItemFactory.create(
parent=review_section_actual, display_name='Review Unit'
)
review_xblock_actual = ItemFactory.create( # pylint: disable=unused-variable
parent=review_unit_actual,
category='review',
display_name='Review Tool'
)
# Loading the review section
response = self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': review_section_actual.location.name,
}
))
expected_h2 = 'Nothing to review'
self.assertIn(expected_h2, response.content)
@ddt.data(5, 7)
def test_too_few_review_problems(self, num_desired):
"""
If a user does not have enough problems to review, they should
receive a response to go out and try more problems so they have
material to review.
Testing loading 4 problems and asking for 5 and then loading every
problem and asking for more than that.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Want to load fewer problems than num_desired
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
if num_desired > 6:
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section2_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section3_actual.location.name,
}
))
with self.store.bulk_operations(self.course_actual.id, emit_signals=False):
review_section_actual = ItemFactory.create(
parent=self.chapter_actual, display_name='Review Subsection'
)
review_unit_actual = ItemFactory.create(
parent=review_section_actual, display_name='Review Unit'
)
review_xblock_actual = ItemFactory.create( # pylint: disable=unused-variable
parent=review_unit_actual,
category='review',
display_name='Review Tool',
num_desired=num_desired
)
# Loading the review section
response = self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': review_section_actual.location.name,
}
))
expected_h2 = 'Nothing to review'
self.assertIn(expected_h2, response.content)
@ddt.data(2, 6)
def test_review_problems(self, num_desired):
"""
If a user has enough problems to review, they should
receive a response where there are review problems for them to try.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Loading problems so the learner has enough problems in the CSM
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section2_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section3_actual.location.name,
}
))
with self.store.bulk_operations(self.course_actual.id, emit_signals=False):
review_section_actual = ItemFactory.create(
parent=self.chapter_actual, display_name='Review Subsection'
)
review_unit_actual = ItemFactory.create(
parent=review_section_actual, display_name='Review Unit'
)
review_xblock_actual = ItemFactory.create( # pylint: disable=unused-variable
parent=review_unit_actual,
category='review',
display_name='Review Tool',
num_desired=num_desired
)
# Loading the review section
response = self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': review_section_actual.location.name,
}
))
expected_header_text = 'Review Problems'
# The problems are defaulted to correct upon load
# This happens because the problems "raw_possible" field is 0 and the
# "raw_earned" field is also 0.
expected_correctness_text = 'correct'
expected_problems = ['Review Problem 1', 'Review Problem 2', 'Review Problem 3',
'Review Problem 4', 'Review Problem 5', 'Review Problem 6']
self.assertIn(expected_header_text, response.content)
self.assertEqual(response.content.count(expected_correctness_text), num_desired)
# Since the problems are randomly selected, we have to check
# the correct number of problems are returned.
count = 0
for problem in expected_problems:
if problem in response.content:
count += 1
self.assertEqual(count, num_desired)
self.assertEqual(response.content.count(self.URL_BEGINNING), num_desired)
@ddt.data(2, 6)
def test_review_problem_urls(self, num_desired):
"""
Verify that the URLs returned from the Review xBlock are valid and
correct URLs for the problems the learner has seen.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Loading problems so the learner has enough problems in the CSM
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section2_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section3_actual.location.name,
}
))
user = User.objects.get(email=self.STUDENTS[0]['email'])
crum.set_current_user(user)
result_urls = get_review_ids.get_problems(num_desired, self.course_actual.id)
expected_urls = [
(self.URL_BEGINNING + 'problem+block@Problem_1', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_2', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_3', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_4', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_5', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_6', True, 0)
]
# Since the problems are randomly selected, we have to check
# the correct number of urls are returned.
count = 0
for url in expected_urls:
if url in result_urls:
count += 1
self.assertEqual(count, num_desired)
@ddt.data(2, 5)
def test_review_problem_urls_unique_problem(self, num_desired):
"""
Verify that the URLs returned from the Review xBlock are valid and
correct URLs for the problems the learner has seen. This test will give
a unique problem to a learner and verify only that learner sees
it as a review. It will also ensure that if a learner has not loaded a
problem, it should never show up as a review problem
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Loading problems so the learner has enough problems in the CSM
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section3_actual.location.name,
}
))
user = User.objects.get(email=self.STUDENTS[0]['email'])
crum.set_current_user(user)
result_urls = get_review_ids.get_problems(num_desired, self.course_actual.id)
expected_urls = [
(self.URL_BEGINNING + 'problem+block@Problem_1', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_2', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_3', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_4', True, 0),
# This is the unique problem when num_desired == 5
(self.URL_BEGINNING + 'problem+block@Problem_6', True, 0)
]
expected_not_loaded_problem = (self.URL_BEGINNING + 'problem+block@Problem_5', True, 0)
# Since the problems are randomly selected, we have to check
# the correct number of urls are returned.
count = 0
for url in expected_urls:
if url in result_urls:
count += 1
self.assertEqual(count, num_desired)
self.assertNotIn(expected_not_loaded_problem, result_urls)
# NOTE: This test is failing because when I grab the problem from the CSM,
# it is unable to find its parents. This is some issue with the BlockStructure
# and it not being populated the way we want. For now, this is being left out
# since the first course I'm working with does not use this function.
# TODO: Fix get_vertical from get_review_ids to have the block structure for this test
# or fix something in this file to make sure it populates the block structure for the CSM
@unittest.skip
def test_review_vertical_url(self):
"""
Verify that the URL returned from the Review xBlock is a valid and
correct URL for the vertical the learner has seen.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Loading problems so the learner has problems and thus a vertical in the CSM
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
user = User.objects.get(email=self.STUDENTS[0]['email'])
crum.set_current_user(user)
result_url = get_review_ids.get_vertical(self.course_actual.id)
expected_url = self.URL_BEGINNING + 'vertical+block@New_Unit_1'
self.assertEqual(result_url, expected_url)
| lduarte1991/edx-platform | openedx/tests/xblock_integration/test_review_xblock.py | Python | agpl-3.0 | 21,237 |
# -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
        @return: List of computed product lines.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
| Elico-Corp/openerp-7.0 | mrp_mo_nopicking/mrp.py | Python | agpl-3.0 | 4,435 |
# ETConf -- web-based user-friendly computer hardware configurator
# Copyright (C) 2010-2011 ETegro Technologies, PLC <http://etegro.com/>
# Sergey Matveev <sergey.matveev@etegro.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
urlpatterns = patterns( "configurator.giver.views",
( r"^perform/(?P<computermodel_alias>.+)/$", "perform" ),
( r"^configurator/(?P<computermodel_alias>.+)/$", "configurator" ),
( r"^computermodel/request/(?P<computermodel_alias>.+)$", "computermodel_request" ),
)
| ETegro/ETConf | giver/urls.py | Python | agpl-3.0 | 1,178 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Infrastructure
# Copyright (C) 2014 Ingenieria ADHOC
# No email
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
from openerp import netsvc
from openerp.osv import osv, fields
class database_type(osv.osv):
""""""
_name = 'infrastructure.database_type'
_description = 'database_type'
_columns = {
'name': fields.char(string='Name', required=True),
'prefix': fields.char(string='Prefix', required=True, size=4),
'url_prefix': fields.char(string='URL Prefix'),
'automatic_drop': fields.boolean(string='Automatic Drop'),
'automatic_drop_days': fields.integer(string='Automatic Drop Days'),
'protect_db': fields.boolean(string='Protect DBs?'),
'color': fields.integer(string='Color'),
        'automatic_deactivation': fields.boolean(string='Automatic Deactivation?'),
        'auto_deactivation_days': fields.integer(string='Automatic Deactivation Days'),
'url_example': fields.char(string='URL Example'),
'bd_name_example': fields.char(string='BD Name Example'),
'db_back_up_policy_ids': fields.many2many('infrastructure.db_back_up_policy', 'infrastructure_database_type_ids_db_back_up_policy_ids_rel', 'database_type_id', 'db_back_up_policy_id', string='Suggested Backup Policies'),
}
_defaults = {
}
_constraints = [
]
database_type()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| zhaohuaw/odoo-infrastructure | addons/infrastructure/database_type.py | Python | agpl-3.0 | 2,270 |
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
SepiaSearch (Videos)
"""
from json import loads
from dateutil import parser, relativedelta
from urllib.parse import urlencode
from datetime import datetime
# about
about = {
"website": 'https://sepiasearch.org',
"wikidata_id": None,
"official_api_documentation": "https://framagit.org/framasoft/peertube/search-index/-/tree/master/server/controllers/api", # NOQA
"use_official_api": True,
"require_api_key": False,
"results": 'JSON',
}
categories = ['videos']
paging = True
time_range_support = True
safesearch = True
supported_languages = [
'en', 'fr', 'ja', 'eu', 'ca', 'cs', 'eo', 'el',
'de', 'it', 'nl', 'es', 'oc', 'gd', 'zh', 'pt',
'sv', 'pl', 'fi', 'ru'
]
base_url = 'https://sepiasearch.org/api/v1/search/videos'
safesearch_table = {
0: 'both',
1: 'false',
2: 'false'
}
time_range_table = {
'day': relativedelta.relativedelta(),
'week': relativedelta.relativedelta(weeks=-1),
'month': relativedelta.relativedelta(months=-1),
'year': relativedelta.relativedelta(years=-1)
}
embedded_url = '<iframe width="540" height="304" src="{url}" frameborder="0" allowfullscreen></iframe>'
def minute_to_hm(minute):
if isinstance(minute, int):
return "%d:%02d" % (divmod(minute, 60))
return None
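# e.g. minute_to_hm(135) -> "2:15"; non-integer durations yield None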
def request(query, params):
params['url'] = base_url + '?' + urlencode({
'search': query,
'start': (params['pageno'] - 1) * 10,
'count': 10,
'sort': '-match',
'nsfw': safesearch_table[params['safesearch']]
})
language = params['language'].split('-')[0]
if language in supported_languages:
params['url'] += '&languageOneOf[]=' + language
if params['time_range'] in time_range_table:
time = datetime.now().date() + time_range_table[params['time_range']]
params['url'] += '&startDate=' + time.isoformat()
return params
def response(resp):
results = []
search_results = loads(resp.text)
if 'data' not in search_results:
return []
for result in search_results['data']:
title = result['name']
content = result['description']
thumbnail = result['thumbnailUrl']
publishedDate = parser.parse(result['publishedAt'])
embedded = embedded_url.format(url=result.get('embedUrl'))
author = result.get('account', {}).get('displayName')
length = minute_to_hm(result.get('duration'))
url = result['url']
results.append({'url': url,
'title': title,
'content': content,
'author': author,
'length': length,
'template': 'videos.html',
'publishedDate': publishedDate,
'embedded': embedded,
'thumbnail': thumbnail})
return results
| dalf/searx | searx/engines/sepiasearch.py | Python | agpl-3.0 | 2,928 |
# -*- coding: utf-8 -*-
#
# SPDX-FileCopyrightText: 2013-2021 Agora Voting SL <contact@nvotes.com>
#
# SPDX-License-Identifier: AGPL-3.0-only
#
import pickle
import base64
import json
import re
from datetime import datetime
from flask import Blueprint, request, make_response, abort
from frestq.utils import loads, dumps
from frestq.tasks import SimpleTask, TaskError
from frestq.app import app, db
from models import Election, Authority, QueryQueue
from create_election.performer_jobs import check_election_data
from taskqueue import queue_task, apply_task, dequeue_task
public_api = Blueprint('public_api', __name__)
def error(status, message=""):
if message:
data = json.dumps(dict(message=message))
else:
data=""
return make_response(data, status)
@public_api.route('/dequeue', methods=['GET'])
def dequeue():
try:
dequeue_task()
    except Exception as e:
        return make_response(dumps(dict(status=str(e))), 202)
return make_response(dumps(dict(status="ok")), 202)
@public_api.route('/election', methods=['POST'])
def post_election():
'''
POST /election
Creates an election, with the given input data. This involves communicating
with the different election authorities to generate the joint public key.
Example request:
POST /election
{
"id": 1110,
"title": "Votación de candidatos",
"description": "Selecciona los documentos político, ético y organizativo con los que Podemos",
"director": "wadobo-auth1",
"authorities": "openkratio-authority",
"layout": "pcandidates-election",
"presentation": {
"share_text": "lo que sea",
"theme": "foo",
"urls": [
{
"title": "",
"url": ""
}
],
"theme_css": "whatever"
},
"end_date": "2013-12-09T18:17:14.457000",
"start_date": "2013-12-06T18:17:14.457000",
"questions": [
{
"description": "",
"layout": "pcandidates-election",
"max": 1,
"min": 0,
"num_winners": 1,
"title": "Secretaría General",
"randomize_answer_order": true,
"tally_type": "plurality-at-large",
"answer_total_votes_percentage": "over-total-valid-votes",
"answers": [
{
"id": 0,
"category": "Equipo de Enfermeras",
"details": "",
"sort_order": 1,
"urls": [
{
"title": "",
"url": ""
}
],
"text": "Fulanita de tal",
}
]
}
],
"authorities": [
{
"name": "Asociación Sugus GNU/Linux",
"orchestra_url": "https://sugus.eii.us.es/orchestra",
"ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----"
},
{
"name": "Agora Ciudadana",
"orchestra_url": "https://agoravoting.com:6874/orchestra",
"ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----"
},
{
"name": "Wadobo Labs",
"orchestra_url": "https://wadobo.com:6874/orchestra",
"ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----"
}
]
}
On success, response is empty with status 202 Accepted and returns something
like:
{
"task_id": "ba83ee09-aa83-1901-bb11-e645b52fc558",
}
When the election finally gets processed, the callback_url is called with a
POST containing the protInfo.xml file generated jointly by each
authority, following this example response:
{
"status": "finished",
"reference": {
"election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
"action": "POST /election"
},
"session_data": [{
"session_id": "deadbeef-03fa-4890-aa83-2fc558e645b5",
"publickey": ["<pubkey codified in hexadecimal>"]
}]
}
Note that this protInfo.xml will contain the election public key, but
also some other information. In particular, it's worth noting that
the http and hint servers' urls for each authority could change later,
if election-orchestra needs it.
If there was an error, then the callback will be called following this
example format:
{
"status": "error",
"reference": {
"session_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
"action": "POST /election"
},
"data": {
"message": "error message"
}
}
'''
data = request.get_json(force=True, silent=True)
d = base64.b64encode(pickle.dumps(data)).decode('utf-8')
queueid = queue_task(task='election', data=d)
return make_response(dumps(dict(queue_id=queueid)), 202)
@public_api.route('/tally', methods=['POST'])
def post_tally():
'''
POST /tally
Tallies an election, with the given input data. This involves communicating
with the different election authorities to do the tally.
Example request:
POST /tally
{
"election_id": 111,
"callback_url": "https://127.0.0.1:5000/public_api/receive_tally",
"votes_url": "https://127.0.0.1:5000/public_data/vota4/encrypted_ciphertexts",
"votes_hash": "ni:///sha-256;f4OxZX_x_FO5LcGBSKHWXfwtSx-j1ncoSt3SABJtkGk"
}
On success, response is empty with status 202 Accepted and returns something
like:
{
"task_id": "ba83ee09-aa83-1901-bb11-e645b52fc558",
}
When the election finally gets processed, the callback_url is called with POST
similar to the following example:
{
"status": "finished",
"reference": {
"election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
"action": "POST /tally"
},
"data": {
"votes_url": "https://127.0.0.1:5000/public_data/vota4/tally.tar.bz2",
"votes_hash": "ni:///sha-256;f4OxZX_x_FO5LcGBSKHWXfwtSx-j1ncoSt3SABJtkGk"
}
}
If there was an error, then the callback will be called following this
example format:
{
"status": "error",
"reference": {
"election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
"action": "POST /tally"
},
"data": {
"message": "error message"
}
}
'''
# first of all, parse input data
data = request.get_json(force=True, silent=True)
d = base64.b64encode(pickle.dumps(data)).decode('utf-8')
queueid = queue_task(task='tally', data=d)
return make_response(dumps(dict(queue_id=queueid)), 202)
@public_api.route('/receive_election', methods=['POST'])
def receive_election():
'''
This is a test route to be able to test that callbacks are correctly sent
'''
print("ATTENTION received election callback: ")
print(request.get_json(force=True, silent=True))
return make_response("", 202)
@public_api.route('/receive_tally', methods=['POST'])
def receive_tally():
'''
This is a test route to be able to test that callbacks are correctly sent
'''
print("ATTENTION received tally callback: ")
print(request.get_json(force=True, silent=True))
return make_response("", 202)
| agoravoting/election-orchestra | public_api.py | Python | agpl-3.0 | 8,209 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'UserProject.drive_auth'
db.add_column(u'user_project', 'drive_auth',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'UserProject.drive_auth'
db.delete_column(u'user_project', 'drive_auth')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'home.category': {
'Meta': {'object_name': 'Category', 'db_table': "u'category'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
},
'projects.project': {
'Meta': {'object_name': 'Project', 'db_table': "u'project'"},
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['home.Category']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'image_original_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'licence': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'type_field': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True', 'db_column': "'type'", 'blank': 'True'})
},
'projects.projectpart': {
'Meta': {'object_name': 'ProjectPart', 'db_table': "u'project_part'"},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
'created_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projectpart_created_user'", 'to': "orm['auth.User']"}),
'drive_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'projectpart_modified_user'", 'null': 'True', 'to': "orm['auth.User']"}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.Project']"}),
'project_part': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.ProjectPart']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'projects.userproject': {
'Meta': {'object_name': 'UserProject', 'db_table': "u'user_project'"},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
'created_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'userproject_created_user'", 'to': "orm['auth.User']"}),
'drive_auth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'userproject_modified_user'", 'null': 'True', 'to': "orm['auth.User']"}),
'permission': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '255'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.Project']", 'db_column': "'project_id'"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['projects'] | taikoa/wevolver-server | wevolve/projects/migrations/0006_auto__add_field_userproject_drive_auth.py | Python | agpl-3.0 | 8,173 |
# Copyright (C) 2021 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
The apartment controller manages the apartment objects that are known in the system.
"""
import logging
from gateway.events import EsafeEvent, EventError
from gateway.exceptions import ItemDoesNotExistException, StateException
from gateway.models import Apartment, Database
from gateway.mappers import ApartmentMapper
from gateway.dto import ApartmentDTO
from gateway.pubsub import PubSub
from ioc import INJECTED, Inject, Injectable, Singleton
if False: # MyPy
from typing import List, Optional, Dict, Any
from esafe.rebus import RebusController
logger = logging.getLogger(__name__)
@Injectable.named('apartment_controller')
@Singleton
class ApartmentController(object):
def __init__(self):
self.rebus_controller = None # type: Optional[RebusController]
def set_rebus_controller(self, rebus_controller):
self.rebus_controller = rebus_controller
@staticmethod
@Inject
def send_config_change_event(msg, error=EventError.ErrorTypes.NO_ERROR, pubsub=INJECTED):
# type: (str, str, PubSub) -> None
event = EsafeEvent(EsafeEvent.Types.CONFIG_CHANGE, {'type': 'apartment', 'msg': msg}, error=error)
pubsub.publish_esafe_event(PubSub.EsafeTopics.CONFIG, event)
@staticmethod
def load_apartment(apartment_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.id == apartment_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto
@staticmethod
def load_apartment_by_mailbox_id(mailbox_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.mailbox_rebus_id == mailbox_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto
@staticmethod
def load_apartment_by_doorbell_id(doorbell_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.doorbell_rebus_id == doorbell_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto
@staticmethod
def load_apartments():
# type: () -> List[ApartmentDTO]
apartments = []
for apartment_orm in Apartment.select():
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
apartments.append(apartment_dto)
return apartments
@staticmethod
def get_apartment_count():
# type: () -> int
return Apartment.select().count()
@staticmethod
def apartment_id_exists(apartment_id):
# type: (int) -> bool
apartments = ApartmentController.load_apartments()
ids = (x.id for x in apartments)
return apartment_id in ids
def _check_rebus_ids(self, apartment_dto):
if self.rebus_controller is None:
raise StateException("Cannot save apartment: Rebus Controller is None")
if 'doorbell_rebus_id' in apartment_dto.loaded_fields and \
not self.rebus_controller.verify_device_exists(apartment_dto.doorbell_rebus_id):
raise ItemDoesNotExistException("Cannot save apartment: doorbell ({}) does not exists".format(apartment_dto.doorbell_rebus_id))
if 'mailbox_rebus_id' in apartment_dto.loaded_fields and \
not self.rebus_controller.verify_device_exists(apartment_dto.mailbox_rebus_id):
raise ItemDoesNotExistException("Cannot save apartment: mailbox ({}) does not exists".format(apartment_dto.mailbox_rebus_id))
def save_apartment(self, apartment_dto, send_event=True):
# type: (ApartmentDTO, bool) -> ApartmentDTO
self._check_rebus_ids(apartment_dto)
apartment_orm = ApartmentMapper.dto_to_orm(apartment_dto)
apartment_orm.save()
if send_event:
ApartmentController.send_config_change_event('save')
return ApartmentMapper.orm_to_dto(apartment_orm)
def save_apartments(self, apartments_dto):
apartments_dtos = []
for apartment in apartments_dto:
apartment_saved = self.save_apartment(apartment, send_event=False)
apartments_dtos.append(apartment_saved)
self.send_config_change_event('save')
return apartments_dtos
def update_apartment(self, apartment_dto, send_event=True):
# type: (ApartmentDTO, bool) -> ApartmentDTO
self._check_rebus_ids(apartment_dto)
if 'id' not in apartment_dto.loaded_fields or apartment_dto.id is None:
raise RuntimeError('cannot update an apartment without the id being set')
try:
apartment_orm = Apartment.get_by_id(apartment_dto.id)
loaded_apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
for field in apartment_dto.loaded_fields:
if field == 'id':
continue
if hasattr(apartment_dto, field):
setattr(loaded_apartment_dto, field, getattr(apartment_dto, field))
apartment_orm = ApartmentMapper.dto_to_orm(loaded_apartment_dto)
apartment_orm.save()
if send_event:
ApartmentController.send_config_change_event('update')
return ApartmentMapper.orm_to_dto(apartment_orm)
except Exception as e:
raise RuntimeError('Could not update the apartment: {}'.format(e))
def update_apartments(self, apartment_dtos):
# type: (List[ApartmentDTO]) -> Optional[List[ApartmentDTO]]
apartments = []
with Database.get_db().transaction() as transaction:
try:
# First clear all the rebus fields in order to be able to swap 2 fields
for apartment in apartment_dtos:
apartment_orm = Apartment.get_by_id(apartment.id) # type: Apartment
if 'mailbox_rebus_id' in apartment.loaded_fields:
apartment_orm.mailbox_rebus_id = None
if 'doorbell_rebus_id' in apartment.loaded_fields:
apartment_orm.doorbell_rebus_id = None
apartment_orm.save()
# Then check whether another apartment already uses one of the mailbox or doorbell rebus ids that were passed in.
# This is needed when a doorbell or mailbox gets assigned to a different apartment: the old assignment must be cleared first (see the usage sketch after this method).
for apartment_orm in Apartment.select():
for apartment_dto in apartment_dtos:
if apartment_orm.mailbox_rebus_id == apartment_dto.mailbox_rebus_id and apartment_orm.mailbox_rebus_id is not None:
apartment_orm.mailbox_rebus_id = None
apartment_orm.save()
if apartment_orm.doorbell_rebus_id == apartment_dto.doorbell_rebus_id and apartment_orm.doorbell_rebus_id is not None:
apartment_orm.doorbell_rebus_id = None
apartment_orm.save()
for apartment in apartment_dtos:
updated = self.update_apartment(apartment, send_event=False)
if updated is not None:
apartments.append(updated)
self.send_config_change_event('update')
except Exception as ex:
logger.error('Could not update apartments: {}: {}'.format(type(ex).__name__, ex))
transaction.rollback()
return None
return apartments
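# Illustrative usage sketch (the variable names are assumptions, not taken
# from this module): swapping the mailboxes of two apartments in one call
# relies on the two-phase clearing above, because updating them one at a
# time would momentarily leave both claiming the same mailbox_rebus_id.
#
#   a1 = controller.load_apartment(1)
#   a2 = controller.load_apartment(2)
#   a1.mailbox_rebus_id, a2.mailbox_rebus_id = \
#       a2.mailbox_rebus_id, a1.mailbox_rebus_id
#   controller.update_apartments([a1, a2])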
@staticmethod
def delete_apartment(apartment_dto):
# type: (ApartmentDTO) -> None
if "id" in apartment_dto.loaded_fields and apartment_dto.id is not None:
Apartment.delete_by_id(apartment_dto.id)
elif "name" in apartment_dto.loaded_fields:
# First check if there is only one:
if Apartment.select().where(Apartment.name == apartment_dto.name).count() <= 1:
Apartment.delete().where(Apartment.name == apartment_dto.name).execute()
ApartmentController.send_config_change_event('delete')
else:
raise RuntimeError('More than one apartment with the given name: {}'.format(apartment_dto.name))
else:
raise RuntimeError('Could not delete apartment: neither an id nor a name is set on {}'.format(apartment_dto))
| openmotics/gateway | src/gateway/apartment_controller.py | Python | agpl-3.0 | 9,287 |
from odoo import fields, models
class Job(models.Model):
_inherit = "crm.team"
survey_id = fields.Many2one(
'survey.survey', "Interview Form",
help="Choose an interview form")
def action_print_survey(self):
return self.survey_id.action_print_survey()
| ingadhoc/sale | crm_survey/models/crm_job.py | Python | agpl-3.0 | 291 |
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from . import account_move
from . import account_move_line
from . import account_master_port
| ingadhoc/multi-company | account_multic_fix/models/__init__.py | Python | agpl-3.0 | 340 |
from ctypes import *
import ctypes.util
import threading
import os
import sys
from warnings import warn
from functools import partial
import collections
import re
import traceback
# vim: ts=4 sw=4 et
if os.name == 'nt':
backend = CDLL('mpv-1.dll')
fs_enc = 'utf-8'
else:
import locale
lc, enc = locale.getlocale(locale.LC_NUMERIC)
# libmpv requires LC_NUMERIC to be set to "C". Since messing with global variables everyone else relies upon is
# still better than segfaulting, we are setting LC_NUMERIC to "C".
locale.setlocale(locale.LC_NUMERIC, 'C')
sofile = ctypes.util.find_library('mpv')
if sofile is None:
raise OSError("Cannot find libmpv in the usual places. Depending on your distro, you may try installing an "
"mpv-devel or mpv-libs package. If you have libmpv around but this script can't find it, maybe consult "
"the documentation for ctypes.util.find_library which this script uses to look up the library "
"filename.")
backend = CDLL(sofile)
fs_enc = sys.getfilesystemencoding()
class MpvHandle(c_void_p):
pass
class MpvOpenGLCbContext(c_void_p):
pass
class PropertyUnavailableError(AttributeError):
pass
class ErrorCode(object):
""" For documentation on these, see mpv's libmpv/client.h """
SUCCESS = 0
EVENT_QUEUE_FULL = -1
NOMEM = -2
UNINITIALIZED = -3
INVALID_PARAMETER = -4
OPTION_NOT_FOUND = -5
OPTION_FORMAT = -6
OPTION_ERROR = -7
PROPERTY_NOT_FOUND = -8
PROPERTY_FORMAT = -9
PROPERTY_UNAVAILABLE = -10
PROPERTY_ERROR = -11
COMMAND = -12
EXCEPTION_DICT = {
0: None,
-1: lambda *a: MemoryError('mpv event queue full', *a),
-2: lambda *a: MemoryError('mpv cannot allocate memory', *a),
-3: lambda *a: ValueError('Uninitialized mpv handle used', *a),
-4: lambda *a: ValueError('Invalid value for mpv parameter', *a),
-5: lambda *a: AttributeError('mpv option does not exist', *a),
-6: lambda *a: TypeError('Tried to set mpv option using wrong format', *a),
-7: lambda *a: ValueError('Invalid value for mpv option', *a),
-8: lambda *a: AttributeError('mpv property does not exist', *a),
# Currently (mpv 0.18.1) there is a bug causing a PROPERTY_FORMAT error to be returned instead of
# INVALID_PARAMETER when setting a property-mapped option to an invalid value.
-9: lambda *a: TypeError('Tried to get/set mpv property using wrong format, or passed invalid value', *a),
-10: lambda *a: PropertyUnavailableError('mpv property is not available', *a),
-11: lambda *a: RuntimeError('Generic error getting or setting mpv property', *a),
-12: lambda *a: SystemError('Error running mpv command', *a) }
@staticmethod
def default_error_handler(ec, *args):
return ValueError(_mpv_error_string(ec).decode('utf-8'), ec, *args)
@classmethod
def raise_for_ec(kls, ec, func, *args):
ec = 0 if ec > 0 else ec
ex = kls.EXCEPTION_DICT.get(ec , kls.default_error_handler)
if ex:
raise ex(ec, *args)
class MpvFormat(c_int):
NONE = 0
STRING = 1
OSD_STRING = 2
FLAG = 3
INT64 = 4
DOUBLE = 5
NODE = 6
NODE_ARRAY = 7
NODE_MAP = 8
BYTE_ARRAY = 9
def __eq__(self, other):
return self is other or self.value == other or self.value == int(other)
def __repr__(self):
return ['NONE', 'STRING', 'OSD_STRING', 'FLAG', 'INT64', 'DOUBLE', 'NODE', 'NODE_ARRAY', 'NODE_MAP',
'BYTE_ARRAY'][self.value]
class MpvEventID(c_int):
NONE = 0
SHUTDOWN = 1
LOG_MESSAGE = 2
GET_PROPERTY_REPLY = 3
SET_PROPERTY_REPLY = 4
COMMAND_REPLY = 5
START_FILE = 6
END_FILE = 7
FILE_LOADED = 8
TRACKS_CHANGED = 9
TRACK_SWITCHED = 10
IDLE = 11
PAUSE = 12
UNPAUSE = 13
TICK = 14
SCRIPT_INPUT_DISPATCH = 15
CLIENT_MESSAGE = 16
VIDEO_RECONFIG = 17
AUDIO_RECONFIG = 18
METADATA_UPDATE = 19
SEEK = 20
PLAYBACK_RESTART = 21
PROPERTY_CHANGE = 22
CHAPTER_CHANGE = 23
ANY = ( SHUTDOWN, LOG_MESSAGE, GET_PROPERTY_REPLY, SET_PROPERTY_REPLY, COMMAND_REPLY, START_FILE, END_FILE,
FILE_LOADED, TRACKS_CHANGED, TRACK_SWITCHED, IDLE, PAUSE, UNPAUSE, TICK, SCRIPT_INPUT_DISPATCH,
CLIENT_MESSAGE, VIDEO_RECONFIG, AUDIO_RECONFIG, METADATA_UPDATE, SEEK, PLAYBACK_RESTART, PROPERTY_CHANGE,
CHAPTER_CHANGE )
def __repr__(self):
return ['NONE', 'SHUTDOWN', 'LOG_MESSAGE', 'GET_PROPERTY_REPLY', 'SET_PROPERTY_REPLY', 'COMMAND_REPLY',
'START_FILE', 'END_FILE', 'FILE_LOADED', 'TRACKS_CHANGED', 'TRACK_SWITCHED', 'IDLE', 'PAUSE', 'UNPAUSE',
'TICK', 'SCRIPT_INPUT_DISPATCH', 'CLIENT_MESSAGE', 'VIDEO_RECONFIG', 'AUDIO_RECONFIG',
'METADATA_UPDATE', 'SEEK', 'PLAYBACK_RESTART', 'PROPERTY_CHANGE', 'CHAPTER_CHANGE'][self.value]
class MpvNodeList(Structure):
def array_value(self, decode_str=False):
return [ self.values[i].node_value(decode_str) for i in range(self.num) ]
def dict_value(self, decode_str=False):
return { self.keys[i].decode('utf-8'): self.values[i].node_value(decode_str) for i in range(self.num) }
class MpvNode(Structure):
_fields_ = [('val', c_longlong),
('format', MpvFormat)]
def node_value(self, decode_str=False):
return MpvNode.node_cast_value(byref(c_void_p(self.val)), self.format.value, decode_str)
@staticmethod
def node_cast_value(v, fmt, decode_str=False):
dwrap = lambda s: s.decode('utf-8') if decode_str else s
return {
MpvFormat.NONE: lambda v: None,
MpvFormat.STRING: lambda v: dwrap(cast(v, POINTER(c_char_p)).contents.value),
MpvFormat.OSD_STRING: lambda v: cast(v, POINTER(c_char_p)).contents.value.decode('utf-8'),
MpvFormat.FLAG: lambda v: bool(cast(v, POINTER(c_int)).contents.value),
MpvFormat.INT64: lambda v: cast(v, POINTER(c_longlong)).contents.value,
MpvFormat.DOUBLE: lambda v: cast(v, POINTER(c_double)).contents.value,
MpvFormat.NODE: lambda v: cast(v, POINTER(MpvNode)).contents.node_value(decode_str),
MpvFormat.NODE_ARRAY: lambda v: cast(v, POINTER(POINTER(MpvNodeList))).contents.contents.array_value(decode_str),
MpvFormat.NODE_MAP: lambda v: cast(v, POINTER(POINTER(MpvNodeList))).contents.contents.dict_value(decode_str),
MpvFormat.BYTE_ARRAY: lambda v: cast(v, POINTER(c_char_p)).contents.value,
}[fmt](v)
MpvNodeList._fields_ = [('num', c_int),
('values', POINTER(MpvNode)),
('keys', POINTER(c_char_p))]
class MpvSubApi(c_int):
MPV_SUB_API_OPENGL_CB = 1
class MpvEvent(Structure):
_fields_ = [('event_id', MpvEventID),
('error', c_int),
('reply_userdata', c_ulonglong),
('data', c_void_p)]
def as_dict(self):
dtype = {MpvEventID.END_FILE: MpvEventEndFile,
MpvEventID.PROPERTY_CHANGE: MpvEventProperty,
MpvEventID.GET_PROPERTY_REPLY: MpvEventProperty,
MpvEventID.LOG_MESSAGE: MpvEventLogMessage,
MpvEventID.SCRIPT_INPUT_DISPATCH: MpvEventScriptInputDispatch,
MpvEventID.CLIENT_MESSAGE: MpvEventClientMessage
}.get(self.event_id.value, None)
return {'event_id': self.event_id.value,
'error': self.error,
'reply_userdata': self.reply_userdata,
'event': cast(self.data, POINTER(dtype)).contents.as_dict() if dtype else None}
class MpvEventProperty(Structure):
_fields_ = [('name', c_char_p),
('format', MpvFormat),
('data', c_void_p)]
def as_dict(self):
if self.format.value == MpvFormat.STRING:
proptype, _access = ALL_PROPERTIES.get(self.name.decode('utf-8'), (str, None))[:2]  # keys are str; some entries carry a third element
return {'name': self.name.decode('utf-8'),
'format': self.format,
'data': self.data,
'value': proptype(cast(self.data, POINTER(c_char_p)).contents.value.decode('utf-8'))}
else:
return {'name': self.name.decode('utf-8'),
'format': self.format,
'data': self.data}
class MpvEventLogMessage(Structure):
_fields_ = [('prefix', c_char_p),
('level', c_char_p),
('text', c_char_p)]
def as_dict(self):
return { 'prefix': self.prefix.decode('utf-8'),
'level': self.level.decode('utf-8'),
'text': self.text.decode('utf-8').rstrip() }
class MpvEventEndFile(c_int):
EOF_OR_INIT_FAILURE = 0
RESTARTED = 1
ABORTED = 2
QUIT = 3
def as_dict(self):
return {'reason': self.value}
class MpvEventScriptInputDispatch(Structure):
_fields_ = [('arg0', c_int),
('type', c_char_p)]
def as_dict(self):
pass # TODO
class MpvEventClientMessage(Structure):
_fields_ = [('num_args', c_int),
('args', POINTER(c_char_p))]
def as_dict(self):
return { 'args': [ self.args[i].decode('utf-8') for i in range(self.num_args) ] }
WakeupCallback = CFUNCTYPE(None, c_void_p)
OpenGlCbUpdateFn = CFUNCTYPE(None, c_void_p)
OpenGlCbGetProcAddrFn = CFUNCTYPE(None, c_void_p, c_char_p)
def _handle_func(name, args, restype, errcheck, ctx=MpvHandle):
func = getattr(backend, name)
func.argtypes = [ctx] + args if ctx else args
if restype is not None:
func.restype = restype
if errcheck is not None:
func.errcheck = errcheck
globals()['_'+name] = func
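# Note: _handle_func creates module-level wrappers dynamically, so e.g.
# _handle_func('mpv_initialize', ...) below produces a global
# ``_mpv_initialize`` with the given argtypes/restype and error checking.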
def bytes_free_errcheck(res, func, *args):
notnull_errcheck(res, func, *args)
rv = cast(res, c_void_p).value
_mpv_free(res)
return rv
def notnull_errcheck(res, func, *args):
if res is None:
raise RuntimeError('Underspecified error in MPV when calling {} with args {!r}: NULL pointer returned. '
'Please consult your local debugger.'.format(func.__name__, args))
return res
ec_errcheck = ErrorCode.raise_for_ec
def _handle_gl_func(name, args=[], restype=None):
_handle_func(name, args, restype, errcheck=None, ctx=MpvOpenGLCbContext)
backend.mpv_client_api_version.restype = c_ulong
def _mpv_client_api_version():
ver = backend.mpv_client_api_version()
return ver>>16, ver&0xFFFF
backend.mpv_free.argtypes = [c_void_p]
_mpv_free = backend.mpv_free
backend.mpv_free_node_contents.argtypes = [c_void_p]
_mpv_free_node_contents = backend.mpv_free_node_contents
backend.mpv_create.restype = MpvHandle
_mpv_create = backend.mpv_create
_handle_func('mpv_create_client', [c_char_p], MpvHandle, notnull_errcheck)
_handle_func('mpv_client_name', [], c_char_p, errcheck=None)
_handle_func('mpv_initialize', [], c_int, ec_errcheck)
_handle_func('mpv_detach_destroy', [], None, errcheck=None)
_handle_func('mpv_terminate_destroy', [], None, errcheck=None)
_handle_func('mpv_load_config_file', [c_char_p], c_int, ec_errcheck)
_handle_func('mpv_suspend', [], None, errcheck=None)
_handle_func('mpv_resume', [], None, errcheck=None)
_handle_func('mpv_get_time_us', [], c_ulonglong, errcheck=None)
_handle_func('mpv_set_option', [c_char_p, MpvFormat, c_void_p], c_int, ec_errcheck)
_handle_func('mpv_set_option_string', [c_char_p, c_char_p], c_int, ec_errcheck)
_handle_func('mpv_command', [POINTER(c_char_p)], c_int, ec_errcheck)
_handle_func('mpv_command_string', [c_char_p, c_char_p], c_int, ec_errcheck)
_handle_func('mpv_command_async', [c_ulonglong, POINTER(c_char_p)], c_int, ec_errcheck)
_handle_func('mpv_set_property', [c_char_p, MpvFormat, c_void_p], c_int, ec_errcheck)
_handle_func('mpv_set_property_string', [c_char_p, c_char_p], c_int, ec_errcheck)
_handle_func('mpv_set_property_async', [c_ulonglong, c_char_p, MpvFormat,c_void_p],c_int, ec_errcheck)
_handle_func('mpv_get_property', [c_char_p, MpvFormat, c_void_p], c_int, ec_errcheck)
_handle_func('mpv_get_property_string', [c_char_p], c_void_p, bytes_free_errcheck)
_handle_func('mpv_get_property_osd_string', [c_char_p], c_void_p, bytes_free_errcheck)
_handle_func('mpv_get_property_async', [c_ulonglong, c_char_p, MpvFormat], c_int, ec_errcheck)
_handle_func('mpv_observe_property', [c_ulonglong, c_char_p, MpvFormat], c_int, ec_errcheck)
_handle_func('mpv_unobserve_property', [c_ulonglong], c_int, ec_errcheck)
_handle_func('mpv_event_name', [c_int], c_char_p, errcheck=None, ctx=None)
_handle_func('mpv_error_string', [c_int], c_char_p, errcheck=None, ctx=None)
_handle_func('mpv_request_event', [MpvEventID, c_int], c_int, ec_errcheck)
_handle_func('mpv_request_log_messages', [c_char_p], c_int, ec_errcheck)
_handle_func('mpv_wait_event', [c_double], POINTER(MpvEvent), errcheck=None)
_handle_func('mpv_wakeup', [], None, errcheck=None)
_handle_func('mpv_set_wakeup_callback', [WakeupCallback, c_void_p], None, errcheck=None)
_handle_func('mpv_get_wakeup_pipe', [], c_int, errcheck=None)
_handle_func('mpv_get_sub_api', [MpvSubApi], c_void_p, notnull_errcheck)
_handle_gl_func('mpv_opengl_cb_set_update_callback', [OpenGlCbUpdateFn, c_void_p])
_handle_gl_func('mpv_opengl_cb_init_gl', [c_char_p, OpenGlCbGetProcAddrFn, c_void_p], c_int)
_handle_gl_func('mpv_opengl_cb_draw', [c_int, c_int, c_int], c_int)
_handle_gl_func('mpv_opengl_cb_render', [c_int, c_int], c_int)
_handle_gl_func('mpv_opengl_cb_report_flip', [c_ulonglong], c_int)
_handle_gl_func('mpv_opengl_cb_uninit_gl', [], c_int)
def _ensure_encoding(possibly_bytes):
return possibly_bytes.decode('utf-8') if type(possibly_bytes) is bytes else possibly_bytes
def _event_generator(handle):
while True:
event = _mpv_wait_event(handle, -1).contents
if event.event_id.value == MpvEventID.NONE:
return  # PEP 479: raising StopIteration inside a generator is an error on Python 3.7+
yield event
def load_lua():
""" Use this function if you intend to use mpv's built-in lua interpreter. This is e.g. needed for playback of
youtube urls. """
CDLL('liblua.so', mode=RTLD_GLOBAL)
def _event_loop(event_handle, playback_cond, event_callbacks, message_handlers, property_handlers, log_handler):
for event in _event_generator(event_handle):
try:
devent = event.as_dict() # copy data from ctypes
eid = devent['event_id']
for callback in event_callbacks:
callback(devent)
if eid in (MpvEventID.SHUTDOWN, MpvEventID.END_FILE):
with playback_cond:
playback_cond.notify_all()
if eid == MpvEventID.PROPERTY_CHANGE:
pc = devent['event']
name = pc['name']
if 'value' in pc:
proptype, _access = ALL_PROPERTIES[name][:2]  # some entries carry a third element
if proptype is bytes:
args = (pc['value'],)
else:
args = (proptype(_ensure_encoding(pc['value'])),)
elif pc['format'] == MpvFormat.NONE:
args = (None,)
else:
args = (pc['data'], pc['format'])
for handler in property_handlers[name]:
handler(*args)
if eid == MpvEventID.LOG_MESSAGE and log_handler is not None:
ev = devent['event']
log_handler(ev['level'], ev['prefix'], ev['text'])
if eid == MpvEventID.CLIENT_MESSAGE:
# {'event': {'args': ['key-binding', 'foo', 'u-', 'g']}, 'reply_userdata': 0, 'error': 0, 'event_id': 16}
target, *args = devent['event']['args']
if target in message_handlers:
message_handlers[target](*args)
if eid == MpvEventID.SHUTDOWN:
_mpv_detach_destroy(event_handle)
return
except Exception as e:
traceback.print_exc()
class MPV(object):
""" See man mpv(1) for the details of the implemented commands. """
def __init__(self, *extra_mpv_flags, log_handler=None, start_event_thread=True, **extra_mpv_opts):
""" Create an MPV instance.
Extra arguments and extra keyword arguments will be passed to mpv as options. """
self._event_thread = None
self.handle = _mpv_create()
_mpv_set_option_string(self.handle, b'audio-display', b'no')
istr = lambda o: ('yes' if o else 'no') if type(o) is bool else str(o)
try:
for flag in extra_mpv_flags:
_mpv_set_option_string(self.handle, flag.encode('utf-8'), b'')
for k,v in extra_mpv_opts.items():
_mpv_set_option_string(self.handle, k.replace('_', '-').encode('utf-8'), istr(v).encode('utf-8'))
except AttributeError as e:
_mpv_initialize(self.handle)
raise e
_mpv_initialize(self.handle)
self._event_callbacks = []
self._property_handlers = collections.defaultdict(lambda: [])
self._message_handlers = {}
self._key_binding_handlers = {}
self._playback_cond = threading.Condition()
self._event_handle = _mpv_create_client(self.handle, b'py_event_handler')
self._loop = partial(_event_loop, self._event_handle, self._playback_cond, self._event_callbacks,
self._message_handlers, self._property_handlers, log_handler)
if start_event_thread:
self._event_thread = threading.Thread(target=self._loop, name='MPVEventHandlerThread')
self._event_thread.daemon = True  # setDaemon() is deprecated
self._event_thread.start()
else:
self._event_thread = None
if log_handler is not None:
self.set_loglevel('terminal-default')
def wait_for_playback(self):
""" Waits until playback of the current title is paused or done """
with self._playback_cond:
self._playback_cond.wait()
def wait_for_property(self, name, cond=lambda val: val, level_sensitive=True):
sema = threading.Semaphore(value=0)
def observer(val):
if cond(val):
sema.release()
self.observe_property(name, observer)
# Level-sensitive waiting: return immediately if the condition already
# holds, instead of waiting for the next property-change event.
if not level_sensitive or not cond(getattr(self, name.replace('-', '_'))):
sema.acquire()
self.unobserve_property(name, observer)
def __del__(self):
if self.handle:
self.terminate()
def terminate(self):
self.handle, handle = None, self.handle
if threading.current_thread() is self._event_thread:
# Handle special case to allow event handle to be detached.
# This is necessary since otherwise the event thread would deadlock itself.
grim_reaper = threading.Thread(target=lambda: _mpv_terminate_destroy(handle))
grim_reaper.start()
else:
_mpv_terminate_destroy(handle)
if self._event_thread:
self._event_thread.join()
def set_loglevel(self, level):
_mpv_request_log_messages(self._event_handle, level.encode('utf-8'))
def command(self, name, *args):
""" Execute a raw command """
args = [name.encode('utf-8')] + [ (arg if type(arg) is bytes else str(arg).encode('utf-8'))
for arg in args if arg is not None ] + [None]
_mpv_command(self.handle, (c_char_p*len(args))(*args))
def seek(self, amount, reference="relative", precision="default-precise"):
self.command('seek', amount, reference, precision)
def revert_seek(self):
self.command('revert_seek')
def frame_step(self):
self.command('frame_step')
def frame_back_step(self):
self.command('frame_back_step')
def _add_property(self, name, value=None):
self.command('add_property', name, value)
def _cycle_property(self, name, direction='up'):
self.command('cycle_property', name, direction)
def _multiply_property(self, name, factor):
self.command('multiply_property', name, factor)
def screenshot(self, includes='subtitles', mode='single'):
self.command('screenshot', includes, mode)
def screenshot_to_file(self, filename, includes='subtitles'):
self.command('screenshot_to_file', filename.encode(fs_enc), includes)
def playlist_next(self, mode='weak'):
self.command('playlist_next', mode)
def playlist_prev(self, mode='weak'):
self.command('playlist_prev', mode)
@staticmethod
def _encode_options(options):
return ','.join('{}={}'.format(str(key), str(val)) for key, val in options.items())
def loadfile(self, filename, mode='replace', **options):
self.command('loadfile', filename.encode(fs_enc), mode, MPV._encode_options(options))
def loadlist(self, playlist, mode='replace'):
self.command('loadlist', playlist.encode(fs_enc), mode)
def playlist_clear(self):
self.command('playlist_clear')
def playlist_remove(self, index='current'):
self.command('playlist_remove', index)
def playlist_move(self, index1, index2):
self.command('playlist_move', index1, index2)
def run(self, command, *args):
self.command('run', command, *args)
def quit(self, code=None):
self.command('quit', code)
def quit_watch_later(self, code=None):
self.command('quit_watch_later', code)
def sub_add(self, filename):
self.command('sub_add', filename.encode(fs_enc))
def sub_remove(self, sub_id=None):
self.command('sub_remove', sub_id)
def sub_reload(self, sub_id=None):
self.command('sub_reload', sub_id)
def sub_step(self, skip):
self.command('sub_step', skip)
def sub_seek(self, skip):
self.command('sub_seek', skip)
def toggle_osd(self):
self.command('osd')
def show_text(self, string, duration='-', level=None):
self.command('show_text', string, duration, level)
def show_progress(self):
self.command('show_progress')
def discnav(self, command):
self.command('discnav', command)
def write_watch_later_config(self):
self.command('write_watch_later_config')
def overlay_add(self, overlay_id, x, y, file_or_fd, offset, fmt, w, h, stride):
self.command('overlay_add', overlay_id, x, y, file_or_fd, offset, fmt, w, h, stride)
def overlay_remove(self, overlay_id):
self.command('overlay_remove', overlay_id)
def script_message(self, *args):
self.command('script_message', *args)
def script_message_to(self, target, *args):
self.command('script_message_to', target, *args)
def observe_property(self, name, handler):
self._property_handlers[name].append(handler)
_mpv_observe_property(self._event_handle, hash(name)&0xffffffffffffffff, name.encode('utf-8'), MpvFormat.STRING)
def unobserve_property(self, name, handler):
handlers = self._property_handlers[name]
handlers.remove(handler)
if not handlers:
_mpv_unobserve_property(self._event_handle, hash(name)&0xffffffffffffffff)
def register_message_handler(self, target, handler):
self._message_handlers[target] = handler
def unregister_message_handler(self, target):
del self._message_handlers[target]
def register_event_callback(self, callback):
self._event_callbacks.append(callback)
def unregister_event_callback(self, callback):
self._event_callbacks.remove(callback)
@staticmethod
def _binding_name(callback_or_cmd):
return 'py_kb_{:016x}'.format(hash(callback_or_cmd)&0xffffffffffffffff)
def register_key_binding(self, keydef, callback_or_cmd, mode='force'):
""" BIG FAT WARNING: mpv's key binding mechanism is pretty powerful. This means, you essentially get arbitrary
code exectution through key bindings. This interface makes some limited effort to sanitize the keydef given in
the first parameter, but YOU SHOULD NOT RELY ON THIS IN FOR SECURITY. If your input comes from config files,
this is completely fine--but, if you are about to pass untrusted input into this parameter, better double-check
whether this is secure in your case. """
if not re.match(r'(Shift+)?(Ctrl+)?(Alt+)?(Meta+)?(.|\w+)', keydef):
raise ValueError('Invalid keydef. Expected format: [Shift+][Ctrl+][Alt+][Meta+]<key>\n'
'<key> is either the literal character the key produces (ASCII or Unicode character), or a '
'symbolic name (as printed by --input-keylist')
binding_name = MPV._binding_name(keydef)
if callable(callback_or_cmd):
self._key_binding_handlers[binding_name] = callback_or_cmd
self.register_message_handler('key-binding', self._handle_key_binding_message)
self.command('define-section',
binding_name, '{} script-binding py_event_handler/{}'.format(keydef, binding_name), mode)
elif isinstance(callback_or_cmd, str):
self.command('define-section', binding_name, '{} {}'.format(keydef, callback_or_cmd), mode)
else:
raise TypeError('register_key_binding expects either an str with an mpv command or a python callable.')
self.command('enable-section', binding_name)
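# Illustrative sketch (hypothetical names): both supported binding styles.
#
#   player = MPV()
#   def my_q_binding(key_state, key_name):
#       print('got key q:', key_state, key_name)
#   player.register_key_binding('q', my_q_binding)       # python callable
#   player.register_key_binding('Ctrl+s', 'screenshot')  # raw mpv command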
def _handle_key_binding_message(self, binding_name, key_state, key_name):
self._key_binding_handlers[binding_name](key_state, key_name)
def unregister_key_binding(self, keydef):
binding_name = MPV._binding_name(keydef)
self.command('disable-section', binding_name)
self.command('define-section', binding_name, '')
if binding_name in self._key_binding_handlers:
del self._key_binding_handlers[binding_name]
if not self._key_binding_handlers:
self.unregister_message_handler('key-binding')
# Convenience functions
def play(self, filename):
self.loadfile(filename)
# Property accessors
def _get_property(self, name, proptype=str, decode_str=False):
fmt = {int: MpvFormat.INT64,
float: MpvFormat.DOUBLE,
bool: MpvFormat.FLAG,
str: MpvFormat.STRING,
bytes: MpvFormat.STRING,
commalist: MpvFormat.STRING,
MpvFormat.NODE: MpvFormat.NODE}[proptype]
out = cast(create_string_buffer(sizeof(c_void_p)), c_void_p)
outptr = byref(out)
try:
cval = _mpv_get_property(self.handle, name.encode('utf-8'), fmt, outptr)
rv = MpvNode.node_cast_value(outptr, fmt, decode_str or proptype in (str, commalist))
if proptype is commalist:
rv = proptype(rv)
if proptype is str:
_mpv_free(out)
elif proptype is MpvFormat.NODE:
_mpv_free_node_contents(outptr)
return rv
except PropertyUnavailableError as ex:
return None
def _set_property(self, name, value, proptype=str):
ename = name.encode('utf-8')
if type(value) is bytes:
_mpv_set_property_string(self.handle, ename, value)
elif type(value) is bool:
_mpv_set_property_string(self.handle, ename, b'yes' if value else b'no')
elif proptype in (str, int, float):
_mpv_set_property_string(self.handle, ename, str(proptype(value)).encode('utf-8'))
else:
raise TypeError('Cannot set {} property {} to value of type {}'.format(proptype, name, type(value)))
# Dict-like option access
def __getitem__(self, name, file_local=False):
""" Get an option value """
prefix = 'file-local-options/' if file_local else 'options/'
return self._get_property(prefix+name)
def __setitem__(self, name, value, file_local=False):
""" Get an option value """
prefix = 'file-local-options/' if file_local else 'options/'
return self._set_property(prefix+name, value)
def __iter__(self):
return iter(self.options)
def option_info(self, name):
return self._get_property('option-info/'+name)
def commalist(propval=''):
return str(propval).split(',')
node = MpvFormat.NODE
ALL_PROPERTIES = {
'osd-level': (int, 'rw'),
'osd-scale': (float, 'rw'),
'loop': (str, 'rw'),
'loop-file': (str, 'rw'),
'speed': (float, 'rw'),
'filename': (bytes, 'r'),
'file-size': (int, 'r'),
'path': (bytes, 'r'),
'media-title': (bytes, 'r'),
'stream-pos': (int, 'rw'),
'stream-end': (int, 'r'),
'length': (float, 'r'), # deprecated for ages now
'duration': (float, 'r'),
'avsync': (float, 'r'),
'total-avsync-change': (float, 'r'),
'drop-frame-count': (int, 'r'),
'percent-pos': (float, 'rw'),
# 'ratio-pos': (float, 'rw'),
'time-pos': (float, 'rw'),
'time-start': (float, 'r'),
'time-remaining': (float, 'r'),
'playtime-remaining': (float, 'r'),
'chapter': (int, 'rw'),
'edition': (int, 'rw'),
'disc-titles': (int, 'r'),
'disc-title': (str, 'rw'),
# 'disc-menu-active': (bool, 'r'),
'chapters': (int, 'r'),
'editions': (int, 'r'),
'angle': (int, 'rw'),
'pause': (bool, 'rw'),
'core-idle': (bool, 'r'),
'cache': (int, 'r'),
'cache-size': (int, 'rw'),
'cache-free': (int, 'r'),
'cache-used': (int, 'r'),
'cache-speed': (int, 'r'),
'cache-idle': (bool, 'r'),
'cache-buffering-state': (int, 'r'),
'paused-for-cache': (bool, 'r'),
# 'pause-for-cache': (bool, 'r'),
'eof-reached': (bool, 'r'),
# 'pts-association-mode': (str, 'rw'),
'hr-seek': (str, 'rw'),
'volume': (float, 'rw'),
'volume-max': (int, 'rw'),
'ao-volume': (float, 'rw'),
'mute': (bool, 'rw'),
'ao-mute': (bool, 'rw'),
'audio-speed-correction': (float, 'r'),
'audio-delay': (float, 'rw'),
'audio-format': (str, 'r'),
'audio-codec': (str, 'r'),
'audio-codec-name': (str, 'r'),
'audio-bitrate': (float, 'r'),
'packet-audio-bitrate': (float, 'r'),
'audio-samplerate': (int, 'r'),
'audio-channels': (str, 'r'),
'aid': (str, 'rw'),
'audio': (str, 'rw'), # alias for aid
'balance': (int, 'rw'),
'fullscreen': (bool, 'rw'),
'deinterlace': (str, 'rw'),
'colormatrix': (str, 'rw'),
'colormatrix-input-range': (str, 'rw'),
# 'colormatrix-output-range': (str, 'rw'),
'colormatrix-primaries': (str, 'rw'),
'ontop': (bool, 'rw'),
'border': (bool, 'rw'),
'framedrop': (str, 'rw'),
'gamma': (float, 'rw'),
'brightness': (int, 'rw'),
'contrast': (int, 'rw'),
'saturation': (int, 'rw'),
'hue': (int, 'rw'),
'hwdec': (str, 'rw'),
'panscan': (float, 'rw'),
'video-format': (str, 'r'),
'video-codec': (str, 'r'),
'video-bitrate': (float, 'r'),
'packet-video-bitrate': (float, 'r'),
'width': (int, 'r'),
'height': (int, 'r'),
'dwidth': (int, 'r'),
'dheight': (int, 'r'),
'fps': (float, 'r'),
'estimated-vf-fps': (float, 'r'),
'window-scale': (float, 'rw'),
'video-aspect': (str, 'rw'),
'osd-width': (int, 'r'),
'osd-height': (int, 'r'),
'osd-par': (float, 'r'),
'vid': (str, 'rw'),
'video': (str, 'rw'), # alias for vid
'video-align-x': (float, 'rw'),
'video-align-y': (float, 'rw'),
'video-pan-x': (float, 'rw'),
'video-pan-y': (float, 'rw'),
'video-zoom': (float, 'rw'),
'video-unscaled': (bool, 'w'),
'video-speed-correction': (float, 'r'),
'program': (int, 'w'),
'sid': (str, 'rw'),
'sub': (str, 'rw'), # alias for sid
'secondary-sid': (str, 'rw'),
'sub-delay': (float, 'rw'),
'sub-pos': (int, 'rw'),
'sub-visibility': (bool, 'rw'),
'sub-forced-only': (bool, 'rw'),
'sub-scale': (float, 'rw'),
'sub-bitrate': (float, 'r'),
'packet-sub-bitrate': (float, 'r'),
# 'ass-use-margins': (bool, 'rw'),
'ass-vsfilter-aspect-compat': (bool, 'rw'),
'ass-style-override': (bool, 'rw'),
'stream-capture': (str, 'rw'),
'tv-brightness': (int, 'rw'),
'tv-contrast': (int, 'rw'),
'tv-saturation': (int, 'rw'),
'tv-hue': (int, 'rw'),
'playlist-pos': (int, 'rw'),
'playlist-pos-1': (int, 'rw'), # ugh.
'playlist-count': (int, 'r'),
# 'quvi-format': (str, 'rw'),
'seekable': (bool, 'r'),
'seeking': (bool, 'r'),
'partially-seekable': (bool, 'r'),
'playback-abort': (bool, 'r'),
'cursor-autohide': (str, 'rw'),
'audio-device': (str, 'rw'),
'current-vo': (str, 'r'),
'current-ao': (str, 'r'),
'audio-out-detected-device': (str, 'r'),
'protocol-list': (str, 'r'),
'mpv-version': (str, 'r'),
'mpv-configuration': (str, 'r'),
'ffmpeg-version': (str, 'r'),
'display-sync-active': (bool, 'r'),
'stream-open-filename': (bytes, 'rw'), # Undocumented
'file-format': (commalist,'r'), # Be careful with this one.
'mistimed-frame-count': (int, 'r'),
'vsync-ratio': (float, 'r'),
'vo-drop-frame-count': (int, 'r'),
'vo-delayed-frame-count': (int, 'r'),
'playback-time': (float, 'rw'),
'demuxer-cache-duration': (float, 'r'),
'demuxer-cache-time': (float, 'r'),
'demuxer-cache-idle': (bool, 'r'),
'idle': (bool, 'r'),
'disc-title-list': (commalist,'r'),
'field-dominance': (str, 'rw'),
'taskbar-progress': (bool, 'rw'),
'on-all-workspaces': (bool, 'rw'),
'video-output-levels': (str, 'r'),
'vo-configured': (bool, 'r'),
'hwdec-current': (str, 'r'),
'hwdec-interop': (str, 'r'),
'estimated-frame-count': (int, 'r'),
'estimated-frame-number': (int, 'r'),
'sub-use-margins': (bool, 'rw'),
'ass-force-margins': (bool, 'rw'),
'video-rotate': (str, 'rw'),
'video-stereo-mode': (str, 'rw'),
'ab-loop-a': (str, 'r'), # What a mess...
'ab-loop-b': (str, 'r'),
'dvb-channel': (str, 'w'),
'dvb-channel-name': (str, 'rw'),
'window-minimized': (bool, 'r'),
'display-names': (commalist, 'r'),
'display-fps': (float, 'r'), # access apparently misdocumented in the manpage
'estimated-display-fps': (float, 'r'),
'vsync-jitter': (float, 'r'),
'video-params': (node, 'r', True),
'video-out-params': (node, 'r', True),
'track-list': (node, 'r', False),
'playlist': (node, 'r', False),
'chapter-list': (node, 'r', False),
'vo-performance': (node, 'r', True),
'filtered-metadata': (node, 'r', False),
'metadata': (node, 'r', False),
'chapter-metadata': (node, 'r', False),
'vf-metadata': (node, 'r', False),
'af-metadata': (node, 'r', False),
'edition-list': (node, 'r', False),
'disc-titles': (node, 'r', False),
'audio-params': (node, 'r', True),
'audio-out-params': (node, 'r', True),
'audio-device-list': (node, 'r', True),
'video-frame-info': (node, 'r', True),
'decoder-list': (node, 'r', True),
'encoder-list': (node, 'r', True),
'vf': (node, 'r', True),
'af': (node, 'r', True),
'options': (node, 'r', True),
'file-local-options': (node, 'r', True),
'property-list': (commalist,'r')}
def bindproperty(MPV, name, proptype, access, decode_str=False):
getter = lambda self: self._get_property(name, proptype, decode_str)
setter = lambda self, value: self._set_property(name, value, proptype)
def barf(*args):
raise NotImplementedError('Access denied')
setattr(MPV, name.replace('-', '_'), property(getter if 'r' in access else barf, setter if 'w' in access else barf))
for name, (proptype, access, *args) in ALL_PROPERTIES.items():
bindproperty(MPV, name, proptype, access, *args)
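# Minimal usage sketch (an assumption-laden example, not part of the library):
# it assumes libmpv is installed and 'test.webm' is a placeholder filename.
if __name__ == '__main__':
    player = MPV(video='no', log_handler=lambda level, prefix, text: print(level, prefix, text))
    # 'volume' is generated by bindproperty() above, so plain attribute access works.
    player.volume = 50
    player.observe_property('time-pos', lambda pos: print('time-pos:', pos))
    player.play('test.webm')  # placeholder media file
    player.wait_for_playback()
    player.terminate()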
| Frechdachs/python-mpv | mpv.py | Python | agpl-3.0 | 42,232 |
# -*- coding: utf-8 -*-
"""
2020-09-07 Cornelius Kölbel <cornelius.koelbel@netknights.it>
Add exception
2017-04-26 Friedrich Weber <friedrich.weber@netknights.it>
Make it possible to check for correct LDAPS/STARTTLS settings
2017-01-08 Cornelius Kölbel <cornelius.koelbel@netknights.it>
Remove objectGUID. Since we stick with ldap3 version 2.1,
the objectGUID is returned in a human readable format.
2016-12-05 Martin Wheldon <martin.wheldon@greenhills-it.co.uk>
Fixed issue creating ldap entries with objectClasses defined
Fix problem when searching for attribute values containing the
space character.
2016-05-26 Martin Wheldon <martin.wheldon@greenhills-it.co.uk>
Rewrite of search functionality to add recursive parsing
of ldap search filters
Fixed issue searching for attributes with multiple values
Added ability to use ~= in searches
Created unittests for mock
2016-02-19 Cornelius Kölbel <cornelius.koelbel@netknights.it>
Add the possibility to check objectGUID
2015-01-31 Change responses.py to be able to run with SMTP
Cornelius Kölbel <cornelius@privacyidea.org>
Original responses.py is:
Copyright 2013 Dropbox, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import (
absolute_import, division, unicode_literals
)
from passlib.hash import ldap_salted_sha1
from ast import literal_eval
import uuid
from ldap3.utils.conv import escape_bytes
import ldap3
import re
import pyparsing
from .smtpmock import get_wrapped
from collections import namedtuple
try:
    from collections.abc import Sequence, Sized  # Python 3.3+
except ImportError:  # Python 2 fallback
    from collections import Sequence, Sized
from privacyidea.lib.utils import to_bytes, to_unicode
DIRECTORY = "tests/testdata/tmp_directory"
Call = namedtuple('Call', ['request', 'response'])
_wrapper_template = """\
def wrapper%(signature)s:
with ldap3mock:
return func%(funcargs)s
"""
def _convert_objectGUID(item):
item = uuid.UUID("{{{0!s}}}".format(item)).bytes_le
item = escape_bytes(item)
return item
class CallList(Sequence, Sized):
def __init__(self):
self._calls = []
def __iter__(self):
return iter(self._calls)
def __len__(self):
return len(self._calls)
def __getitem__(self, idx):
return self._calls[idx]
def setdata(self, request, response):
self._calls.append(Call(request, response))
def reset(self):
self._calls = []
class Connection(object):
class Extend(object):
class Standard(object):
def __init__(self, connection):
self.connection = connection
def paged_search(self, **kwargs):
self.connection.search(search_base=kwargs.get("search_base"),
search_scope=kwargs.get("search_scope"),
search_filter=kwargs.get(
"search_filter"),
attributes=kwargs.get("attributes"),
paged_size=kwargs.get("page_size"),
size_limit=kwargs.get("size_limit"),
paged_cookie=None)
result = self.connection.response
if kwargs.get("generator", False):
# If ``generator=True`` is passed, ``paged_search`` should return an iterator.
result = iter(result)
return result
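# Illustrative call (keyword values made up): mirrors ldap3's
# ``conn.extend.standard.paged_search(search_base=..., search_filter=...,
# attributes=[...], generator=True)``; with generator=True an iterator
# over the matching entries is returned.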
def __init__(self, connection):
self.standard = self.Standard(connection)
def __init__(self, directory=None):
if directory is None:
directory = []
import copy
self.directory = copy.deepcopy(directory)
self.bound = False
self.start_tls_called = False
self.extend = self.Extend(self)
self.operation = {
"!" : self._search_not,
"&" : self._search_and,
"|" : self._search_or,
}
def set_directory(self, directory):
self.directory = directory
def _find_user(self, dn):
return next(i for (i, d) in enumerate(self.directory) if d["dn"] == dn)
@staticmethod
def open(read_server_info=True):
return
def bind(self, read_server_info=True):
return self.bound
def start_tls(self, read_server_info=True):
self.start_tls_called = True
def add(self, dn, object_class=None, attributes=None):
self.result = { 'dn' : '',
'referrals' : None,
'description' : 'success',
'result' : 0,
'message' : '',
'type' : 'addResponse'}
# Check to see if the user exists in the directory
try:
index = self._find_user(dn)
except StopIteration:
# If we get here the user doesn't exist so continue
# Create a entry object for the new user
entry = {}
entry['dn'] = dn
entry['attributes'] = attributes
if object_class is not None:
entry['attributes'].update( {'objectClass': object_class} )
else:
# User already exists
self.result["description"] = "failure"
self.result["result"] = 68
self.result["message"] = \
"Error entryAlreadyExists for {0}".format(dn)
return False
# Add the user entry to the directory
self.directory.append(entry)
# Attempt to write changes to disk
with open(DIRECTORY, 'w+') as f:
f.write(str(self.directory))
return True
def delete(self, dn, controls=None):
self.result = { 'dn' : '',
'referrals' : None,
'description' : 'success',
'result' : 0,
'message' : '',
'type' : 'addResponse'}
# Check to see if the user exists in the directory
try:
index = self._find_user(dn)
except StopIteration:
# If we get here the user doesn't exist so continue
self.result["description"] = "failure"
self.result["result"] = 32
self.result["message"] = "Error no such object: {0}".format(dn)
return False
# Delete the entry object for the user
self.directory.pop(index)
# Attempt to write changes to disk
with open(DIRECTORY, 'w+') as f:
f.write(str(self.directory))
return True
def modify(self, dn, changes, controls=None):
self.result = { 'dn' : '',
'referrals' : None,
'description' : 'success',
'result' : 0,
'message' : '',
'type' : 'modifyResponse'}
# Check to see if the user exists in the directory
try:
index = self._find_user(dn)
except StopIteration:
# If we get here the user doesn't exist so continue
self.result["description"] = "failure"
self.result["result"] = 32
self.result["message"] = "Error no such object: {0!s}".format(dn)
return False
# extract the hash we are interested in
entry = self.directory[index].get("attributes")
# Loop over the changes hash and apply them
for k, v in changes.items():
if v[0] == "MODIFY_DELETE":
entry.pop(k)
elif v[0] == "MODIFY_REPLACE" or v[0] == "MODIFY_ADD":
entry[k] = v[1][0]
else:
self.result["result"] = 2
self.result["message"] = "Error bad/missing/not implemented" \
"modify operation: %s" % k[1]
# Place the attributes back into the directory hash
self.directory[index]["attributes"] = entry
# Attempt to write changes to disk
with open(DIRECTORY, 'w+') as f:
f.write(str(self.directory))
return True
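# Illustrative call (values made up): the ``changes`` mapping follows
# ldap3's (operation, [values]) convention, e.g.
#
#   conn.modify('cn=alice,ou=example,o=test',
#               {'mail': ("MODIFY_REPLACE", ["alice@example.com"])})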
@staticmethod
def _match_greater_than_or_equal(search_base, attribute, value, candidates):
matches = list()
for entry in candidates:
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
value_from_directory = entry.get("attributes").get(attribute)
if str(value_from_directory) >= str(value):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches
@staticmethod
def _match_greater_than(search_base, attribute, value, candidates):
matches = list()
for entry in candidates:
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
value_from_directory = entry.get("attributes").get(attribute)
if str(value_from_directory) > str(value):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches
@staticmethod
def _match_less_than_or_equal(search_base, attribute, value, candidates):
matches = list()
for entry in candidates:
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
value_from_directory = entry.get("attributes").get(attribute)
if str(value_from_directory) <= str(value):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches
@staticmethod
def _match_less_than(search_base, attribute, value, candidates):
matches = list()
for entry in candidates:
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
value_from_directory = entry.get("attributes").get(attribute)
if str(value_from_directory) < str(value):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches
@staticmethod
def _match_equal_to(search_base, attribute, value, candidates):
matches = list()
match_using_regex = False
if "*" in value:
match_using_regex = True
#regex = check_escape(value)
regex = value.replace('*', '.*')
regex = "^{0}$".format(regex)
for entry in candidates:
dn = to_unicode(entry.get("dn"))
if attribute not in entry.get("attributes") or not dn.endswith(search_base):
continue
values_from_directory = entry.get("attributes").get(attribute)
if isinstance(values_from_directory, list):
for item in values_from_directory:
if attribute == "objectGUID":
item = _convert_objectGUID(item)
if match_using_regex:
m = re.match(regex, str(item), re.I)
if m:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
if item == value:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
if attribute == "objectGUID":
values_from_directory = _convert_objectGUID(values_from_directory)
if match_using_regex:
m = re.match(regex, str(values_from_directory), re.I)
if m:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
# The value, which we compare is unicode, so we convert
# the values_from_directory to unicode rather than str.
if isinstance(values_from_directory, bytes):
values_from_directory = values_from_directory.decode(
"utf-8")
elif type(values_from_directory) == int:
values_from_directory = u"{0!s}".format(values_from_directory)
if value == values_from_directory:
entry["type"] = "searchResEntry"
matches.append(entry)
return matches
@staticmethod
def _match_notequal_to(search_base, attribute, value, candidates):
matches = list()
match_using_regex = False
if "*" in value:
match_using_regex = True
#regex = check_escape(value)
regex = value.replace('*', '.*')
regex = "^{0}$".format(regex)
for entry in candidates:
found = False
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
values_from_directory = entry.get("attributes").get(attribute)
if isinstance(values_from_directory, list):
for item in values_from_directory:
if attribute == "objectGUID":
item = _convert_objectGUID(item)
if match_using_regex:
m = re.match(regex, str(item), re.I)
if m:
found = True
else:
if item == value:
found = True
if found is False:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
if attribute == "objectGUID":
values_from_directory = _convert_objectGUID(values_from_directory)
if match_using_regex:
m = re.match(regex, str(values_from_directory), re.I)
if not m:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
if str(value) != str(values_from_directory):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches
@staticmethod
def _parse_filter():
op = pyparsing.oneOf('! & |')
lpar = pyparsing.Literal('(').suppress()
rpar = pyparsing.Literal(')').suppress()
k = pyparsing.Word(pyparsing.alphanums)
# NOTE: We may need to expand on this list, but as this is not a real
# LDAP server we should be OK.
# Value to contain:
# numbers, upper/lower case letters, asterisk, at symbol, minus, full
# stop, backslash or a space
v = pyparsing.Word(pyparsing.alphanums + "-*@.\\ äöü")
rel = pyparsing.oneOf("= ~= >= <=")
expr = pyparsing.Forward()
atom = pyparsing.Group(lpar + op + expr + rpar) \
| pyparsing.Combine(lpar + k + rel + v + rpar)
        expr << atom + pyparsing.ZeroOrMore(expr)
return expr
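    # Illustrative sketch (filter value invented) of what the grammar above
    # yields for a typical LDAP filter:
    #   expr = Connection._parse_filter()
    #   expr.parseString("(&(cn=alice)(objectClass=person))").asList()[0]
    #   # -> ['&', 'cn=alice', 'objectClass=person']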
@staticmethod
def _deDuplicate(results):
found = dict()
deDuped = list()
for entry in results:
dn = entry.get("dn")
            if dn not in found:
found[dn] = 1
deDuped.append(entry)
return deDuped
def _invert_results(self, candidates):
inverted_candidates = list(self.directory)
for candidate in candidates:
try:
inverted_candidates.remove(candidate)
except ValueError:
pass
return inverted_candidates
def _search_not(self, base, search_filter, candidates=None):
# Create empty candidates list as we need to use self.directory for
# each search
candidates = list()
this_filter = list()
index = 0
search_filter.remove("!")
for condition in search_filter:
if not isinstance(condition, list):
this_filter.append(condition)
                index += 1
# Remove this_filter items from search_filter list
for condition in this_filter:
search_filter.remove(condition)
try:
search_filter = list(search_filter[0])
for sub_filter in search_filter:
if not isinstance(sub_filter, list):
candidates = self.operation.get(sub_filter)(base,
search_filter,
candidates)
else:
candidates = self.operation.get(sub_filter[0])(base,
sub_filter,
candidates)
except IndexError:
pass
candidates = self._invert_results(candidates)
        # NOT semantics: each comparison below is evaluated with the inverted
        # matcher (">=" via _match_less_than, "<=" via _match_greater_than,
        # "=" and "~=" via _match_notequal_to) against the full directory.
        for item in this_filter:
if ">=" in item:
k, v = item.split(">=")
candidates = Connection._match_less_than(base, k, v,
self.directory)
elif "<=" in item:
k, v = item.split("<=")
candidates = Connection._match_greater_than(base, k, v,
self.directory)
# Emulate AD functionality, same as "="
elif "~=" in item:
k, v = item.split("~=")
candidates = Connection._match_notequal_to(base, k, v,
self.directory)
elif "=" in item:
k, v = item.split("=")
candidates = Connection._match_notequal_to(base, k, v,
self.directory)
return candidates
def _search_and(self, base, search_filter, candidates=None):
# Load the data from the directory, if we aren't passed any
if candidates == [] or candidates is None:
candidates = self.directory
this_filter = list()
index = 0
search_filter.remove("&")
for condition in search_filter:
if not isinstance(condition, list):
this_filter.append(condition)
                index += 1
# Remove this_filter items from search_filter list
for condition in this_filter:
search_filter.remove(condition)
try:
search_filter = list(search_filter[0])
for sub_filter in search_filter:
if not isinstance(sub_filter, list):
candidates = self.operation.get(sub_filter)(base,
search_filter,
candidates)
else:
candidates = self.operation.get(sub_filter[0])(base,
sub_filter,
candidates)
except IndexError:
pass
for item in this_filter:
if ">=" in item:
k, v = item.split(">=")
candidates = Connection._match_greater_than_or_equal(base, k, v,
candidates)
elif "<=" in item:
k, v = item.split("<=")
candidates = Connection._match_less_than_or_equal(base, k, v,
candidates)
# Emulate AD functionality, same as "="
elif "~=" in item:
k, v = item.split("~=")
candidates = Connection._match_equal_to(base, k, v,
candidates)
elif "=" in item:
k, v = item.split("=")
candidates = Connection._match_equal_to(base, k, v,
candidates)
return candidates
def _search_or(self, base, search_filter, candidates=None):
# Create empty candidates list as we need to use self.directory for
# each search
candidates = list()
this_filter = list()
index = 0
search_filter.remove("|")
for condition in search_filter:
if not isinstance(condition, list):
this_filter.append(condition)
                index += 1
# Remove this_filter items from search_filter list
for condition in this_filter:
search_filter.remove(condition)
try:
search_filter = list(search_filter[0])
for sub_filter in search_filter:
if not isinstance(sub_filter, list):
candidates += self.operation.get(sub_filter)(base,
search_filter,
candidates)
else:
candidates += self.operation.get(sub_filter[0])(base,
sub_filter,
candidates)
except IndexError:
pass
for item in this_filter:
if ">=" in item:
k, v = item.split(">=")
candidates += Connection._match_greater_than_or_equal(base, k, v,
self.directory)
elif "<=" in item:
k, v = item.split("<=")
candidates += Connection._match_less_than_or_equal(base, k, v,
self.directory)
# Emulate AD functionality, same as "="
elif "~=" in item:
k, v = item.split("~=")
candidates += Connection._match_equal_to(base, k, v,
self.directory)
elif "=" in item:
k, v = item.split("=")
candidates += Connection._match_equal_to(base, k, v,
self.directory)
return candidates
def search(self, search_base=None, search_scope=None,
search_filter=None, attributes=None, paged_size=5,
size_limit=0, paged_cookie=None):
s_filter = list()
candidates = list()
self.response = list()
self.result = dict()
try:
if isinstance(search_filter, bytes):
# We need to convert to unicode otherwise pyparsing will not
# find the u"ö"
search_filter = to_unicode(search_filter)
expr = Connection._parse_filter()
s_filter = expr.parseString(search_filter).asList()[0]
except pyparsing.ParseBaseException as exx:
# Just for debugging purposes
s = "{!s}".format(exx)
for item in s_filter:
if item[0] in self.operation:
candidates = self.operation.get(item[0])(search_base,
s_filter)
self.response = Connection._deDuplicate(candidates)
return True
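    # Hedged usage sketch (base and filter values invented for illustration):
    #   conn = Connection(directory)
    #   conn.search(search_base="dc=example,dc=com",
    #               search_filter="(&(cn=alice)(objectClass=person))")
    #   matches = conn.response  # de-duplicated searchResEntry dicts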
def unbind(self):
return True
class Ldap3Mock(object):
def __init__(self):
self._calls = CallList()
self._server_mock = None
self.directory = []
self.exception = None
self.reset()
def reset(self):
self._calls.reset()
def setLDAPDirectory(self, directory=None):
if directory is None:
self.directory = []
else:
try:
with open(DIRECTORY, 'w+') as f:
f.write(str(directory))
self.directory = directory
except OSError as e:
raise
def set_exception(self, exc=True):
self.exception = exc
def _load_data(self, directory):
try:
with open(directory, 'r') as f:
data = f.read()
return literal_eval(data)
except OSError as e:
raise
@property
def calls(self):
return self._calls
def __enter__(self):
self.start()
def __exit__(self, *args):
self.stop()
self.reset()
def activate(self, func):
evaldict = {'ldap3mock': self, 'func': func}
return get_wrapped(func, _wrapper_template, evaldict)
def _on_Server(self, host, port, use_ssl, connect_timeout, get_info=None,
tls=None):
# mangle request packet
return "FakeServerObject"
def _on_Connection(self, server, user, password,
auto_bind=None, client_strategy=None,
authentication=None, check_names=None,
auto_referrals=None, receive_timeout=None):
"""
We need to create a Connection object with
methods:
add()
modify()
search()
unbind()
and object
response
"""
# Raise an exception, if we are told to do so
if self.exception:
raise Exception("LDAP request failed")
# check the password
correct_password = False
# Anonymous bind
# Reload the directory just in case a change has been made to
# user credentials
self.directory = self._load_data(DIRECTORY)
if authentication == ldap3.ANONYMOUS and user == "":
correct_password = True
for entry in self.directory:
if to_unicode(entry.get("dn")) == user:
pw = entry.get("attributes").get("userPassword")
# password can be unicode
if to_bytes(pw) == to_bytes(password):
correct_password = True
elif pw.startswith('{SSHA}'):
correct_password = ldap_salted_sha1.verify(password, pw)
else:
correct_password = False
self.con_obj = Connection(self.directory)
self.con_obj.bound = correct_password
return self.con_obj
def start(self):
import mock
def unbound_on_Server(host, port,
use_ssl,
connect_timeout, *a, **kwargs):
return self._on_Server(host, port,
use_ssl,
connect_timeout, *a, **kwargs)
self._server_mock = mock.MagicMock()
self._server_mock.side_effect = unbound_on_Server
self._patcher = mock.patch('ldap3.Server',
self._server_mock)
self._patcher.start()
def unbound_on_Connection(server, user,
password,
auto_bind,
client_strategy,
authentication,
check_names,
auto_referrals, *a, **kwargs):
return self._on_Connection(server, user,
password,
auto_bind,
client_strategy,
authentication,
check_names,
auto_referrals, *a,
**kwargs)
self._patcher2 = mock.patch('ldap3.Connection',
unbound_on_Connection)
self._patcher2.start()
def stop(self):
self._patcher.stop()
self._patcher2.stop()
self._server_mock = None
def get_server_mock(self):
return self._server_mock
# expose default mock namespace
mock = _default_mock = Ldap3Mock()
__all__ = []
for __attr in (a for a in dir(_default_mock) if not a.startswith('_')):
__all__.append(__attr)
globals()[__attr] = getattr(_default_mock, __attr)
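# Hedged usage sketch of the module-level mock (directory entry and test name
# invented for illustration):
#   ldap3mock.setLDAPDirectory([{"dn": "cn=alice,dc=example,dc=com",
#                                "attributes": {"userPassword": "secret"}}])
#   @ldap3mock.activate
#   def test_something():
#       ...  # ldap3.Server and ldap3.Connection are patched inside this test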
| privacyidea/privacyidea | tests/ldap3mock.py | Python | agpl-3.0 | 28,972 |
# -*- coding: utf-8 -*-
"""
Models for Student Identity Verification
This is where we put any models relating to establishing the real-life identity
of a student over a period of time. Right now, the only models are the abstract
`PhotoVerification`, and its one concrete implementation
`SoftwareSecurePhotoVerification`. The hope is to keep the photo
verification process as generic as possible.
"""
import functools
import json
import logging
import os.path
import uuid
from datetime import timedelta
from email.utils import formatdate
import requests
import six
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.files.base import ContentFile
from django.urls import reverse
from django.db import models
from django.dispatch import receiver
from django.utils.functional import cached_property
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy
from model_utils import Choices
from model_utils.models import StatusModel, TimeStampedModel
from opaque_keys.edx.django.models import CourseKeyField
from lms.djangoapps.verify_student.ssencrypt import (
encrypt_and_encode,
generate_signed_message,
random_aes_key,
rsa_encrypt
)
from openedx.core.djangoapps.signals.signals import LEARNER_NOW_VERIFIED
from openedx.core.storage import get_storage
from .utils import earliest_allowed_verification_date
log = logging.getLogger(__name__)
def generateUUID(): # pylint: disable=invalid-name
""" Utility function; generates UUIDs """
return str(uuid.uuid4())
class VerificationException(Exception):
pass
def status_before_must_be(*valid_start_statuses):
"""
Helper decorator with arguments to make sure that an object with a `status`
attribute is in one of a list of acceptable status states before a method
is called. You could use it in a class definition like:
@status_before_must_be("submitted", "approved", "denied")
def refund_user(self, user_id):
# Do logic here...
If the object has a status that is not listed when the `refund_user` method
is invoked, it will throw a `VerificationException`. This is just to avoid
distracting boilerplate when looking at a Model that needs to go through a
workflow process.
"""
def decorator_func(func):
"""
Decorator function that gets returned
"""
@functools.wraps(func)
def with_status_check(obj, *args, **kwargs):
if obj.status not in valid_start_statuses:
exception_msg = (
u"Error calling {} {}: status is '{}', must be one of: {}"
).format(func, obj, obj.status, valid_start_statuses)
raise VerificationException(exception_msg)
return func(obj, *args, **kwargs)
return with_status_check
return decorator_func
class IDVerificationAttempt(StatusModel):
"""
Each IDVerificationAttempt represents a Student's attempt to establish
their identity through one of several methods that inherit from this Model,
including PhotoVerification and SSOVerification.
.. pii: The User's name is stored in this and sub-models
.. pii_types: name
.. pii_retirement: retained
"""
STATUS = Choices('created', 'ready', 'submitted', 'must_retry', 'approved', 'denied')
user = models.ForeignKey(User, db_index=True, on_delete=models.CASCADE)
    # They can change their name later on, so we want to copy the value here so
    # we always preserve what it was at the time they requested verification.
    # We only copy this value during the mark_ready() step. Prior to that, you
    # should be displaying the user's name from their user.profile.name.
name = models.CharField(blank=True, max_length=255)
created_at = models.DateTimeField(auto_now_add=True, db_index=True)
updated_at = models.DateTimeField(auto_now=True, db_index=True)
class Meta(object):
app_label = "verify_student"
abstract = True
ordering = ['-created_at']
@property
def expiration_datetime(self):
"""Datetime that the verification will expire. """
days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
return self.created_at + timedelta(days=days_good_for)
def should_display_status_to_user(self):
"""Whether or not the status from this attempt should be displayed to the user."""
raise NotImplementedError
def active_at_datetime(self, deadline):
"""Check whether the verification was active at a particular datetime.
Arguments:
            deadline (datetime): The attempt must have been created before
                this date, and its expiration datetime must still be in the
                future at the time of the check.
Returns:
bool
"""
return (
self.created_at < deadline and
self.expiration_datetime > now()
)
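    # Worked example (hypothetical dates, assuming DAYS_GOOD_FOR = 365): an
    # attempt created on 2019-01-01 is active at a 2019-06-01 deadline as long
    # as "now" is still before 2020-01-01, its expiration_datetime.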
class ManualVerification(IDVerificationAttempt):
"""
Each ManualVerification represents a user's verification that bypasses the need for
any other verification.
.. pii: The User's name is stored in the parent model
.. pii_types: name
.. pii_retirement: retained
"""
reason = models.CharField(
max_length=255,
blank=True,
help_text=(
'Specifies the reason for manual verification of the user.'
)
)
class Meta(object):
app_label = 'verify_student'
def __unicode__(self):
return 'ManualIDVerification for {name}, status: {status}'.format(
name=self.name,
status=self.status,
)
def should_display_status_to_user(self):
"""
Whether or not the status should be displayed to the user.
"""
return False
class SSOVerification(IDVerificationAttempt):
"""
Each SSOVerification represents a Student's attempt to establish their identity
by signing in with SSO. ID verification through SSO bypasses the need for
photo verification.
.. no_pii:
"""
OAUTH2 = 'third_party_auth.models.OAuth2ProviderConfig'
SAML = 'third_party_auth.models.SAMLProviderConfig'
LTI = 'third_party_auth.models.LTIProviderConfig'
IDENTITY_PROVIDER_TYPE_CHOICES = (
(OAUTH2, 'OAuth2 Provider'),
(SAML, 'SAML Provider'),
(LTI, 'LTI Provider'),
)
identity_provider_type = models.CharField(
max_length=100,
blank=False,
choices=IDENTITY_PROVIDER_TYPE_CHOICES,
default=SAML,
help_text=(
'Specifies which type of Identity Provider this verification originated from.'
)
)
identity_provider_slug = models.SlugField(
max_length=30, db_index=True, default='default',
help_text=(
'The slug uniquely identifying the Identity Provider this verification originated from.'
))
class Meta(object):
app_label = "verify_student"
def __unicode__(self):
return 'SSOIDVerification for {name}, status: {status}'.format(
name=self.name,
status=self.status,
)
def should_display_status_to_user(self):
"""Whether or not the status from this attempt should be displayed to the user."""
return False
class PhotoVerification(IDVerificationAttempt):
"""
Each PhotoVerification represents a Student's attempt to establish
their identity by uploading a photo of themselves and a picture ID. An
attempt actually has a number of fields that need to be filled out at
different steps of the approval process. While it's useful as a Django Model
for the querying facilities, **you should only edit a `PhotoVerification`
object through the methods provided**. Initialize them with a user:
attempt = PhotoVerification(user=user)
We track this attempt through various states:
`created`
Initial creation and state we're in after uploading the images.
`ready`
The user has uploaded their images and checked that they can read the
images. There's a separate state here because it may be the case that we
don't actually submit this attempt for review until payment is made.
`submitted`
Submitted for review. The review may be done by a staff member or an
external service. The user cannot make changes once in this state.
`must_retry`
We submitted this, but there was an error on submission (i.e. we did not
get a 200 when we POSTed to Software Secure)
`approved`
An admin or an external service has confirmed that the user's photo and
photo ID match up, and that the photo ID's name matches the user's.
`denied`
The request has been denied. See `error_msg` for details on why. An
admin might later override this and change to `approved`, but the
student cannot re-open this attempt -- they have to create another
attempt and submit it instead.
Because this Model inherits from IDVerificationAttempt, which inherits
from StatusModel, we can also do things like:
attempt.status == PhotoVerification.STATUS.created
attempt.status == "created"
pending_requests = PhotoVerification.submitted.all()
.. pii: The User's name is stored in the parent model, this one stores links to face and photo ID images
.. pii_types: name, image
.. pii_retirement: retained
"""
######################## Fields Set During Creation ########################
# See class docstring for description of status states
# Where we place the uploaded image files (e.g. S3 URLs)
face_image_url = models.URLField(blank=True, max_length=255)
photo_id_image_url = models.URLField(blank=True, max_length=255)
# Randomly generated UUID so that external services can post back the
    # results of checking a user's photo submission without us exposing actual
# user IDs or something too easily guessable.
receipt_id = models.CharField(
db_index=True,
default=generateUUID,
max_length=255,
)
# Indicates whether or not a user wants to see the verification status
# displayed on their dash. Right now, only relevant for allowing students
# to "dismiss" a failed midcourse reverification message
# TODO: This field is deprecated.
display = models.BooleanField(db_index=True, default=True)
######################## Fields Set When Submitting ########################
submitted_at = models.DateTimeField(null=True, db_index=True)
#################### Fields Set During Approval/Denial #####################
# If the review was done by an internal staff member, mark who it was.
reviewing_user = models.ForeignKey(
User,
db_index=True,
default=None,
null=True,
related_name="photo_verifications_reviewed",
on_delete=models.CASCADE,
)
    # Mark the name of the service used to evaluate this attempt (e.g.
# Software Secure).
reviewing_service = models.CharField(blank=True, max_length=255)
# If status is "denied", this should contain text explaining why.
error_msg = models.TextField(blank=True)
# Non-required field. External services can add any arbitrary codes as time
    # goes on. We don't try to define an exhaustive list -- this is just
# capturing it so that we can later query for the common problems.
error_code = models.CharField(blank=True, max_length=50)
class Meta(object):
app_label = "verify_student"
abstract = True
ordering = ['-created_at']
def parsed_error_msg(self):
"""
Sometimes, the error message we've received needs to be parsed into
something more human readable
The default behavior is to return the current error message as is.
"""
return self.error_msg
@status_before_must_be("created")
def upload_face_image(self, img):
raise NotImplementedError
@status_before_must_be("created")
def upload_photo_id_image(self, img):
raise NotImplementedError
@status_before_must_be("created")
def mark_ready(self):
"""
Mark that the user data in this attempt is correct. In order to
succeed, the user must have uploaded the necessary images
(`face_image_url`, `photo_id_image_url`). This method will also copy
their name from their user profile. Prior to marking it ready, we read
this value directly from their profile, since they're free to change it.
This often happens because people put in less formal versions of their
name on signup, but realize they want something different to go on a
formal document.
Valid attempt statuses when calling this method:
`created`
Status after method completes: `ready`
Other fields that will be set by this method:
`name`
State Transitions:
`created` → `ready`
This is what happens when the user confirms to us that the pictures
they uploaded are good. Note that we don't actually do a submission
anywhere yet.
"""
# At any point prior to this, they can change their names via their
# student dashboard. But at this point, we lock the value into the
# attempt.
self.name = self.user.profile.name
self.status = "ready"
self.save()
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def approve(self, user_id=None, service=""):
"""
        Approve this attempt, marking `user_id` as the reviewing user.
Valid attempt statuses when calling this method:
        `must_retry`, `submitted`, `approved`, `denied`
Status after method completes: `approved`
Other fields that will be set by this method:
        `reviewing_user`, `reviewing_service`, `error_msg`
State Transitions:
`submitted` → `approved`
This is the usual flow, whether initiated by a staff user or an
external validation service.
`approved` → `approved`
No-op. First one to approve it wins.
`denied` → `approved`
This might happen if a staff member wants to override a decision
made by an external service or another staff member (say, in
response to a support request). In this case, the previous values
            of `reviewing_user` and `reviewing_service` will be changed
to whoever is doing the approving, and `error_msg` will be reset.
The only record that this record was ever denied would be in our
            logs. This should be a relatively rare occurrence.
"""
# If someone approves an outdated version of this, the first one wins
if self.status == "approved":
return
log.info(u"Verification for user '{user_id}' approved by '{reviewer}'.".format(
user_id=self.user, reviewer=user_id
))
self.error_msg = "" # reset, in case this attempt was denied before
self.error_code = "" # reset, in case this attempt was denied before
self.reviewing_user = user_id
self.reviewing_service = service
self.status = "approved"
self.save()
# Emit signal to find and generate eligible certificates
LEARNER_NOW_VERIFIED.send_robust(
sender=PhotoVerification,
user=self.user
)
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def deny(self,
error_msg,
error_code="",
reviewing_user=None,
reviewing_service=""):
"""
Deny this attempt.
Valid attempt statuses when calling this method:
        `must_retry`, `submitted`, `approved`, `denied`
Status after method completes: `denied`
Other fields that will be set by this method:
        `reviewing_user`, `reviewing_service`, `error_msg`,
`error_code`
State Transitions:
`submitted` → `denied`
This is the usual flow, whether initiated by a staff user or an
external validation service.
`approved` → `denied`
This might happen if a staff member wants to override a decision
made by an external service or another staff member, or just correct
a mistake made during the approval process. In this case, the
            previous values of `reviewing_user` and `reviewing_service`
will be changed to whoever is doing the denying. The only record
that this record was ever approved would be in our logs. This should
            be a relatively rare occurrence.
`denied` → `denied`
Update the error message and reviewing_user/reviewing_service. Just
lets you amend the error message in case there were additional
details to be made.
"""
log.info(u"Verification for user '{user_id}' denied by '{reviewer}'.".format(
user_id=self.user, reviewer=reviewing_user
))
self.error_msg = error_msg
self.error_code = error_code
self.reviewing_user = reviewing_user
self.reviewing_service = reviewing_service
self.status = "denied"
self.save()
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def system_error(self,
error_msg,
error_code="",
reviewing_user=None,
reviewing_service=""):
"""
Mark that this attempt could not be completed because of a system error.
Status should be moved to `must_retry`. For example, if Software Secure
reported to us that they couldn't process our submission because they
couldn't decrypt the image we sent.
"""
if self.status in ["approved", "denied"]:
return # If we were already approved or denied, just leave it.
self.error_msg = error_msg
self.error_code = error_code
self.reviewing_user = reviewing_user
self.reviewing_service = reviewing_service
self.status = "must_retry"
self.save()
@classmethod
def retire_user(cls, user_id):
"""
Retire user as part of GDPR Phase I
Returns 'True' if records found
:param user_id: int
:return: bool
"""
try:
user_obj = User.objects.get(id=user_id)
except User.DoesNotExist:
return False
photo_objects = cls.objects.filter(
user=user_obj
).update(
name='',
face_image_url='',
photo_id_image_url='',
photo_id_key=''
)
return photo_objects > 0
class SoftwareSecurePhotoVerification(PhotoVerification):
"""
Model to verify identity using a service provided by Software Secure. Much
of the logic is inherited from `PhotoVerification`, but this class
encrypts the photos.
Software Secure (http://www.softwaresecure.com/) is a remote proctoring
service that also does identity verification. A student uses their webcam
to upload two images: one of their face, one of a photo ID. Due to the
sensitive nature of the data, the following security precautions are taken:
1. The snapshot of their face is encrypted using AES-256 in CBC mode. All
       face photos are encrypted with the same key, and this key is known to
both Software Secure and edx-platform.
2. The snapshot of a user's photo ID is also encrypted using AES-256, but
the key is randomly generated using os.urandom. Every verification
attempt has a new key. The AES key is then encrypted using a public key
       provided by Software Secure. We store only the RSA-encrypted AES key.
Since edx-platform does not have Software Secure's private RSA key, it
       means that we can no longer even read the photo ID.
3. The encrypted photos are base64 encoded and stored in an S3 bucket that
edx-platform does not have read access to.
    Note: this model handles *initial* verifications (which you must perform
at the time you register for a verified cert).
.. pii: The User's name is stored in the parent model, this one stores links to face and photo ID images
.. pii_types: name, image
.. pii_retirement: retained
"""
    # This is base64.urlsafe_b64encode(rsa_encrypt(photo_id_aes_key, ss_pub_key))
# So first we generate a random AES-256 key to encrypt our photo ID with.
# Then we RSA encrypt it with Software Secure's public key. Then we base64
# encode that. The result is saved here. Actual expected length is 344.
photo_id_key = models.TextField(max_length=1024)
IMAGE_LINK_DURATION = 5 * 60 * 60 * 24 # 5 days in seconds
copy_id_photo_from = models.ForeignKey("self", null=True, blank=True, on_delete=models.CASCADE)
# Fields for functionality of sending email when verification expires
# expiry_date: The date when the SoftwareSecurePhotoVerification will expire
    # expiry_email_date: Records when the expiry notification email was sent to
    # the learner, so that the notification is not sent twice.
expiry_date = models.DateTimeField(null=True, blank=True, db_index=True)
expiry_email_date = models.DateTimeField(null=True, blank=True, db_index=True)
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def approve(self, user_id=None, service=""):
"""
Approve the verification attempt for user
Valid attempt statuses when calling this method:
        `must_retry`, `submitted`, `approved`, `denied`
After method completes:
status is set to `approved`
expiry_date is set to one year from now
"""
self.expiry_date = now() + timedelta(
days=settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
)
super(SoftwareSecurePhotoVerification, self).approve(user_id, service)
@classmethod
def get_initial_verification(cls, user, earliest_allowed_date=None):
"""Get initial verification for a user with the 'photo_id_key'.
Arguments:
user(User): user object
earliest_allowed_date(datetime): override expiration date for initial verification
Return:
SoftwareSecurePhotoVerification (object) or None
"""
init_verification = cls.objects.filter(
user=user,
status__in=["submitted", "approved"],
created_at__gte=(
earliest_allowed_date or earliest_allowed_verification_date()
)
).exclude(photo_id_key='')
return init_verification.latest('created_at') if init_verification.exists() else None
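    # Hedged reverification sketch (variable names invented): a new attempt
    # can re-send the ID photo data from the initial attempt:
    #   initial = SoftwareSecurePhotoVerification.get_initial_verification(user)
    #   new_attempt.submit(copy_id_photo_from=initial)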
@status_before_must_be("created")
def upload_face_image(self, img_data):
"""
Upload an image of the user's face. `img_data` should be a raw
bytestream of a PNG image. This method will take the data, encrypt it
using our FACE_IMAGE_AES_KEY, encode it with base64 and save it to the
storage backend.
Yes, encoding it to base64 adds compute and disk usage without much real
benefit, but that's what the other end of this API is expecting to get.
"""
# Skip this whole thing if we're running acceptance tests or if we're
# developing and aren't interested in working on student identity
# verification functionality. If you do want to work on it, you have to
# explicitly enable these in your private settings.
if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
return
aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
aes_key = aes_key_str.decode("hex")
path = self._get_path("face")
buff = ContentFile(encrypt_and_encode(img_data, aes_key))
self._storage.save(path, buff)
@status_before_must_be("created")
def upload_photo_id_image(self, img_data):
"""
        Upload the user's photo ID image. `img_data` should be a raw
bytestream of a PNG image. This method will take the data, encrypt it
using a randomly generated AES key, encode it with base64 and save it
to the storage backend. The random key is also encrypted using Software
Secure's public RSA key and stored in our `photo_id_key` field.
Yes, encoding it to base64 adds compute and disk usage without much real
benefit, but that's what the other end of this API is expecting to get.
"""
# Skip this whole thing if we're running acceptance tests or if we're
# developing and aren't interested in working on student identity
# verification functionality. If you do want to work on it, you have to
# explicitly enable these in your private settings.
if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
# fake photo id key is set only for initial verification
self.photo_id_key = 'fake-photo-id-key'
self.save()
return
aes_key = random_aes_key()
rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
rsa_encrypted_aes_key = rsa_encrypt(aes_key, rsa_key_str)
# Save this to the storage backend
path = self._get_path("photo_id")
buff = ContentFile(encrypt_and_encode(img_data, aes_key))
self._storage.save(path, buff)
# Update our record fields
self.photo_id_key = rsa_encrypted_aes_key.encode('base64')
self.save()
@status_before_must_be("must_retry", "ready", "submitted")
def submit(self, copy_id_photo_from=None):
"""
Submit our verification attempt to Software Secure for validation. This
will set our status to "submitted" if the post is successful, and
"must_retry" if the post fails.
Keyword Arguments:
copy_id_photo_from (SoftwareSecurePhotoVerification): If provided, re-send the ID photo
data from this attempt. This is used for reverification, in which new face photos
are sent with previously-submitted ID photos.
"""
try:
response = self.send_request(copy_id_photo_from=copy_id_photo_from)
if response.ok:
self.submitted_at = now()
self.status = "submitted"
self.save()
else:
self.status = "must_retry"
self.error_msg = response.text
self.save()
except Exception: # pylint: disable=broad-except
log.exception(
u'Software Secure submission failed for user %s, setting status to must_retry',
self.user.username
)
self.status = "must_retry"
self.save()
def parsed_error_msg(self):
"""
Parse the error messages we receive from SoftwareSecure
Error messages are written in the form:
`[{"photoIdReasons": ["Not provided"]}]`
Returns:
str[]: List of error messages.
"""
parsed_errors = []
error_map = {
'EdX name not provided': 'name_mismatch',
'Name mismatch': 'name_mismatch',
'Photo/ID Photo mismatch': 'photos_mismatched',
'ID name not provided': 'id_image_missing_name',
'Invalid Id': 'id_invalid',
'No text': 'id_invalid',
'Not provided': 'id_image_missing',
'Photo hidden/No photo': 'id_image_not_clear',
'Text not clear': 'id_image_not_clear',
'Face out of view': 'user_image_not_clear',
'Image not clear': 'user_image_not_clear',
'Photo not provided': 'user_image_missing',
}
try:
messages = set()
message_groups = json.loads(self.error_msg)
for message_group in message_groups:
messages = messages.union(set(*six.itervalues(message_group)))
for message in messages:
parsed_error = error_map.get(message)
if parsed_error:
parsed_errors.append(parsed_error)
else:
log.debug(u'Ignoring photo verification error message: %s', message)
except Exception: # pylint: disable=broad-except
log.exception(u'Failed to parse error message for SoftwareSecurePhotoVerification %d', self.pk)
return parsed_errors
def image_url(self, name, override_receipt_id=None):
"""
        We dynamically generate this, since we want the expiration clock to
start when the message is created, not when the record is created.
Arguments:
name (str): Name of the image (e.g. "photo_id" or "face")
Keyword Arguments:
override_receipt_id (str): If provided, use this receipt ID instead
of the ID for this attempt. This is useful for reverification
where we need to construct a URL to a previously-submitted
photo ID image.
Returns:
string: The expiring URL for the image.
"""
path = self._get_path(name, override_receipt_id=override_receipt_id)
return self._storage.url(path)
@cached_property
def _storage(self):
"""
Return the configured django storage backend.
"""
config = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]
# Default to the S3 backend for backward compatibility
storage_class = config.get("STORAGE_CLASS", "storages.backends.s3boto.S3BotoStorage")
storage_kwargs = config.get("STORAGE_KWARGS", {})
# Map old settings to the parameters expected by the storage backend
if "AWS_ACCESS_KEY" in config:
storage_kwargs["access_key"] = config["AWS_ACCESS_KEY"]
if "AWS_SECRET_KEY" in config:
storage_kwargs["secret_key"] = config["AWS_SECRET_KEY"]
if "S3_BUCKET" in config:
storage_kwargs["bucket"] = config["S3_BUCKET"]
storage_kwargs["querystring_expire"] = self.IMAGE_LINK_DURATION
return get_storage(storage_class, **storage_kwargs)
def _get_path(self, prefix, override_receipt_id=None):
"""
Returns the path to a resource with this instance's `receipt_id`.
If `override_receipt_id` is given, the path to that resource will be
retrieved instead. This allows us to retrieve images submitted in
previous attempts (used for reverification, where we send a new face
photo with the same photo ID from a previous attempt).
"""
receipt_id = self.receipt_id if override_receipt_id is None else override_receipt_id
return os.path.join(prefix, receipt_id)
def _encrypted_user_photo_key_str(self):
"""
Software Secure needs to have both UserPhoto and PhotoID decrypted in
the same manner. So even though this is going to be the same for every
request, we're also using RSA encryption to encrypt the AES key for
faces.
"""
face_aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
face_aes_key = face_aes_key_str.decode("hex")
rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
rsa_encrypted_face_aes_key = rsa_encrypt(face_aes_key, rsa_key_str)
return rsa_encrypted_face_aes_key.encode("base64")
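    # Note: decode("hex") and encode("base64") above are Python 2 codec
    # spellings; a Python 3 port would presumably use binascii.unhexlify()
    # and base64.b64encode() instead.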
def create_request(self, copy_id_photo_from=None):
"""
Construct the HTTP request to the photo verification service.
Keyword Arguments:
copy_id_photo_from (SoftwareSecurePhotoVerification): If provided, re-send the ID photo
data from this attempt. This is used for reverification, in which new face photos
are sent with previously-submitted ID photos.
Returns:
tuple of (header, body), where both `header` and `body` are dictionaries.
"""
access_key = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"]
secret_key = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_SECRET_KEY"]
scheme = "https" if settings.HTTPS == "on" else "http"
callback_url = "{}://{}{}".format(
scheme, settings.SITE_NAME, reverse('verify_student_results_callback')
)
# If we're copying the photo ID image from a previous verification attempt,
# then we need to send the old image data with the correct image key.
photo_id_url = (
self.image_url("photo_id")
if copy_id_photo_from is None
else self.image_url("photo_id", override_receipt_id=copy_id_photo_from.receipt_id)
)
photo_id_key = (
self.photo_id_key
if copy_id_photo_from is None else
copy_id_photo_from.photo_id_key
)
body = {
"EdX-ID": str(self.receipt_id),
"ExpectedName": self.name,
"PhotoID": photo_id_url,
"PhotoIDKey": photo_id_key,
"SendResponseTo": callback_url,
"UserPhoto": self.image_url("face"),
"UserPhotoKey": self._encrypted_user_photo_key_str(),
}
headers = {
"Content-Type": "application/json",
"Date": formatdate(timeval=None, localtime=False, usegmt=True)
}
_message, _sig, authorization = generate_signed_message(
"POST", headers, body, access_key, secret_key
)
headers['Authorization'] = authorization
return headers, body
def request_message_txt(self):
"""
This is the body of the request we send across. This is never actually
used in the code, but exists for debugging purposes -- you can call
`print attempt.request_message_txt()` on the console and get a readable
rendering of the request that would be sent across, without actually
sending anything.
"""
headers, body = self.create_request()
header_txt = "\n".join(
u"{}: {}".format(h, v) for h, v in sorted(headers.items())
)
body_txt = json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8')
return header_txt + "\n\n" + body_txt
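    # Note: header_txt is unicode while body_txt is UTF-8 bytes, so the
    # concatenation above relies on Python 2's implicit coercion and can
    # raise UnicodeDecodeError when the body contains non-ASCII characters.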
def send_request(self, copy_id_photo_from=None):
"""
Assembles a submission to Software Secure and sends it via HTTPS.
Keyword Arguments:
copy_id_photo_from (SoftwareSecurePhotoVerification): If provided, re-send the ID photo
data from this attempt. This is used for reverification, in which new face photos
are sent with previously-submitted ID photos.
Returns:
request.Response
"""
# If AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING is True, we want to
# skip posting anything to Software Secure. We actually don't even
# create the message because that would require encryption and message
# signing that rely on settings.VERIFY_STUDENT values that aren't set
# in dev. So we just pretend like we successfully posted
if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
fake_response = requests.Response()
fake_response.status_code = 200
return fake_response
headers, body = self.create_request(copy_id_photo_from=copy_id_photo_from)
response = requests.post(
settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_URL"],
headers=headers,
data=json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8'),
verify=False
)
log.info(u"Sent request to Software Secure for receipt ID %s.", self.receipt_id)
if copy_id_photo_from is not None:
log.info(
(
u"Software Secure attempt with receipt ID %s used the same photo ID "
u"data as the receipt with ID %s"
),
self.receipt_id, copy_id_photo_from.receipt_id
)
log.debug("Headers:\n{}\n\n".format(headers))
log.debug("Body:\n{}\n\n".format(body))
log.debug(u"Return code: {}".format(response.status_code))
log.debug(u"Return message:\n\n{}\n\n".format(response.text))
return response
def should_display_status_to_user(self):
"""Whether or not the status from this attempt should be displayed to the user."""
return True
class VerificationDeadline(TimeStampedModel):
"""
Represent a verification deadline for a particular course.
The verification deadline is the datetime after which
users are no longer allowed to submit photos for initial verification
in a course.
Note that this is NOT the same as the "upgrade" deadline, after
which a user is no longer allowed to upgrade to a verified enrollment.
If no verification deadline record exists for a course,
then that course does not have a deadline. This means that users
can submit photos at any time.
.. no_pii:
"""
class Meta(object):
app_label = "verify_student"
course_key = CourseKeyField(
max_length=255,
db_index=True,
unique=True,
help_text=ugettext_lazy(u"The course for which this deadline applies"),
)
deadline = models.DateTimeField(
help_text=ugettext_lazy(
u"The datetime after which users are no longer allowed "
"to submit photos for verification."
)
)
# The system prefers to set this automatically based on default settings. But
# if the field is set manually we want a way to indicate that so we don't
# overwrite the manual setting of the field.
deadline_is_explicit = models.BooleanField(default=False)
ALL_DEADLINES_CACHE_KEY = "verify_student.all_verification_deadlines"
@classmethod
def set_deadline(cls, course_key, deadline, is_explicit=False):
"""
Configure the verification deadline for a course.
If `deadline` is `None`, then the course will have no verification
deadline. In this case, users will be able to verify for the course
at any time.
Arguments:
course_key (CourseKey): Identifier for the course.
deadline (datetime or None): The verification deadline.
"""
if deadline is None:
VerificationDeadline.objects.filter(course_key=course_key).delete()
else:
record, created = VerificationDeadline.objects.get_or_create(
course_key=course_key,
defaults={"deadline": deadline, "deadline_is_explicit": is_explicit}
)
if not created:
record.deadline = deadline
record.deadline_is_explicit = is_explicit
record.save()
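    # Usage sketch (hypothetical values):
    #   VerificationDeadline.set_deadline(course_key, some_datetime, is_explicit=True)
    #   VerificationDeadline.set_deadline(course_key, None)  # removes any deadline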
@classmethod
def deadlines_for_courses(cls, course_keys):
"""
Retrieve verification deadlines for particular courses.
Arguments:
course_keys (list): List of `CourseKey`s.
Returns:
dict: Map of course keys to datetimes (verification deadlines)
"""
all_deadlines = cache.get(cls.ALL_DEADLINES_CACHE_KEY)
if all_deadlines is None:
all_deadlines = {
deadline.course_key: deadline.deadline
for deadline in VerificationDeadline.objects.all()
}
cache.set(cls.ALL_DEADLINES_CACHE_KEY, all_deadlines)
return {
course_key: all_deadlines[course_key]
for course_key in course_keys
if course_key in all_deadlines
}
@classmethod
def deadline_for_course(cls, course_key):
"""
Retrieve the verification deadline for a particular course.
Arguments:
course_key (CourseKey): The identifier for the course.
Returns:
datetime or None
"""
try:
deadline = cls.objects.get(course_key=course_key)
return deadline.deadline
except cls.DoesNotExist:
return None
@receiver(models.signals.post_save, sender=VerificationDeadline)
@receiver(models.signals.post_delete, sender=VerificationDeadline)
def invalidate_deadline_caches(sender, **kwargs): # pylint: disable=unused-argument
"""Invalidate the cached verification deadline information. """
cache.delete(VerificationDeadline.ALL_DEADLINES_CACHE_KEY)
| jolyonb/edx-platform | lms/djangoapps/verify_student/models.py | Python | agpl-3.0 | 40,982 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime
from dateutil.relativedelta import relativedelta
import time
from operator import itemgetter
from itertools import groupby
from openerp.osv import fields, osv, orm
from openerp.tools.translate import _
from openerp import netsvc
from openerp import tools
from openerp.tools import float_compare, DEFAULT_SERVER_DATETIME_FORMAT
import openerp.addons.decimal_precision as dp
import logging
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Incoterms
#----------------------------------------------------------
class stock_incoterms(osv.osv):
_name = "stock.incoterms"
_description = "Incoterms"
_columns = {
        'name': fields.char('Name', size=64, required=True, help="Incoterms are a series of sales terms. They are used to divide transaction costs and responsibilities between buyer and seller and reflect state-of-the-art transportation practices."),
'code': fields.char('Code', size=3, required=True, help="Code for Incoterms"),
'active': fields.boolean('Active', help="By unchecking the active field, you may hide an INCOTERM without deleting it."),
}
_defaults = {
'active': True,
}
stock_incoterms()
class stock_journal(osv.osv):
_name = "stock.journal"
_description = "Stock Journal"
_columns = {
'name': fields.char('Stock Journal', size=32, required=True),
'user_id': fields.many2one('res.users', 'Responsible'),
}
_defaults = {
'user_id': lambda s, c, u, ctx: u
}
stock_journal()
#----------------------------------------------------------
# Stock Location
#----------------------------------------------------------
class stock_location(osv.osv):
_name = "stock.location"
_description = "Location"
_parent_name = "location_id"
_parent_store = True
_parent_order = 'posz,name'
_order = 'parent_left'
# TODO: implement name_search() in a way that matches the results of name_get!
def name_get(self, cr, uid, ids, context=None):
# always return the full hierarchical name
res = self._complete_name(cr, uid, ids, 'complete_name', None, context=context)
return res.items()
def _complete_name(self, cr, uid, ids, name, args, context=None):
""" Forms complete name of location from parent location to child location.
@return: Dictionary of values
"""
res = {}
for m in self.browse(cr, uid, ids, context=context):
names = [m.name]
parent = m.location_id
while parent:
names.append(parent.name)
parent = parent.location_id
res[m.id] = ' / '.join(reversed(names))
return res
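    # e.g. (illustrative names) a location "Shelf 1" under "Stock" under
    # "Physical Locations" yields "Physical Locations / Stock / Shelf 1".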
def _get_sublocations(self, cr, uid, ids, context=None):
""" return all sublocations of the given stock locations (included) """
return self.search(cr, uid, [('id', 'child_of', ids)], context=context)
def _product_value(self, cr, uid, ids, field_names, arg, context=None):
"""Computes stock value (real and virtual) for a product, as well as stock qty (real and virtual).
@param field_names: Name of field
@return: Dictionary of values
"""
prod_id = context and context.get('product_id', False)
if not prod_id:
return dict([(i, {}.fromkeys(field_names, 0.0)) for i in ids])
product_product_obj = self.pool.get('product.product')
cr.execute('select distinct product_id, location_id from stock_move where location_id in %s', (tuple(ids), ))
dict1 = cr.dictfetchall()
cr.execute('select distinct product_id, location_dest_id as location_id from stock_move where location_dest_id in %s', (tuple(ids), ))
dict2 = cr.dictfetchall()
res_products_by_location = sorted(dict1+dict2, key=itemgetter('location_id'))
products_by_location = dict((k, [v['product_id'] for v in itr]) for k, itr in groupby(res_products_by_location, itemgetter('location_id')))
result = dict([(i, {}.fromkeys(field_names, 0.0)) for i in ids])
result.update(dict([(i, {}.fromkeys(field_names, 0.0)) for i in list(set([aaa['location_id'] for aaa in res_products_by_location]))]))
currency_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.currency_id.id
currency_obj = self.pool.get('res.currency')
currency = currency_obj.browse(cr, uid, currency_id, context=context)
for loc_id, product_ids in products_by_location.items():
if prod_id:
product_ids = [prod_id]
c = (context or {}).copy()
c['location'] = loc_id
for prod in product_product_obj.browse(cr, uid, product_ids, context=c):
for f in field_names:
if f == 'stock_real':
if loc_id not in result:
result[loc_id] = {}
result[loc_id][f] += prod.qty_available
elif f == 'stock_virtual':
result[loc_id][f] += prod.virtual_available
elif f == 'stock_real_value':
amount = prod.qty_available * prod.standard_price
amount = currency_obj.round(cr, uid, currency, amount)
result[loc_id][f] += amount
elif f == 'stock_virtual_value':
amount = prod.virtual_available * prod.standard_price
amount = currency_obj.round(cr, uid, currency, amount)
result[loc_id][f] += amount
return result
_columns = {
'name': fields.char('Location Name', size=64, required=True, translate=True),
'active': fields.boolean('Active', help="By unchecking the active field, you may hide a location without deleting it."),
'usage': fields.selection([('supplier', 'Supplier Location'), ('view', 'View'), ('internal', 'Internal Location'), ('customer', 'Customer Location'), ('inventory', 'Inventory'), ('procurement', 'Procurement'), ('production', 'Production'), ('transit', 'Transit Location for Inter-Companies Transfers')], 'Location Type', required=True,
help="""* Supplier Location: Virtual location representing the source location for products coming from your suppliers
                       \n* View: Virtual location used to create a hierarchical structure for your warehouse, aggregating its child locations; can't directly contain products
\n* Internal Location: Physical locations inside your own warehouses,
\n* Customer Location: Virtual location representing the destination location for products sent to your customers
\n* Inventory: Virtual location serving as counterpart for inventory operations used to correct stock levels (Physical inventories)
\n* Procurement: Virtual location serving as temporary counterpart for procurement operations when the source (supplier or production) is not known yet. This location should be empty when the procurement scheduler has finished running.
\n* Production: Virtual counterpart location for production operations: this location consumes the raw material and produces finished products
""", select = True),
# temporarily removed, as it's unused: 'allocation_method': fields.selection([('fifo', 'FIFO'), ('lifo', 'LIFO'), ('nearest', 'Nearest')], 'Allocation Method', required=True),
'complete_name': fields.function(_complete_name, type='char', size=256, string="Location Name",
store={'stock.location': (_get_sublocations, ['name', 'location_id'], 10)}),
'stock_real': fields.function(_product_value, type='float', string='Real Stock', multi="stock"),
'stock_virtual': fields.function(_product_value, type='float', string='Virtual Stock', multi="stock"),
'location_id': fields.many2one('stock.location', 'Parent Location', select=True, ondelete='cascade'),
'child_ids': fields.one2many('stock.location', 'location_id', 'Contains'),
'chained_journal_id': fields.many2one('stock.journal', 'Chaining Journal',help="Inventory Journal in which the chained move will be written, if the Chaining Type is not Transparent (no journal is used if left empty)"),
'chained_location_id': fields.many2one('stock.location', 'Chained Location If Fixed'),
'chained_location_type': fields.selection([('none', 'None'), ('customer', 'Customer'), ('fixed', 'Fixed Location')],
'Chained Location Type', required=True,
help="Determines whether this location is chained to another location, i.e. any incoming product in this location \n" \
"should next go to the chained location. The chained location is determined according to the type :"\
"\n* None: No chaining at all"\
"\n* Customer: The chained location will be taken from the Customer Location field on the Partner form of the Partner that is specified in the Picking list of the incoming products." \
"\n* Fixed Location: The chained location is taken from the next field: Chained Location if Fixed." \
),
'chained_auto_packing': fields.selection(
[('auto', 'Automatic Move'), ('manual', 'Manual Operation'), ('transparent', 'Automatic No Step Added')],
'Chaining Type',
required=True,
help="This is used only if you select a chained location type.\n" \
"The 'Automatic Move' value will create a stock move after the current one that will be "\
"validated automatically. With 'Manual Operation', the stock move has to be validated "\
"by a worker. With 'Automatic No Step Added', the location is replaced in the original move."
),
'chained_picking_type': fields.selection([('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')], 'Shipping Type', help="Shipping Type of the Picking List that will contain the chained move (leave empty to automatically detect the type based on the source and destination locations)."),
        'chained_company_id': fields.many2one('res.company', 'Chained Company', help='The company the Picking List containing the chained move will belong to (leave empty to use the default company determination rules)'),
'chained_delay': fields.integer('Chaining Lead Time',help="Delay between original move and chained move in days"),
'partner_id': fields.many2one('res.partner', 'Location Address',help="Address of customer or supplier."),
        'icon': fields.selection(tools.icons, 'Icon', size=64, help="Icon shown in the hierarchical tree view"),
'comment': fields.text('Additional Information'),
'posx': fields.integer('Corridor (X)',help="Optional localization details, for information purpose only"),
'posy': fields.integer('Shelves (Y)', help="Optional localization details, for information purpose only"),
'posz': fields.integer('Height (Z)', help="Optional localization details, for information purpose only"),
'parent_left': fields.integer('Left Parent', select=1),
'parent_right': fields.integer('Right Parent', select=1),
'stock_real_value': fields.function(_product_value, type='float', string='Real Stock Value', multi="stock", digits_compute=dp.get_precision('Account')),
'stock_virtual_value': fields.function(_product_value, type='float', string='Virtual Stock Value', multi="stock", digits_compute=dp.get_precision('Account')),
'company_id': fields.many2one('res.company', 'Company', select=1, help='Let this field empty if this location is shared between all companies'),
'scrap_location': fields.boolean('Scrap Location', help='Check this box to allow using this location to put scrapped/damaged goods.'),
'valuation_in_account_id': fields.many2one('account.account', 'Stock Valuation Account (Incoming)', domain = [('type','=','other')],
help="Used for real-time inventory valuation. When set on a virtual location (non internal type), "
"this account will be used to hold the value of products being moved from an internal location "
"into this location, instead of the generic Stock Output Account set on the product. "
"This has no effect for internal locations."),
'valuation_out_account_id': fields.many2one('account.account', 'Stock Valuation Account (Outgoing)', domain = [('type','=','other')],
help="Used for real-time inventory valuation. When set on a virtual location (non internal type), "
"this account will be used to hold the value of products being moved out of this location "
"and into an internal location, instead of the generic Stock Output Account set on the product. "
"This has no effect for internal locations."),
}
_defaults = {
'active': True,
'usage': 'internal',
'chained_location_type': 'none',
'chained_auto_packing': 'manual',
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.location', context=c),
'posx': 0,
'posy': 0,
'posz': 0,
'icon': False,
'scrap_location': False,
}
def chained_location_get(self, cr, uid, location, partner=None, product=None, context=None):
""" Finds chained location
@param location: Location id
@param partner: Partner id
@param product: Product id
@return: List of values
"""
result = None
if location.chained_location_type == 'customer':
if partner:
result = partner.property_stock_customer
else:
loc_id = self.pool['res.partner'].default_get(cr, uid, ['property_stock_customer'], context=context)['property_stock_customer']
result = self.pool['stock.location'].browse(cr, uid, loc_id, context=context)
elif location.chained_location_type == 'fixed':
result = location.chained_location_id
if result:
return result, location.chained_auto_packing, location.chained_delay, location.chained_journal_id and location.chained_journal_id.id or False, location.chained_company_id and location.chained_company_id.id or False, location.chained_picking_type, False
return result
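    # For reference, the 7-tuple returned above is:
    #   (chained_location, chained_auto_packing, chained_delay,
    #    chained_journal_id or False, chained_company_id or False,
    #    chained_picking_type, False)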
def picking_type_get(self, cr, uid, from_location, to_location, context=None):
""" Gets type of picking.
@param from_location: Source location
@param to_location: Destination location
@return: Location type
"""
result = 'internal'
        if (from_location.usage == 'internal') and (to_location and to_location.usage in ('customer', 'supplier')):
result = 'out'
elif (from_location.usage in ('supplier', 'customer')) and (to_location.usage == 'internal'):
result = 'in'
return result
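    # Summary of the mapping above:
    #   internal -> customer/supplier => 'out'
    #   supplier/customer -> internal => 'in'
    #   anything else                 => 'internal'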
def _product_get_all_report(self, cr, uid, ids, product_ids=False, context=None):
return self._product_get_report(cr, uid, ids, product_ids, context, recursive=True)
def _product_get_report(self, cr, uid, ids, product_ids=False,
context=None, recursive=False):
""" Finds the product quantity and price for particular location.
@param product_ids: Ids of product
@param recursive: True or False
@return: Dictionary of values
"""
if context is None:
context = {}
product_obj = self.pool.get('product.product')
# Take the user company and pricetype
context['currency_id'] = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
# To be able to offer recursive or non-recursive reports we need to prevent recursive quantities by default
context['compute_child'] = False
if not product_ids:
product_ids = product_obj.search(cr, uid, [], context={'active_test': False})
products = product_obj.browse(cr, uid, product_ids, context=context)
products_by_uom = {}
products_by_id = {}
for product in products:
products_by_uom.setdefault(product.uom_id.id, [])
products_by_uom[product.uom_id.id].append(product)
            products_by_id[product.id] = product
result = {}
result['product'] = []
for id in ids:
quantity_total = 0.0
total_price = 0.0
for uom_id in products_by_uom.keys():
fnc = self._product_get
if recursive:
fnc = self._product_all_get
ctx = context.copy()
ctx['uom'] = uom_id
qty = fnc(cr, uid, id, [x.id for x in products_by_uom[uom_id]],
context=ctx)
for product_id in qty.keys():
if not qty[product_id]:
continue
product = products_by_id[product_id]
quantity_total += qty[product_id]
# Compute based on pricetype
                    # Choose the right field, standard_price, to read
amount_unit = product.price_get('standard_price', context=context)[product.id]
price = qty[product_id] * amount_unit
total_price += price
result['product'].append({
'price': amount_unit,
'prod_name': product.name,
'code': product.default_code, # used by lot_overview_all report!
'variants': product.variants or '',
'uom': product.uom_id.name,
'prod_qty': qty[product_id],
'price_value': price,
})
result['total'] = quantity_total
result['total_price'] = total_price
return result
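    # Shape of the returned dict (values are illustrative):
    #   {'product': [{'price': 10.0, 'prod_name': 'Widget', 'code': 'W1',
    #                 'variants': '', 'uom': 'Unit', 'prod_qty': 4.0,
    #                 'price_value': 40.0}],
    #    'total': 4.0, 'total_price': 40.0}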
def _product_get_multi_location(self, cr, uid, ids, product_ids=False, context=None,
states=['done'], what=('in', 'out')):
"""
@param product_ids: Ids of product
@param states: List of states
@param what: Tuple of
@return:
"""
product_obj = self.pool.get('product.product')
if context is None:
context = {}
context.update({
'states': states,
'what': what,
'location': ids
})
return product_obj.get_product_available(cr, uid, product_ids, context=context)
def _product_get(self, cr, uid, id, product_ids=False, context=None, states=None):
"""
@param product_ids:
@param states:
@return:
"""
if states is None:
states = ['done']
ids = id and [id] or []
return self._product_get_multi_location(cr, uid, ids, product_ids, context=context, states=states)
def _product_all_get(self, cr, uid, id, product_ids=False, context=None, states=None):
if states is None:
states = ['done']
# build the list of ids of children of the location given by id
ids = id and [id] or []
location_ids = self.search(cr, uid, [('location_id', 'child_of', ids)])
return self._product_get_multi_location(cr, uid, location_ids, product_ids, context, states)
def _product_virtual_get(self, cr, uid, id, product_ids=False, context=None, states=None):
if states is None:
states = ['done']
return self._product_all_get(cr, uid, id, product_ids, context, ['confirmed', 'waiting', 'assigned', 'done'])
def _product_reserve(self, cr, uid, ids, product_id, product_qty, context=None, lock=False):
"""
Attempt to find a quantity ``product_qty`` (in the product's default uom or the uom passed in ``context``) of product ``product_id``
in locations with id ``ids`` and their child locations. If ``lock`` is True, the stock.move lines
of product with id ``product_id`` in the searched location will be write-locked using Postgres's
"FOR UPDATE NOWAIT" option until the transaction is committed or rolled back, to prevent reservin
twice the same products.
If ``lock`` is True and the lock cannot be obtained (because another transaction has locked some of
the same stock.move lines), a log line will be output and False will be returned, as if there was
not enough stock.
:param product_id: Id of product to reserve
:param product_qty: Quantity of product to reserve (in the product's default uom or the uom passed in ``context``)
:param lock: if True, the stock.move lines of product with id ``product_id`` in all locations (and children locations) with ``ids`` will
be write-locked using postgres's "FOR UPDATE NOWAIT" option until the transaction is committed or rolled back. This is
to prevent reserving twice the same products.
:param context: optional context dictionary: if a 'uom' key is present it will be used instead of the default product uom to
compute the ``product_qty`` and in the return value.
        :return: List of tuples in the form (qty, location_id) with the (partial) quantities that can be taken in each location to
                 reach the requested product_qty (``qty`` is expressed in the default uom of the product), or False if enough
                 products could not be found, or the lock could not be obtained (and ``lock`` was True).
"""
result = []
amount = 0.0
if context is None:
context = {}
uom_obj = self.pool.get('product.uom')
uom_rounding = self.pool.get('product.product').browse(cr, uid, product_id, context=context).uom_id.rounding
if context.get('uom'):
uom_rounding = uom_obj.browse(cr, uid, context.get('uom'), context=context).rounding
locations_ids = self.search(cr, uid, [('location_id', 'child_of', ids)])
if locations_ids:
# Fetch only the locations in which this product has ever been processed (in or out)
cr.execute("""SELECT l.id FROM stock_location l WHERE l.id in %s AND
EXISTS (SELECT 1 FROM stock_move m WHERE m.product_id = %s
AND ((state = 'done' AND m.location_dest_id = l.id)
OR (state in ('done','assigned') AND m.location_id = l.id)))
""", (tuple(locations_ids), product_id,))
locations_ids = [i for (i,) in cr.fetchall()]
for id in locations_ids:
if lock:
try:
# Must lock with a separate select query because FOR UPDATE can't be used with
# aggregation/group by's (when individual rows aren't identifiable).
# We use a SAVEPOINT to be able to rollback this part of the transaction without
# failing the whole transaction in case the LOCK cannot be acquired.
cr.execute("SAVEPOINT stock_location_product_reserve")
cr.execute("""SELECT id FROM stock_move
WHERE product_id=%s AND
(
(location_dest_id=%s AND
location_id<>%s AND
state='done')
OR
(location_id=%s AND
location_dest_id<>%s AND
state in ('done', 'assigned'))
)
FOR UPDATE of stock_move NOWAIT""", (product_id, id, id, id, id), log_exceptions=False)
except Exception:
# Here it's likely that the FOR UPDATE NOWAIT failed to get the LOCK,
# so we ROLLBACK to the SAVEPOINT to restore the transaction to its earlier
# state, we return False as if the products were not available, and log it:
cr.execute("ROLLBACK TO stock_location_product_reserve")
_logger.warning("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id)
_logger.debug("Trace of the failed product reservation attempt: ", exc_info=True)
return False
# XXX TODO: rewrite this with one single query, possibly even the quantity conversion
cr.execute("""SELECT product_uom, sum(product_qty) AS product_qty
FROM stock_move
WHERE location_dest_id=%s AND
location_id<>%s AND
product_id=%s AND
state='done'
GROUP BY product_uom
""",
(id, id, product_id))
results = cr.dictfetchall()
cr.execute("""SELECT product_uom,-sum(product_qty) AS product_qty
FROM stock_move
WHERE location_id=%s AND
location_dest_id<>%s AND
product_id=%s AND
state in ('done', 'assigned')
GROUP BY product_uom
""",
(id, id, product_id))
results += cr.dictfetchall()
            total = 0.0
            for r in results:
                total += uom_obj._compute_qty(cr, uid, r['product_uom'], r['product_qty'], context.get('uom', False))
            if total <= 0.0:
                continue
            # quantity available at this location, expressed in the requested uom
            amount = total
compare_qty = float_compare(amount, 0, precision_rounding=uom_rounding)
if compare_qty == 1:
if amount > min(total, product_qty):
amount = min(product_qty, total)
result.append((amount, id))
product_qty -= amount
total -= amount
if product_qty <= 0.0:
return result
if total <= 0.0:
continue
return False
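    # Illustrative call (hypothetical ids, assuming an open cursor ``cr``):
    #   res = self.pool.get('stock.location')._product_reserve(
    #       cr, uid, [stock_location_id], product_id=42, product_qty=10.0, lock=True)
    #   # e.g. [(6.0, 12), (4.0, 15)], or False when stock is insufficient
    #   # or the row locks could not be acquired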
stock_location()
class stock_tracking(osv.osv):
_name = "stock.tracking"
_description = "Packs"
    @staticmethod
    def checksum(sscc):
        """Compute the GS1 check digit for a 17-digit SSCC body."""
        salt = '31' * 8 + '3'
        total = 0
        for sscc_part, salt_part in zip(sscc, salt):
            total += int(sscc_part) * int(salt_part)
        return (10 - (total % 10)) % 10
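    # Worked example of the GS1 weighting (3, 1, 3, 1, ... from the left):
    #   checksum('12345678901234567') == 5
    # so the full pack reference would be '123456789012345675'.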
def make_sscc(self, cr, uid, context=None):
sequence = self.pool.get('ir.sequence').get(cr, uid, 'stock.lot.tracking')
try:
return sequence + str(self.checksum(sequence))
except Exception:
return sequence
_columns = {
        'name': fields.char('Pack Reference', size=64, required=True, select=True, help="By default, the pack reference is generated following the SSCC standard (serial number + 1 check digit)."),
'active': fields.boolean('Active', help="By unchecking the active field, you may hide a pack without deleting it."),
'serial': fields.char('Additional Reference', size=64, select=True, help="Other reference or serial number"),
'move_ids': fields.one2many('stock.move', 'tracking_id', 'Moves for this pack', readonly=True),
'date': fields.datetime('Creation Date', required=True),
}
_defaults = {
'active': 1,
'name': make_sscc,
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
}
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
ids = self.search(cr, user, [('serial', '=', name)]+ args, limit=limit, context=context)
ids += self.search(cr, user, [('name', operator, name)]+ args, limit=limit, context=context)
return self.name_get(cr, user, ids, context)
def name_get(self, cr, uid, ids, context=None):
"""Append the serial to the name"""
if not len(ids):
return []
res = [ (r['id'], r['serial'] and '%s [%s]' % (r['name'], r['serial'])
or r['name'] )
for r in self.read(cr, uid, ids, ['name', 'serial'],
context=context) ]
return res
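    # e.g. a pack named 'PACK0001' with serial 'SER-42' is displayed as
    # 'PACK0001 [SER-42]' (illustrative values).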
def unlink(self, cr, uid, ids, context=None):
raise osv.except_osv(_('Error!'), _('You cannot remove a lot line.'))
def action_traceability(self, cr, uid, ids, context=None):
""" It traces the information of a product
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param ids: List of IDs selected
@param context: A standard dictionary
@return: A dictionary of values
"""
return self.pool.get('action.traceability').action_traceability(cr,uid,ids,context)
stock_tracking()
#----------------------------------------------------------
# Stock Picking
#----------------------------------------------------------
class stock_picking(osv.osv):
_name = "stock.picking"
_inherit = ['mail.thread']
_description = "Picking List"
_order = "id desc"
def _set_maximum_date(self, cr, uid, ids, name, value, arg, context=None):
""" Calculates planned date if it is greater than 'value'.
@param name: Name of field
@param value: Value of field
@param arg: User defined argument
@return: True or False
"""
if not value:
return False
if isinstance(ids, (int, long)):
ids = [ids]
        for pick in self.browse(cr, uid, ids, context=context):
            # parameterized query: never interpolate values into SQL strings
            sql_str = "update stock_move set date_expected=%s where picking_id=%s"
            params = [value, pick.id]
            if pick.max_date:
                sql_str += " and date_expected=%s"
                params.append(pick.max_date)
            cr.execute(sql_str, tuple(params))
        return True
def _set_minimum_date(self, cr, uid, ids, name, value, arg, context=None):
""" Calculates planned date if it is less than 'value'.
@param name: Name of field
@param value: Value of field
@param arg: User defined argument
@return: True or False
"""
if not value:
return False
if isinstance(ids, (int, long)):
ids = [ids]
        for pick in self.browse(cr, uid, ids, context=context):
            # parameterized query: never interpolate values into SQL strings
            sql_str = "update stock_move set date_expected=%s where picking_id=%s"
            params = [value, pick.id]
            if pick.min_date:
                sql_str += " and date_expected=%s"
                params.append(pick.min_date)
            cr.execute(sql_str, tuple(params))
        return True
def get_min_max_date(self, cr, uid, ids, field_name, arg, context=None):
""" Finds minimum and maximum dates for picking.
@return: Dictionary of values
"""
res = {}
for id in ids:
res[id] = {'min_date': False, 'max_date': False}
if not ids:
return res
cr.execute("""select
picking_id,
min(date_expected),
max(date_expected)
from
stock_move
where
picking_id IN %s
group by
picking_id""",(tuple(ids),))
for pick, dt1, dt2 in cr.fetchall():
res[pick]['min_date'] = dt1
res[pick]['max_date'] = dt2
return res
def create(self, cr, user, vals, context=None):
if ('name' not in vals) or (vals.get('name')=='/'):
seq_obj_name = self._name
vals['name'] = self.pool.get('ir.sequence').get(cr, user, seq_obj_name)
new_id = super(stock_picking, self).create(cr, user, vals, context)
return new_id
_columns = {
'name': fields.char('Reference', size=64, select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'origin': fields.char('Source Document', size=64, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="Reference of the document", select=True),
'backorder_id': fields.many2one('stock.picking', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
        'type': fields.selection([('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')], 'Shipping Type', required=True, select=True, help="Shipping type specifies whether goods are coming in or going out."),
'note': fields.text('Notes', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'stock_journal_id': fields.many2one('stock.journal','Stock Journal', select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
        'location_id': fields.many2one('stock.location', 'Location', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="Keep empty if you produce at the location where the finished products are needed. " \
"Set a location if you produce at a fixed location. This can be a partner location " \
"if you subcontract the manufacturing operations.", select=True),
'location_dest_id': fields.many2one('stock.location', 'Dest. Location', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="Location where the system will stock the finished products.", select=True),
        'move_type': fields.selection([('direct', 'Partial'), ('one', 'All at once')], 'Delivery Method', required=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="It specifies whether the goods are to be delivered partially or all at once"),
'state': fields.selection([
('draft', 'Draft'),
('cancel', 'Cancelled'),
('auto', 'Waiting Another Operation'),
('confirmed', 'Waiting Availability'),
('assigned', 'Ready to Transfer'),
('done', 'Transferred'),
], 'Status', readonly=True, select=True, track_visibility='onchange', help="""
* Draft: not confirmed yet and will not be scheduled until confirmed\n
* Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
* Waiting Availability: still waiting for the availability of products\n
* Ready to Transfer: products reserved, simply waiting for confirmation.\n
* Transferred: has been processed, can't be modified or cancelled anymore\n
* Cancelled: has been cancelled, can't be confirmed anymore"""
),
'min_date': fields.function(get_min_max_date, fnct_inv=_set_minimum_date, multi="min_max_date",
store=True, type='datetime', string='Scheduled Time', select=1, help="Scheduled time for the shipment to be processed"),
'date': fields.datetime('Creation Date', help="Creation date, usually the time of the order.", select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'date_done': fields.datetime('Date of Transfer', help="Date of Completion", states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'max_date': fields.function(get_min_max_date, fnct_inv=_set_maximum_date, multi="min_max_date",
store=True, type='datetime', string='Max. Expected Date', select=2),
'move_lines': fields.one2many('stock.move', 'picking_id', 'Internal Moves', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}),
'product_id': fields.related('move_lines', 'product_id', type='many2one', relation='product.product', string='Product'),
'auto_picking': fields.boolean('Auto-Picking', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'partner_id': fields.many2one('res.partner', 'Partner', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'invoice_state': fields.selection([
("invoiced", "Invoiced"),
("2binvoiced", "To Be Invoiced"),
("none", "Not Applicable")], "Invoice Control",
select=True, required=True, readonly=True, track_visibility='onchange', states={'draft': [('readonly', False)]}),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
}
_defaults = {
'name': lambda self, cr, uid, context: '/',
'state': 'draft',
'move_type': 'direct',
'type': 'internal',
'invoice_state': 'none',
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.picking', context=c)
}
_sql_constraints = [
('name_uniq', 'unique(name, company_id)', 'Reference must be unique per Company!'),
]
    def action_process(self, cr, uid, ids, context=None):
        """Open the partial picking wizard"""
        if context is None:
            context = {}
context.update({
'active_model': self._name,
'active_ids': ids,
'active_id': len(ids) and ids[0] or False
})
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'stock.partial.picking',
'type': 'ir.actions.act_window',
'target': 'new',
'context': context,
'nodestroy': True,
}
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default = default.copy()
picking_obj = self.browse(cr, uid, id, context=context)
move_obj = self.pool.get('stock.move')
if ('name' not in default) or (picking_obj.name == '/'):
seq_obj_name = 'stock.picking.' + picking_obj.type
default['name'] = self.pool.get('ir.sequence').get(cr, uid, seq_obj_name)
default['origin'] = ''
default['backorder_id'] = False
if 'invoice_state' not in default and picking_obj.invoice_state == 'invoiced':
default['invoice_state'] = '2binvoiced'
res = super(stock_picking, self).copy(cr, uid, id, default, context)
if res:
picking_obj = self.browse(cr, uid, res, context=context)
for move in picking_obj.move_lines:
move_obj.write(cr, uid, [move.id], {'tracking_id': False, 'prodlot_id': False, 'move_history_ids2': [(6, 0, [])], 'move_history_ids': [(6, 0, [])]})
return res
def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False):
if view_type == 'form' and not view_id:
mod_obj = self.pool.get('ir.model.data')
if self._name == "stock.picking.in":
model, view_id = mod_obj.get_object_reference(cr, uid, 'stock', 'view_picking_in_form')
if self._name == "stock.picking.out":
model, view_id = mod_obj.get_object_reference(cr, uid, 'stock', 'view_picking_out_form')
return super(stock_picking, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
def onchange_partner_in(self, cr, uid, ids, partner_id=None, context=None):
return {}
def action_explode(self, cr, uid, moves, context=None):
"""Hook to allow other modules to split the moves of a picking."""
return moves
def action_confirm(self, cr, uid, ids, context=None):
""" Confirms picking.
@return: True
"""
pickings = self.browse(cr, uid, ids, context=context)
self.write(cr, uid, ids, {'state': 'confirmed'})
todo = []
for picking in pickings:
for r in picking.move_lines:
if r.state == 'draft':
todo.append(r.id)
todo = self.action_explode(cr, uid, todo, context)
if len(todo):
self.pool.get('stock.move').action_confirm(cr, uid, todo, context=context)
return True
def test_auto_picking(self, cr, uid, ids):
        # TODO: check whether source and destination are the same location?
return True
def action_assign(self, cr, uid, ids, *args):
""" Changes state of picking to available if all moves are confirmed.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
if pick.state == 'draft':
wf_service.trg_validate(uid, 'stock.picking', pick.id, 'button_confirm', cr)
move_ids = [x.id for x in pick.move_lines if x.state == 'confirmed']
if not move_ids:
raise osv.except_osv(_('Warning!'),_('Not enough stock, unable to reserve the products.'))
self.pool.get('stock.move').action_assign(cr, uid, move_ids)
return True
def force_assign(self, cr, uid, ids, *args):
""" Changes state of picking to available if moves are confirmed or waiting.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
move_ids = [x.id for x in pick.move_lines if x.state in ['confirmed','waiting']]
self.pool.get('stock.move').force_assign(cr, uid, move_ids)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
return True
def draft_force_assign(self, cr, uid, ids, *args):
""" Confirms picking directly from draft state.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
if not pick.move_lines:
raise osv.except_osv(_('Error!'),_('You cannot process picking without stock moves.'))
wf_service.trg_validate(uid, 'stock.picking', pick.id,
'button_confirm', cr)
return True
def draft_validate(self, cr, uid, ids, context=None):
""" Validates picking directly from draft state.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
self.draft_force_assign(cr, uid, ids)
for pick in self.browse(cr, uid, ids, context=context):
move_ids = [x.id for x in pick.move_lines]
self.pool.get('stock.move').force_assign(cr, uid, move_ids)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
return self.action_process(
cr, uid, ids, context=context)
def cancel_assign(self, cr, uid, ids, *args):
""" Cancels picking and moves.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
move_ids = [x.id for x in pick.move_lines]
self.pool.get('stock.move').cancel_assign(cr, uid, move_ids)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
return True
def action_assign_wkf(self, cr, uid, ids, context=None):
""" Changes picking state to assigned.
@return: True
"""
self.write(cr, uid, ids, {'state': 'assigned'})
return True
def test_finished(self, cr, uid, ids):
""" Tests whether the move is in done or cancel state or not.
@return: True or False
"""
move_ids = self.pool.get('stock.move').search(cr, uid, [('picking_id', 'in', ids)])
for move in self.pool.get('stock.move').browse(cr, uid, move_ids):
if move.state not in ('done', 'cancel'):
if move.product_qty != 0.0:
return False
else:
move.write({'state': 'done'})
return True
def test_assigned(self, cr, uid, ids):
""" Tests whether the move is in assigned state or not.
@return: True or False
"""
        #TOFIX: assignment of move lines should be called before testing assignment; otherwise the picking never reaches the assigned state
ok = True
for pick in self.browse(cr, uid, ids):
mt = pick.move_type
            # incoming shipments are always set as available if they aren't chained
if pick.type == 'in':
if all([x.state != 'waiting' for x in pick.move_lines]):
return True
for move in pick.move_lines:
if (move.state in ('confirmed', 'draft')) and (mt == 'one'):
return False
if (mt == 'direct') and (move.state == 'assigned') and (move.product_qty):
return True
ok = ok and (move.state in ('cancel', 'done', 'assigned'))
return ok
def action_cancel(self, cr, uid, ids, context=None):
""" Changes picking state to cancel.
@return: True
"""
for pick in self.browse(cr, uid, ids, context=context):
ids2 = [move.id for move in pick.move_lines]
self.pool.get('stock.move').action_cancel(cr, uid, ids2, context)
self.write(cr, uid, ids, {'state': 'cancel', 'invoice_state': 'none'})
return True
#
# TODO: change and create a move if not parents
#
def action_done(self, cr, uid, ids, context=None):
"""Changes picking state to done.
This method is called at the end of the workflow by the activity "done".
@return: True
"""
self.write(cr, uid, ids, {'state': 'done', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S')})
return True
def action_move(self, cr, uid, ids, context=None):
"""Process the Stock Moves of the Picking
This method is called by the workflow by the activity "move".
Normally that happens when the signal button_done is received (button
"Done" pressed on a Picking view).
@return: True
"""
for pick in self.browse(cr, uid, ids, context=context):
todo = []
for move in pick.move_lines:
if move.state == 'draft':
self.pool.get('stock.move').action_confirm(cr, uid, [move.id],
context=context)
todo.append(move.id)
elif move.state in ('assigned','confirmed'):
todo.append(move.id)
if len(todo):
self.pool.get('stock.move').action_done(cr, uid, todo,
context=context)
return True
def get_currency_id(self, cr, uid, picking):
return False
def _get_partner_to_invoice(self, cr, uid, picking, context=None):
""" Gets the partner that will be invoiced
Note that this function is inherited in the sale and purchase modules
@param picking: object of the picking for which we are selecting the partner to invoice
        @return: id (or, in overriding modules, browse record) of the partner to invoice
"""
return picking.partner_id and picking.partner_id.id
def _get_comment_invoice(self, cr, uid, picking):
"""
@return: comment string for invoice
"""
return picking.note or ''
def _get_price_unit_invoice(self, cr, uid, move_line, type, context=None):
""" Gets price unit for invoice
@param move_line: Stock move lines
@param type: Type of invoice
@return: The price unit for the move line
"""
if context is None:
context = {}
if type in ('in_invoice', 'in_refund'):
# Take the user company and pricetype
context['currency_id'] = move_line.company_id.currency_id.id
amount_unit = move_line.product_id.price_get('standard_price', context=context)[move_line.product_id.id]
return amount_unit
else:
return move_line.product_id.list_price
def _get_discount_invoice(self, cr, uid, move_line):
'''Return the discount for the move line'''
return 0.0
def _get_taxes_invoice(self, cr, uid, move_line, type):
""" Gets taxes on invoice
@param move_line: Stock move lines
@param type: Type of invoice
@return: Taxes Ids for the move line
"""
if type in ('in_invoice', 'in_refund'):
taxes = move_line.product_id.supplier_taxes_id
else:
taxes = move_line.product_id.taxes_id
if move_line.picking_id and move_line.picking_id.partner_id and move_line.picking_id.partner_id.id:
return self.pool.get('account.fiscal.position').map_tax(
cr,
uid,
move_line.picking_id.partner_id.property_account_position,
taxes
)
else:
return map(lambda x: x.id, taxes)
def _get_account_analytic_invoice(self, cr, uid, picking, move_line):
return False
def _invoice_line_hook(self, cr, uid, move_line, invoice_line_id):
'''Call after the creation of the invoice line'''
return
def _invoice_hook(self, cr, uid, picking, invoice_id):
'''Call after the creation of the invoice'''
return
def _get_invoice_type(self, pick):
src_usage = dest_usage = None
inv_type = None
if pick.invoice_state == '2binvoiced':
if pick.move_lines:
src_usage = pick.move_lines[0].location_id.usage
dest_usage = pick.move_lines[0].location_dest_id.usage
if pick.type == 'out' and dest_usage == 'supplier':
inv_type = 'in_refund'
elif pick.type == 'out' and dest_usage == 'customer':
inv_type = 'out_invoice'
elif pick.type == 'in' and src_usage == 'supplier':
inv_type = 'in_invoice'
elif pick.type == 'in' and src_usage == 'customer':
inv_type = 'out_refund'
else:
inv_type = 'out_invoice'
return inv_type
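    # Summary of the mapping above (only reached when invoice_state is '2binvoiced'):
    #   out -> supplier : 'in_refund'      out -> customer : 'out_invoice'
    #   in  -> supplier : 'in_invoice'     in  -> customer : 'out_refund'
    #   any other combination defaults to 'out_invoice'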
def _prepare_invoice_group(self, cr, uid, picking, partner, invoice, context=None):
""" Builds the dict for grouped invoices
@param picking: picking object
        @param partner: object of the partner to invoice (not used here, but may be useful if this function is inherited)
@param invoice: object of the invoice that we are updating
@return: dict that will be used to update the invoice
"""
comment = self._get_comment_invoice(cr, uid, picking)
return {
'name': (invoice.name or '') + ', ' + (picking.name or ''),
'origin': (invoice.origin or '') + ', ' + (picking.name or '') + (picking.origin and (':' + picking.origin) or ''),
'comment': (comment and (invoice.comment and invoice.comment + "\n" + comment or comment)) or (invoice.comment and invoice.comment or ''),
'date_invoice': context.get('date_inv', False),
'user_id': uid,
}
def _prepare_invoice(self, cr, uid, picking, partner, inv_type, journal_id, context=None):
""" Builds the dict containing the values for the invoice
@param picking: picking object
@param partner: object of the partner to invoice
@param inv_type: type of the invoice ('out_invoice', 'in_invoice', ...)
@param journal_id: ID of the accounting journal
@return: dict that will be used to create the invoice object
"""
        if isinstance(partner, (int, long)):
partner = self.pool.get('res.partner').browse(cr, uid, partner, context=context)
if inv_type in ('out_invoice', 'out_refund'):
account_id = partner.property_account_receivable.id
payment_term = partner.property_payment_term.id or False
else:
account_id = partner.property_account_payable.id
payment_term = partner.property_supplier_payment_term.id or False
comment = self._get_comment_invoice(cr, uid, picking)
invoice_vals = {
'name': picking.name,
'origin': (picking.name or '') + (picking.origin and (':' + picking.origin) or ''),
'type': inv_type,
'account_id': account_id,
'partner_id': partner.id,
'comment': comment,
'payment_term': payment_term,
'fiscal_position': partner.property_account_position.id,
'date_invoice': context.get('date_inv', False),
'company_id': picking.company_id.id,
'user_id': uid,
}
cur_id = self.get_currency_id(cr, uid, picking)
if cur_id:
invoice_vals['currency_id'] = cur_id
if journal_id:
invoice_vals['journal_id'] = journal_id
return invoice_vals
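    # Minimal override sketch (hypothetical inheriting model) showing the intended
    # extension pattern -- call super() and amend the returned dict:
    #   def _prepare_invoice(self, cr, uid, picking, partner, inv_type, journal_id, context=None):
    #       vals = super(my_stock_picking, self)._prepare_invoice(
    #           cr, uid, picking, partner, inv_type, journal_id, context=context)
    #       vals['comment'] = (vals.get('comment') or '') + ' (auto-generated)'
    #       return vals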
def _prepare_invoice_line(self, cr, uid, group, picking, move_line, invoice_id,
invoice_vals, context=None):
""" Builds the dict containing the values for the invoice line
@param group: True or False
@param picking: picking object
        @param move_line: move_line object
        @param invoice_id: ID of the related invoice
        @param invoice_vals: dict used to create the invoice
@return: dict that will be used to create the invoice line
"""
if group:
name = (picking.name or '') + '-' + move_line.name
else:
name = move_line.name
origin = move_line.picking_id.name or ''
if move_line.picking_id.origin:
origin += ':' + move_line.picking_id.origin
if invoice_vals['type'] in ('out_invoice', 'out_refund'):
account_id = move_line.product_id.property_account_income.id
if not account_id:
account_id = move_line.product_id.categ_id.\
property_account_income_categ.id
else:
account_id = move_line.product_id.property_account_expense.id
if not account_id:
account_id = move_line.product_id.categ_id.\
property_account_expense_categ.id
if invoice_vals['fiscal_position']:
fp_obj = self.pool.get('account.fiscal.position')
fiscal_position = fp_obj.browse(cr, uid, invoice_vals['fiscal_position'], context=context)
account_id = fp_obj.map_account(cr, uid, fiscal_position, account_id)
# set UoS if it's a sale and the picking doesn't have one
uos_id = move_line.product_uos and move_line.product_uos.id or False
if not uos_id and invoice_vals['type'] in ('out_invoice', 'out_refund'):
uos_id = move_line.product_uom.id
return {
'name': name,
'origin': origin,
'invoice_id': invoice_id,
'uos_id': uos_id,
'product_id': move_line.product_id.id,
'account_id': account_id,
'price_unit': self._get_price_unit_invoice(cr, uid, move_line, invoice_vals['type']),
'discount': self._get_discount_invoice(cr, uid, move_line),
'quantity': move_line.product_uos_qty or move_line.product_qty,
'invoice_line_tax_id': [(6, 0, self._get_taxes_invoice(cr, uid, move_line, invoice_vals['type']))],
'account_analytic_id': self._get_account_analytic_invoice(cr, uid, picking, move_line),
}
def action_invoice_create(self, cr, uid, ids, journal_id=False,
group=False, type='out_invoice', context=None):
""" Creates invoice based on the invoice state selected for picking.
@param journal_id: Id of journal
@param group: Whether to create a group invoice or not
        @param type: Type of invoice to be created
@return: Ids of created invoices for the pickings
"""
if context is None:
context = {}
invoice_obj = self.pool.get('account.invoice')
invoice_line_obj = self.pool.get('account.invoice.line')
partner_obj = self.pool.get('res.partner')
invoices_group = {}
res = {}
inv_type = type
for picking in self.browse(cr, uid, ids, context=context):
if picking.invoice_state != '2binvoiced':
continue
partner = self._get_partner_to_invoice(cr, uid, picking, context=context)
            if isinstance(partner, (int, long)):
partner = partner_obj.browse(cr, uid, [partner], context=context)[0]
if not partner:
raise osv.except_osv(_('Error, no partner!'),
                    _('Please put a partner on the picking list if you want to generate an invoice.'))
if not inv_type:
inv_type = self._get_invoice_type(picking)
if group and partner.id in invoices_group:
invoice_id = invoices_group[partner.id]
invoice = invoice_obj.browse(cr, uid, invoice_id)
invoice_vals_group = self._prepare_invoice_group(cr, uid, picking, partner, invoice, context=context)
invoice_obj.write(cr, uid, [invoice_id], invoice_vals_group, context=context)
else:
invoice_vals = self._prepare_invoice(cr, uid, picking, partner, inv_type, journal_id, context=context)
invoice_id = invoice_obj.create(cr, uid, invoice_vals, context=context)
invoices_group[partner.id] = invoice_id
res[picking.id] = invoice_id
for move_line in picking.move_lines:
if move_line.state == 'cancel':
continue
if move_line.scrapped:
                    # do not invoice scrapped products
continue
vals = self._prepare_invoice_line(cr, uid, group, picking, move_line,
invoice_id, invoice_vals, context=context)
if vals:
invoice_line_id = invoice_line_obj.create(cr, uid, vals, context=context)
self._invoice_line_hook(cr, uid, move_line, invoice_line_id)
invoice_obj.button_compute(cr, uid, [invoice_id], context=context,
set_total=(inv_type in ('in_invoice', 'in_refund')))
self.write(cr, uid, [picking.id], {
'invoice_state': 'invoiced',
}, context=context)
self._invoice_hook(cr, uid, picking, invoice_id)
self.write(cr, uid, res.keys(), {
'invoice_state': 'invoiced',
}, context=context)
return res
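    # Illustrative call (hypothetical ids), grouping two pickings of the same
    # partner onto a single invoice:
    #   res = picking_obj.action_invoice_create(cr, uid, [7, 8],
    #       journal_id=sale_journal_id, group=True, type='out_invoice')
    #   # e.g. {7: 31, 8: 31} -- both pickings were billed on invoice 31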
def test_done(self, cr, uid, ids, context=None):
""" Test whether the move lines are done or not.
@return: True or False
"""
ok = False
for pick in self.browse(cr, uid, ids, context=context):
if not pick.move_lines:
return True
for move in pick.move_lines:
if move.state not in ('cancel','done'):
return False
if move.state=='done':
ok = True
return ok
def test_cancel(self, cr, uid, ids, context=None):
""" Test whether the move lines are canceled or not.
@return: True or False
"""
for pick in self.browse(cr, uid, ids, context=context):
for move in pick.move_lines:
if move.state not in ('cancel',):
return False
return True
def allow_cancel(self, cr, uid, ids, context=None):
for pick in self.browse(cr, uid, ids, context=context):
if not pick.move_lines:
return True
for move in pick.move_lines:
if move.state == 'done':
raise osv.except_osv(_('Error!'), _('You cannot cancel the picking as some moves have been done. You should cancel the picking lines.'))
return True
def unlink(self, cr, uid, ids, context=None):
move_obj = self.pool.get('stock.move')
if context is None:
context = {}
for pick in self.browse(cr, uid, ids, context=context):
if pick.state in ['done','cancel']:
                raise osv.except_osv(_('Error!'), _('You cannot remove a picking which is in the %s state!')%(pick.state,))
else:
ids2 = [move.id for move in pick.move_lines]
ctx = context.copy()
ctx.update({'call_unlink':True})
                if pick.state != 'draft':
                    # cancel the moves first so the product's virtual stock is updated
                    move_obj.action_cancel(cr, uid, ids2, ctx)
                # then remove the moves themselves
                move_obj.unlink(cr, uid, ids2, ctx)
return super(stock_picking, self).unlink(cr, uid, ids, context=context)
# FIXME: needs refactoring, this code is partially duplicated in stock_move.do_partial()!
def do_partial(self, cr, uid, ids, partial_datas, context=None):
""" Makes partial picking and moves done.
@param partial_datas : Dictionary containing details of partial picking
                          like partner_id, delivery_date, and delivery
                          moves with product_id, product_qty, uom
@return: Dictionary of values
"""
if context is None:
context = {}
else:
context = dict(context)
res = {}
move_obj = self.pool.get('stock.move')
product_obj = self.pool.get('product.product')
currency_obj = self.pool.get('res.currency')
uom_obj = self.pool.get('product.uom')
sequence_obj = self.pool.get('ir.sequence')
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids, context=context):
new_picking = None
complete, too_many, too_few = [], [], []
move_product_qty, prodlot_ids, product_avail, partial_qty, product_uoms = {}, {}, {}, {}, {}
for move in pick.move_lines:
if move.state in ('done', 'cancel'):
continue
partial_data = partial_datas.get('move%s'%(move.id), {})
product_qty = partial_data.get('product_qty',0.0)
move_product_qty[move.id] = product_qty
product_uom = partial_data.get('product_uom',False)
product_price = partial_data.get('product_price',0.0)
product_currency = partial_data.get('product_currency',False)
prodlot_id = partial_data.get('prodlot_id')
prodlot_ids[move.id] = prodlot_id
product_uoms[move.id] = product_uom
partial_qty[move.id] = uom_obj._compute_qty(cr, uid, product_uoms[move.id], product_qty, move.product_uom.id)
if move.product_qty == partial_qty[move.id]:
complete.append(move)
elif move.product_qty > partial_qty[move.id]:
too_few.append(move)
else:
too_many.append(move)
# Average price computation
if (pick.type == 'in') and (move.product_id.cost_method == 'average'):
product = product_obj.browse(cr, uid, move.product_id.id)
move_currency_id = move.company_id.currency_id.id
context['currency_id'] = move_currency_id
qty = uom_obj._compute_qty(cr, uid, product_uom, product_qty, product.uom_id.id)
if product.id not in product_avail:
# keep track of stock on hand including processed lines not yet marked as done
product_avail[product.id] = product.qty_available
if qty > 0:
new_price = currency_obj.compute(cr, uid, product_currency,
move_currency_id, product_price, round=False)
new_price = uom_obj._compute_price(cr, uid, product_uom, new_price,
product.uom_id.id)
if product_avail[product.id] <= 0:
product_avail[product.id] = 0
new_std_price = new_price
else:
# Get the standard price
amount_unit = product.price_get('standard_price', context=context)[product.id]
new_std_price = ((amount_unit * product_avail[product.id])\
+ (new_price * qty))/(product_avail[product.id] + qty)
# Write the field according to price type field
product_obj.write(cr, uid, [product.id], {'standard_price': new_std_price})
# Record the values that were chosen in the wizard, so they can be
# used for inventory valuation if real-time valuation is enabled.
move_obj.write(cr, uid, [move.id],
{'price_unit': product_price,
'price_currency_id': product_currency})
product_avail[product.id] += qty
for move in too_few:
product_qty = move_product_qty[move.id]
if not new_picking:
new_picking_name = pick.name
self.write(cr, uid, [pick.id],
{'name': sequence_obj.get(cr, uid,
'stock.picking.%s'%(pick.type)),
})
new_picking = self.copy(cr, uid, pick.id,
{
'name': new_picking_name,
'move_lines' : [],
'state':'draft',
})
if product_qty != 0:
defaults = {
'product_qty' : product_qty,
'product_uos_qty': product_qty, #TODO: put correct uos_qty
'picking_id' : new_picking,
'state': 'assigned',
'move_dest_id': False,
'price_unit': move.price_unit,
'product_uom': product_uoms[move.id]
}
prodlot_id = prodlot_ids[move.id]
if prodlot_id:
defaults.update(prodlot_id=prodlot_id)
move_obj.copy(cr, uid, move.id, defaults)
move_obj.write(cr, uid, [move.id],
{
'product_qty': move.product_qty - partial_qty[move.id],
'product_uos_qty': move.product_qty - partial_qty[move.id], #TODO: put correct uos_qty
'prodlot_id': False,
'tracking_id': False,
})
if new_picking:
move_obj.write(cr, uid, [c.id for c in complete], {'picking_id': new_picking})
for move in complete:
defaults = {'product_uom': product_uoms[move.id], 'product_qty': move_product_qty[move.id]}
if prodlot_ids.get(move.id):
defaults.update({'prodlot_id': prodlot_ids[move.id]})
move_obj.write(cr, uid, [move.id], defaults)
for move in too_many:
product_qty = move_product_qty[move.id]
defaults = {
'product_qty' : product_qty,
'product_uos_qty': product_qty, #TODO: put correct uos_qty
'product_uom': product_uoms[move.id]
}
prodlot_id = prodlot_ids.get(move.id)
if prodlot_ids.get(move.id):
defaults.update(prodlot_id=prodlot_id)
if new_picking:
defaults.update(picking_id=new_picking)
move_obj.write(cr, uid, [move.id], defaults)
# At first we confirm the new picking (if necessary)
if new_picking:
wf_service.trg_validate(uid, 'stock.picking', new_picking, 'button_confirm', cr)
            # Then we finish the original picking
self.write(cr, uid, [pick.id], {'backorder_id': new_picking})
self.action_move(cr, uid, [new_picking], context=context)
wf_service.trg_validate(uid, 'stock.picking', new_picking, 'button_done', cr)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
delivered_pack_id = pick.id
back_order_name = self.browse(cr, uid, delivered_pack_id, context=context).name
self.message_post(cr, uid, new_picking, body=_("Back order <em>%s</em> has been <b>created</b>.") % (back_order_name), context=context)
else:
self.action_move(cr, uid, [pick.id], context=context)
wf_service.trg_validate(uid, 'stock.picking', pick.id, 'button_done', cr)
delivered_pack_id = pick.id
delivered_pack = self.browse(cr, uid, delivered_pack_id, context=context)
res[pick.id] = {'delivered_picking': delivered_pack.id or False}
return res
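    # ``partial_datas`` is keyed per move as 'move<id>' (illustrative values):
    #   {'delivery_date': '2013-01-15 12:00:00',
    #    'move12': {'product_qty': 3.0, 'product_uom': uom_unit_id,
    #               'prodlot_id': False, 'product_price': 0.0,
    #               'product_currency': False}}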
# views associated to each picking type
_VIEW_LIST = {
'out': 'view_picking_out_form',
'in': 'view_picking_in_form',
'internal': 'view_picking_form',
}
def _get_view_id(self, cr, uid, type):
"""Get the view id suiting the given type
@param type: the picking type as a string
        @return: view id, or False if no view found
"""
res = self.pool.get('ir.model.data').get_object_reference(cr, uid,
'stock', self._VIEW_LIST.get(type, 'view_picking_form'))
return res and res[1] or False
class stock_production_lot(osv.osv):
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
reads = self.read(cr, uid, ids, ['name', 'prefix', 'ref'], context)
res = []
for record in reads:
name = record['name']
prefix = record['prefix']
if prefix:
name = prefix + '/' + name
if record['ref']:
name = '%s [%s]' % (name, record['ref'])
res.append((record['id'], name))
return res
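    # e.g. a serial number 'LOT12' with prefix 'A' and internal reference 'X7'
    # is displayed as 'A/LOT12 [X7]' (illustrative values).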
def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100):
args = args or []
ids = []
if name:
ids = self.search(cr, uid, [('prefix', '=', name)] + args, limit=limit, context=context)
if not ids:
ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit, context=context)
else:
ids = self.search(cr, uid, args, limit=limit, context=context)
return self.name_get(cr, uid, ids, context)
_name = 'stock.production.lot'
_description = 'Serial Number'
def _get_stock(self, cr, uid, ids, field_name, arg, context=None):
""" Gets stock of products for locations
@return: Dictionary of values
"""
if context is None:
context = {}
if 'location_id' not in context:
locations = self.pool.get('stock.location').search(cr, uid, [('usage', '=', 'internal')], context=context)
else:
locations = context['location_id'] and [context['location_id']] or []
if isinstance(ids, (int, long)):
ids = [ids]
res = {}.fromkeys(ids, 0.0)
if locations:
cr.execute('''select
prodlot_id,
sum(qty)
from
stock_report_prodlots
where
location_id IN %s and prodlot_id IN %s group by prodlot_id''',(tuple(locations),tuple(ids),))
res.update(dict(cr.fetchall()))
return res
def _stock_search(self, cr, uid, obj, name, args, context=None):
""" Searches Ids of products
@return: Ids of locations
"""
locations = self.pool.get('stock.location').search(cr, uid, [('usage', '=', 'internal')])
        op = args[0][1]
        # whitelist the operator and parameterize the value to keep the SQL safe
        assert op in ('<', '>', '=', '<=', '>=', '!=', '<>'), 'Invalid operator'
        cr.execute('''select
            prodlot_id,
            sum(qty)
        from
            stock_report_prodlots
        where
            location_id IN %s group by prodlot_id
            having sum(qty) ''' + op + ''' %s''', (tuple(locations), args[0][2]))
res = cr.fetchall()
ids = [('id', 'in', map(lambda x: x[0], res))]
return ids
_columns = {
'name': fields.char('Serial Number', size=64, required=True, help="Unique Serial Number, will be displayed as: PREFIX/SERIAL [INT_REF]"),
'ref': fields.char('Internal Reference', size=256, help="Internal reference number in case it differs from the manufacturer's serial number"),
'prefix': fields.char('Prefix', size=64, help="Optional prefix to prepend when displaying this serial number: PREFIX/SERIAL [INT_REF]"),
'product_id': fields.many2one('product.product', 'Product', required=True, domain=[('type', '<>', 'service')]),
'date': fields.datetime('Creation Date', required=True),
'stock_available': fields.function(_get_stock, fnct_search=_stock_search, type="float", string="Available", select=True,
help="Current quantity of products with this Serial Number available in company warehouses",
digits_compute=dp.get_precision('Product Unit of Measure')),
'revisions': fields.one2many('stock.production.lot.revision', 'lot_id', 'Revisions'),
'company_id': fields.many2one('res.company', 'Company', select=True),
'move_ids': fields.one2many('stock.move', 'prodlot_id', 'Moves for this serial number', readonly=True),
}
_defaults = {
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'name': lambda x, y, z, c: x.pool.get('ir.sequence').get(y, z, 'stock.lot.serial'),
'product_id': lambda x, y, z, c: c.get('product_id', False),
}
_sql_constraints = [
('name_ref_uniq', 'unique (name, ref)', 'The combination of Serial Number and internal reference must be unique !'),
]
def action_traceability(self, cr, uid, ids, context=None):
""" It traces the information of a product
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param ids: List of IDs selected
@param context: A standard dictionary
@return: A dictionary of values
"""
        return self.pool.get('action.traceability').action_traceability(cr, uid, ids, context)
def copy(self, cr, uid, id, default=None, context=None):
context = context or {}
default = default and default.copy() or {}
default.update(date=time.strftime('%Y-%m-%d %H:%M:%S'), move_ids=[])
return super(stock_production_lot, self).copy(cr, uid, id, default=default, context=context)
stock_production_lot()
class stock_production_lot_revision(osv.osv):
_name = 'stock.production.lot.revision'
_description = 'Serial Number Revision'
_columns = {
'name': fields.char('Revision Name', size=64, required=True),
'description': fields.text('Description'),
'date': fields.date('Revision Date'),
'indice': fields.char('Revision Number', size=16),
'author_id': fields.many2one('res.users', 'Author'),
'lot_id': fields.many2one('stock.production.lot', 'Serial Number', select=True, ondelete='cascade'),
'company_id': fields.related('lot_id','company_id',type='many2one',relation='res.company',string='Company', store=True, readonly=True),
}
_defaults = {
'author_id': lambda x, y, z, c: z,
'date': fields.date.context_today,
}
stock_production_lot_revision()
# ----------------------------------------------------
# Move
# ----------------------------------------------------
#
# Fields:
#   location_dest_id is only used for predicting future stock levels
#
class stock_move(osv.osv):
def _getSSCC(self, cr, uid, context=None):
cr.execute('select id from stock_tracking where create_uid=%s order by id desc limit 1', (uid,))
res = cr.fetchone()
return (res and res[0]) or False
_name = "stock.move"
_description = "Stock Move"
_order = 'date_expected desc, id'
_log_create = False
def action_partial_move(self, cr, uid, ids, context=None):
if context is None: context = {}
if context.get('active_model') != self._name:
context.update(active_ids=ids, active_model=self._name)
partial_id = self.pool.get("stock.partial.move").create(
cr, uid, {}, context=context)
return {
'name':_("Products to Process"),
'view_mode': 'form',
'view_id': False,
'view_type': 'form',
'res_model': 'stock.partial.move',
'res_id': partial_id,
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'new',
'domain': '[]',
'context': context
}
def name_get(self, cr, uid, ids, context=None):
res = []
for line in self.browse(cr, uid, ids, context=context):
name = line.location_id.name+' > '+line.location_dest_id.name
# optional prefixes
if line.product_id.code:
name = line.product_id.code + ': ' + name
if line.picking_id.origin:
name = line.picking_id.origin + '/ ' + name
res.append((line.id, name))
return res
def _check_tracking(self, cr, uid, ids, context=None):
""" Checks if serial number is assigned to stock move or not.
@return: True or False
"""
for move in self.browse(cr, uid, ids, context=context):
if not move.prodlot_id and \
(move.state == 'done' and \
( \
(move.product_id.track_production and move.location_id.usage == 'production') or \
(move.product_id.track_production and move.location_dest_id.usage == 'production') or \
(move.product_id.track_incoming and move.location_id.usage == 'supplier') or \
(move.product_id.track_outgoing and move.location_dest_id.usage == 'customer') or \
(move.product_id.track_incoming and move.location_id.usage == 'inventory') \
)):
return False
return True
def _check_product_lot(self, cr, uid, ids, context=None):
""" Checks whether move is done or not and production lot is assigned to that move.
@return: True or False
"""
for move in self.browse(cr, uid, ids, context=context):
if move.prodlot_id and move.state == 'done' and (move.prodlot_id.product_id.id != move.product_id.id):
return False
return True
_columns = {
'name': fields.char('Description', required=True, select=True),
'priority': fields.selection([('0', 'Not urgent'), ('1', 'Urgent')], 'Priority'),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
'date': fields.datetime('Date', required=True, select=True, help="Move date: scheduled date until move is done, then date of actual move processing", states={'done': [('readonly', True)]}),
'date_expected': fields.datetime('Scheduled Date', states={'done': [('readonly', True)]},required=True, select=True, help="Scheduled date for the processing of this move"),
'product_id': fields.many2one('product.product', 'Product', required=True, select=True, domain=[('type','<>','service')],states={'done': [('readonly', True)]}),
'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'),
required=True,states={'done': [('readonly', True)]},
help="This is the quantity of products from an inventory "
"point of view. For moves in the state 'done', this is the "
"quantity of products that were actually moved. For other "
"moves, this is the quantity of product that is planned to "
"be moved. Lowering this quantity does not generate a "
"backorder. Changing this quantity on assigned moves affects "
"the product reservation, and should be done with care."
),
'product_uom': fields.many2one('product.uom', 'Unit of Measure', required=True,states={'done': [('readonly', True)]}),
'product_uos_qty': fields.float('Quantity (UOS)', digits_compute=dp.get_precision('Product Unit of Measure'), states={'done': [('readonly', True)]}),
'product_uos': fields.many2one('product.uom', 'Product UOS', states={'done': [('readonly', True)]}),
        'product_packaging': fields.many2one('product.packaging', 'Packaging', help="It specifies attributes of packaging like type, quantity of packaging, etc."),
'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True,states={'done': [('readonly', True)]}, help="Sets a location if you produce at a fixed location. This can be a partner location if you subcontract the manufacturing operations."),
'location_dest_id': fields.many2one('stock.location', 'Destination Location', required=True,states={'done': [('readonly', True)]}, select=True, help="Location where the system will stock the finished products."),
'partner_id': fields.many2one('res.partner', 'Destination Address ', states={'done': [('readonly', True)]}, help="Optional address where goods are to be delivered, specifically used for allotment"),
        'prodlot_id': fields.many2one('stock.production.lot', 'Serial Number', states={'done': [('readonly', True)]}, help="Serial number used to track the products of this move", select=True),
'tracking_id': fields.many2one('stock.tracking', 'Pack', select=True, states={'done': [('readonly', True)]}, help="Logistical shipping unit: pallet, box, pack ..."),
'auto_validate': fields.boolean('Auto Validate'),
'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Optional: next stock move when chaining them", select=True),
'move_history_ids': fields.many2many('stock.move', 'stock_move_history_ids', 'parent_id', 'child_id', 'Move History (child moves)'),
'move_history_ids2': fields.many2many('stock.move', 'stock_move_history_ids', 'child_id', 'parent_id', 'Move History (parent moves)'),
'picking_id': fields.many2one('stock.picking', 'Reference', select=True,states={'done': [('readonly', True)]}),
'note': fields.text('Notes'),
'state': fields.selection([('draft', 'New'),
('cancel', 'Cancelled'),
('waiting', 'Waiting Another Move'),
('confirmed', 'Waiting Availability'),
('assigned', 'Available'),
('done', 'Done'),
], 'Status', readonly=True, select=True,
help= "* New: When the stock move is created and not yet confirmed.\n"\
"* Waiting Another Move: This state can be seen when a move is waiting for another one, for example in a chained flow.\n"\
"* Waiting Availability: This state is reached when the procurement resolution is not straight forward. It may need the scheduler to run, a component to me manufactured...\n"\
"* Available: When products are reserved, it is set to \'Available\'.\n"\
"* Done: When the shipment is processed, the state is \'Done\'."),
'price_unit': fields.float('Unit Price', digits_compute= dp.get_precision('Product Price'), help="Technical field used to record the product cost set by the user during a picking confirmation (when average price costing method is used)"),
'price_currency_id': fields.many2one('res.currency', 'Currency for average price', help="Technical field used to record the currency chosen by the user during a picking confirmation (when average price costing method is used)"),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True),
'backorder_id': fields.related('picking_id','backorder_id',type='many2one', relation="stock.picking", string="Back Order of", select=True),
'origin': fields.related('picking_id','origin',type='char', size=64, relation="stock.picking", string="Source", store=True),
# used for colors in tree views:
'scrapped': fields.related('location_dest_id','scrap_location',type='boolean',relation='stock.location',string='Scrapped', readonly=True),
'type': fields.related('picking_id', 'type', type='selection', selection=[('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')], string='Shipping Type'),
}
def _check_location(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
if (record.state=='done') and (record.location_id.usage == 'view'):
raise osv.except_osv(_('Error'), _('You cannot move product %s from a location of type view %s.')% (record.product_id.name, record.location_id.name))
if (record.state=='done') and (record.location_dest_id.usage == 'view' ):
raise osv.except_osv(_('Error'), _('You cannot move product %s to a location of type view %s.')% (record.product_id.name, record.location_dest_id.name))
return True
_constraints = [
(_check_tracking,
'You must assign a serial number for this product.',
['prodlot_id']),
(_check_location, 'You cannot move products from or to a location of the type view.',
['location_id','location_dest_id']),
(_check_product_lot,
'You try to assign a lot which is not from the same product.',
['prodlot_id'])]
    def _default_location_destination(self, cr, uid, context=None):
        """ Gets the default destination location
        @return: location id or False
        """
        if context is None:
            context = {}
        mod_obj = self.pool.get('ir.model.data')
        picking_type = context.get('picking_type')
        location_id = False
if context.get('move_line', []):
if context['move_line'][0]:
if isinstance(context['move_line'][0], (tuple, list)):
location_id = context['move_line'][0][2] and context['move_line'][0][2].get('location_dest_id',False)
else:
move_list = self.pool.get('stock.move').read(cr, uid, context['move_line'][0], ['location_dest_id'])
location_id = move_list and move_list['location_dest_id'][0] or False
elif context.get('address_out_id', False):
property_out = self.pool.get('res.partner').browse(cr, uid, context['address_out_id'], context).property_stock_customer
location_id = property_out and property_out.id or False
else:
location_xml_id = False
if picking_type in ('in', 'internal'):
location_xml_id = 'stock_location_stock'
elif picking_type == 'out':
location_xml_id = 'stock_location_customers'
if location_xml_id:
try:
location_model, location_id = mod_obj.get_object_reference(cr, uid, 'stock', location_xml_id)
with tools.mute_logger('openerp.osv.orm'):
self.pool.get('stock.location').check_access_rule(cr, uid, [location_id], 'read', context=context)
except (orm.except_orm, ValueError):
location_id = False
return location_id
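    # Hedged usage sketch (not part of the original module): how the default
    # destination resolves for an outgoing picking -- with no move line or
    # partner data in the context, it falls back to the
    # 'stock.stock_location_customers' reference location. The uid of 1 is
    # an illustrative assumption.
    def _example_default_location_destination(self, cr):
        ctx = {'picking_type': 'out'}
        return self._default_location_destination(cr, 1, context=ctx)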
def _default_location_source(self, cr, uid, context=None):
""" Gets default address of partner for source location
@return: Address id or False
"""
        mod_obj = self.pool.get('ir.model.data')
        if context is None:
            context = {}
        # read the picking type only once the context is known to be a dict
        picking_type = context.get('picking_type')
        location_id = False
        if context.get('move_line', []):
            try:
                location_id = context['move_line'][0][2]['location_id']
            except (KeyError, IndexError, TypeError):
                # the move_line entry does not carry a source location
                pass
elif context.get('address_in_id', False):
part_obj_add = self.pool.get('res.partner').browse(cr, uid, context['address_in_id'], context=context)
if part_obj_add:
location_id = part_obj_add.property_stock_supplier.id
else:
location_xml_id = False
if picking_type == 'in':
location_xml_id = 'stock_location_suppliers'
elif picking_type in ('out', 'internal'):
location_xml_id = 'stock_location_stock'
if location_xml_id:
try:
location_model, location_id = mod_obj.get_object_reference(cr, uid, 'stock', location_xml_id)
with tools.mute_logger('openerp.osv.orm'):
self.pool.get('stock.location').check_access_rule(cr, uid, [location_id], 'read', context=context)
except (orm.except_orm, ValueError):
location_id = False
return location_id
def _default_destination_address(self, cr, uid, context=None):
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
return user.company_id.partner_id.id
def _default_move_type(self, cr, uid, context=None):
""" Gets default type of move
@return: type
"""
if context is None:
context = {}
picking_type = context.get('picking_type')
type = 'internal'
if picking_type == 'in':
type = 'in'
elif picking_type == 'out':
type = 'out'
return type
_defaults = {
'location_id': _default_location_source,
'location_dest_id': _default_location_destination,
'partner_id': _default_destination_address,
'type': _default_move_type,
'state': 'draft',
'priority': '1',
'product_qty': 1.0,
'scrapped' : False,
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.move', context=c),
'date_expected': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
}
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
if uid != 1:
frozen_fields = set(['product_qty', 'product_uom', 'product_uos_qty', 'product_uos', 'location_id', 'location_dest_id', 'product_id'])
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'done':
if frozen_fields.intersection(vals):
raise osv.except_osv(_('Operation Forbidden!'),
_('Quantities, Units of Measure, Products and Locations cannot be modified on stock moves that have already been processed (except by the Administrator).'))
return super(stock_move, self).write(cr, uid, ids, vals, context=context)
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default = default.copy()
default.update({'move_history_ids2': [], 'move_history_ids': []})
return super(stock_move, self).copy(cr, uid, id, default, context=context)
def _auto_init(self, cursor, context=None):
res = super(stock_move, self)._auto_init(cursor, context=context)
cursor.execute('SELECT indexname \
FROM pg_indexes \
WHERE indexname = \'stock_move_location_id_location_dest_id_product_id_state\'')
if not cursor.fetchone():
cursor.execute('CREATE INDEX stock_move_location_id_location_dest_id_product_id_state \
ON stock_move (product_id, state, location_id, location_dest_id)')
return res
def onchange_lot_id(self, cr, uid, ids, prodlot_id=False, product_qty=False,
loc_id=False, product_id=False, uom_id=False, context=None):
""" On change of production lot gives a warning message.
@param prodlot_id: Changed production lot id
@param product_qty: Quantity of product
@param loc_id: Location id
@param product_id: Product id
@return: Warning message
"""
if not prodlot_id or not loc_id:
return {}
ctx = context and context.copy() or {}
ctx['location_id'] = loc_id
ctx.update({'raise-exception': True})
uom_obj = self.pool.get('product.uom')
product_obj = self.pool.get('product.product')
product_uom = product_obj.browse(cr, uid, product_id, context=ctx).uom_id
prodlot = self.pool.get('stock.production.lot').browse(cr, uid, prodlot_id, context=ctx)
location = self.pool.get('stock.location').browse(cr, uid, loc_id, context=ctx)
uom = uom_obj.browse(cr, uid, uom_id, context=ctx)
amount_actual = uom_obj._compute_qty_obj(cr, uid, product_uom, prodlot.stock_available, uom, context=ctx)
warning = {}
if (location.usage == 'internal') and (product_qty > (amount_actual or 0.0)):
warning = {
                'title': _('Insufficient Stock for Serial Number!'),
'message': _('You are moving %.2f %s but only %.2f %s available for this serial number.') % (product_qty, uom.name, amount_actual, uom.name)
}
return {'warning': warning}
def onchange_quantity(self, cr, uid, ids, product_id, product_qty,
product_uom, product_uos):
""" On change of product quantity finds UoM and UoS quantities
@param product_id: Product id
@param product_qty: Changed Quantity of product
@param product_uom: Unit of measure of product
@param product_uos: Unit of sale of product
@return: Dictionary of values
"""
result = {
'product_uos_qty': 0.00
}
warning = {}
if (not product_id) or (product_qty <=0.0):
result['product_qty'] = 0.0
return {'value': result}
product_obj = self.pool.get('product.product')
uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])
# Warn if the quantity was decreased
if ids:
for move in self.read(cr, uid, ids, ['product_qty']):
if product_qty < move['product_qty']:
warning.update({
'title': _('Information'),
'message': _("By changing this quantity here, you accept the "
"new quantity as complete: OpenERP will not "
"automatically generate a back order.") })
break
if product_uos and product_uom and (product_uom != product_uos):
result['product_uos_qty'] = product_qty * uos_coeff['uos_coeff']
else:
result['product_uos_qty'] = product_qty
return {'value': result, 'warning': warning}
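    # Hedged worked example of the UoM/UoS arithmetic above, with an assumed
    # uos_coeff of 0.5 (one unit of measure equals half a unit of sale).
    # Pure-python sketch, no database access.
    def _example_uos_from_qty(self):
        uos_coeff, product_qty = 0.5, 4.0
        # mirrors: result['product_uos_qty'] = product_qty * uos_coeff
        return product_qty * uos_coeff  # -> 2.0 units of sale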
def onchange_uos_quantity(self, cr, uid, ids, product_id, product_uos_qty,
product_uos, product_uom):
""" On change of product quantity finds UoM and UoS quantities
@param product_id: Product id
@param product_uos_qty: Changed UoS Quantity of product
@param product_uom: Unit of measure of product
@param product_uos: Unit of sale of product
@return: Dictionary of values
"""
result = {
'product_qty': 0.00
}
warning = {}
if (not product_id) or (product_uos_qty <=0.0):
result['product_uos_qty'] = 0.0
return {'value': result}
product_obj = self.pool.get('product.product')
uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])
# Warn if the quantity was decreased
for move in self.read(cr, uid, ids, ['product_uos_qty']):
if product_uos_qty < move['product_uos_qty']:
warning.update({
'title': _('Warning: No Back Order'),
'message': _("By changing the quantity here, you accept the "
"new quantity as complete: OpenERP will not "
"automatically generate a Back Order.") })
break
if product_uos and product_uom and (product_uom != product_uos):
result['product_qty'] = product_uos_qty / uos_coeff['uos_coeff']
else:
result['product_qty'] = product_uos_qty
return {'value': result, 'warning': warning}
def onchange_product_id(self, cr, uid, ids, prod_id=False, loc_id=False,
loc_dest_id=False, partner_id=False):
""" On change of product id, if finds UoM, UoS, quantity and UoS quantity.
@param prod_id: Changed Product id
@param loc_id: Source location id
@param loc_dest_id: Destination location id
@param partner_id: Address id of partner
@return: Dictionary of values
"""
if not prod_id:
return {}
user = self.pool.get('res.users').browse(cr, uid, uid)
lang = user and user.lang or False
if partner_id:
addr_rec = self.pool.get('res.partner').browse(cr, uid, partner_id)
if addr_rec:
lang = addr_rec and addr_rec.lang or False
ctx = {'lang': lang}
product = self.pool.get('product.product').browse(cr, uid, [prod_id], context=ctx)[0]
uos_id = product.uos_id and product.uos_id.id or False
result = {
'product_uom': product.uom_id.id,
'product_uos': uos_id,
'product_qty': 1.00,
'product_uos_qty' : self.pool.get('stock.move').onchange_quantity(cr, uid, ids, prod_id, 1.00, product.uom_id.id, uos_id)['value']['product_uos_qty'],
'prodlot_id' : False,
}
if not ids:
result['name'] = product.partner_ref
if loc_id:
result['location_id'] = loc_id
if loc_dest_id:
result['location_dest_id'] = loc_dest_id
return {'value': result}
def onchange_move_type(self, cr, uid, ids, type, context=None):
""" On change of move type gives sorce and destination location.
@param type: Move Type
@return: Dictionary of values
"""
mod_obj = self.pool.get('ir.model.data')
location_source_id = 'stock_location_stock'
location_dest_id = 'stock_location_stock'
if type == 'in':
location_source_id = 'stock_location_suppliers'
location_dest_id = 'stock_location_stock'
elif type == 'out':
location_source_id = 'stock_location_stock'
location_dest_id = 'stock_location_customers'
try:
source_location = mod_obj.get_object_reference(cr, uid, 'stock', location_source_id)
with tools.mute_logger('openerp.osv.orm'):
self.pool.get('stock.location').check_access_rule(cr, uid, [source_location[1]], 'read', context=context)
except (orm.except_orm, ValueError):
source_location = False
try:
dest_location = mod_obj.get_object_reference(cr, uid, 'stock', location_dest_id)
with tools.mute_logger('openerp.osv.orm'):
self.pool.get('stock.location').check_access_rule(cr, uid, [dest_location[1]], 'read', context=context)
except (orm.except_orm, ValueError):
dest_location = False
return {'value':{'location_id': source_location and source_location[1] or False, 'location_dest_id': dest_location and dest_location[1] or False}}
def onchange_date(self, cr, uid, ids, date, date_expected, context=None):
""" On change of Scheduled Date gives a Move date.
@param date_expected: Scheduled Date
@param date: Move Date
@return: Move Date
"""
if not date_expected:
date_expected = time.strftime('%Y-%m-%d %H:%M:%S')
return {'value':{'date': date_expected}}
def _chain_compute(self, cr, uid, moves, context=None):
""" Finds whether the location has chained location type or not.
@param moves: Stock moves
@return: Dictionary containing destination location with chained location type.
"""
result = {}
for m in moves:
dest = self.pool.get('stock.location').chained_location_get(
cr,
uid,
m.location_dest_id,
                m.picking_id and m.picking_id.partner_id,
m.product_id,
context
)
if dest:
if dest[1] == 'transparent':
newdate = (datetime.strptime(m.date, '%Y-%m-%d %H:%M:%S') + relativedelta(days=dest[2] or 0)).strftime('%Y-%m-%d')
self.write(cr, uid, [m.id], {
'date': newdate,
'location_dest_id': dest[0].id})
if m.picking_id and (dest[3] or dest[5]):
self.pool.get('stock.picking').write(cr, uid, [m.picking_id.id], {
'stock_journal_id': dest[3] or m.picking_id.stock_journal_id.id,
'type': dest[5] or m.picking_id.type
}, context=context)
m.location_dest_id = dest[0]
res2 = self._chain_compute(cr, uid, [m], context=context)
for pick_id in res2.keys():
result.setdefault(pick_id, [])
result[pick_id] += res2[pick_id]
else:
result.setdefault(m.picking_id, [])
result[m.picking_id].append( (m, dest) )
return result
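    # Hedged sketch (illustrative values) of the dictionary returned above,
    # keyed by picking browse records; the tuple layout mirrors the unpacking
    # done in create_chained_picking() below.
    def _example_chain_compute_shape(self, move, dest_location):
        # (location, auto_packing, delay, journal_id, company_id,
        #  picking_type, invoice_state)
        dest = (dest_location, 'manual', 2, False, False, 'internal', 'none')
        return {move.picking_id: [(move, dest)]}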
def _prepare_chained_picking(self, cr, uid, picking_name, picking, picking_type, moves_todo, context=None):
"""Prepare the definition (values) to create a new chained picking.
:param str picking_name: desired new picking name
:param browse_record picking: source picking (being chained to)
:param str picking_type: desired new picking type
:param list moves_todo: specification of the stock moves to be later included in this
picking, in the form::
[[move, (dest_location, auto_packing, chained_delay, chained_journal,
                                        chained_company_id, chained_picking_type, chained_invoice_state)],
...
]
See also :meth:`stock_location.chained_location_get`.
"""
res_company = self.pool.get('res.company')
return {
'name': picking_name,
'origin': tools.ustr(picking.origin or ''),
'type': picking_type,
'note': picking.note,
'move_type': picking.move_type,
'auto_picking': moves_todo[0][1][1] == 'auto',
'stock_journal_id': moves_todo[0][1][3],
'company_id': moves_todo[0][1][4] or res_company._company_default_get(cr, uid, 'stock.company', context=context),
'partner_id': picking.partner_id.id,
'invoice_state': 'none',
'date': picking.date,
}
def _create_chained_picking(self, cr, uid, picking_name, picking, picking_type, moves_todo, context=None):
picking_obj = self.pool.get('stock.picking')
return picking_obj.create(cr, uid, self._prepare_chained_picking(cr, uid, picking_name, picking, picking_type, moves_todo, context=context))
def create_chained_picking(self, cr, uid, moves, context=None):
res_obj = self.pool.get('res.company')
location_obj = self.pool.get('stock.location')
move_obj = self.pool.get('stock.move')
wf_service = netsvc.LocalService("workflow")
new_moves = []
if context is None:
context = {}
seq_obj = self.pool.get('ir.sequence')
for picking, todo in self._chain_compute(cr, uid, moves, context=context).items():
            ptype = todo[0][1][5] or location_obj.picking_type_get(cr, uid, todo[0][0].location_dest_id, todo[0][1][0])
if picking:
# name of new picking according to its type
                if ptype == 'internal':
                    new_pick_name = seq_obj.get(cr, uid, 'stock.picking')
                else:
                    new_pick_name = seq_obj.get(cr, uid, 'stock.picking.' + ptype)
pickid = self._create_chained_picking(cr, uid, new_pick_name, picking, ptype, todo, context=context)
# Need to check name of old picking because it always considers picking as "OUT" when created from Sales Order
old_ptype = location_obj.picking_type_get(cr, uid, picking.move_lines[0].location_id, picking.move_lines[0].location_dest_id)
if old_ptype != picking.type:
old_pick_name = seq_obj.get(cr, uid, 'stock.picking.' + old_ptype)
self.pool.get('stock.picking').write(cr, uid, [picking.id], {'name': old_pick_name, 'type': old_ptype}, context=context)
else:
pickid = False
for move, (loc, dummy, delay, dummy, company_id, ptype, invoice_state) in todo:
new_id = move_obj.copy(cr, uid, move.id, {
'location_id': move.location_dest_id.id,
'location_dest_id': loc.id,
'date': time.strftime('%Y-%m-%d'),
'picking_id': pickid,
'state': 'waiting',
'company_id': company_id or res_obj._company_default_get(cr, uid, 'stock.company', context=context) ,
'move_history_ids': [],
'date_expected': (datetime.strptime(move.date, '%Y-%m-%d %H:%M:%S') + relativedelta(days=delay or 0)).strftime('%Y-%m-%d'),
'move_history_ids2': []}
)
move_obj.write(cr, uid, [move.id], {
'move_dest_id': new_id,
'move_history_ids': [(4, new_id)]
})
new_moves.append(self.browse(cr, uid, [new_id])[0])
if pickid:
wf_service.trg_validate(uid, 'stock.picking', pickid, 'button_confirm', cr)
if new_moves:
new_moves += self.create_chained_picking(cr, uid, new_moves, context)
return new_moves
def action_confirm(self, cr, uid, ids, context=None):
""" Confirms stock move.
@return: List of ids.
"""
moves = self.browse(cr, uid, ids, context=context)
self.write(cr, uid, ids, {'state': 'confirmed'})
self.create_chained_picking(cr, uid, moves, context)
return []
def action_assign(self, cr, uid, ids, *args):
""" Changes state to confirmed or waiting.
        @return: Number of moves effectively assigned
"""
todo = []
for move in self.browse(cr, uid, ids):
if move.state in ('confirmed', 'waiting'):
todo.append(move.id)
res = self.check_assign(cr, uid, todo)
return res
def force_assign(self, cr, uid, ids, context=None):
""" Changes the state to assigned.
@return: True
"""
self.write(cr, uid, ids, {'state': 'assigned'})
wf_service = netsvc.LocalService('workflow')
for move in self.browse(cr, uid, ids, context):
if move.picking_id:
wf_service.trg_write(uid, 'stock.picking', move.picking_id.id, cr)
return True
def cancel_assign(self, cr, uid, ids, context=None):
""" Changes the state to confirmed.
@return: True
"""
self.write(cr, uid, ids, {'state': 'confirmed'})
# fix for bug lp:707031
        # call write on the related picking because changing the move availability does
        # not trigger the picking workflow, which must recompute the picking state
wf_service = netsvc.LocalService('workflow')
for move in self.browse(cr, uid, ids, context):
if move.picking_id:
wf_service.trg_write(uid, 'stock.picking', move.picking_id.id, cr)
return True
#
# Duplicate stock.move
#
def check_assign(self, cr, uid, ids, context=None):
""" Checks the product type and accordingly writes the state.
@return: No. of moves done
"""
done = []
count = 0
pickings = {}
if context is None:
context = {}
for move in self.browse(cr, uid, ids, context=context):
if move.product_id.type == 'consu' or move.location_id.usage == 'supplier':
if move.state in ('confirmed', 'waiting'):
done.append(move.id)
pickings[move.picking_id.id] = 1
continue
if move.state in ('confirmed', 'waiting'):
# Important: we must pass lock=True to _product_reserve() to avoid race conditions and double reservations
res = self.pool.get('stock.location')._product_reserve(cr, uid, [move.location_id.id], move.product_id.id, move.product_qty, {'uom': move.product_uom.id}, lock=True)
if res:
#_product_available_test depends on the next status for correct functioning
#the test does not work correctly if the same product occurs multiple times
#in the same order. This is e.g. the case when using the button 'split in two' of
#the stock outgoing form
self.write(cr, uid, [move.id], {'state':'assigned'})
done.append(move.id)
pickings[move.picking_id.id] = 1
r = res.pop(0)
product_uos_qty = self.pool.get('stock.move').onchange_quantity(cr, uid, ids, move.product_id.id, r[0], move.product_id.uom_id.id, move.product_id.uos_id.id)['value']['product_uos_qty']
cr.execute('update stock_move set location_id=%s, product_qty=%s, product_uos_qty=%s where id=%s', (r[1], r[0],product_uos_qty, move.id))
while res:
r = res.pop(0)
product_uos_qty = self.pool.get('stock.move').onchange_quantity(cr, uid, ids, move.product_id.id, r[0], move.product_id.uom_id.id, move.product_id.uos_id.id)['value']['product_uos_qty']
move_id = self.copy(cr, uid, move.id, {'product_uos_qty': product_uos_qty, 'product_qty': r[0], 'location_id': r[1]})
done.append(move_id)
if done:
count += len(done)
self.write(cr, uid, done, {'state': 'assigned'})
if count:
for pick_id in pickings:
wf_service = netsvc.LocalService("workflow")
wf_service.trg_write(uid, 'stock.picking', pick_id, cr)
return count
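    # Hedged usage sketch: how scheduler-style code typically drives the
    # reservation above -- collect reservable moves and let check_assign()
    # report how many could be assigned. The search domain is illustrative.
    def _example_reserve_all(self, cr, uid):
        move_ids = self.search(cr, uid, [('state', 'in', ('confirmed', 'waiting'))])
        return self.check_assign(cr, uid, move_ids)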
def setlast_tracking(self, cr, uid, ids, context=None):
tracking_obj = self.pool.get('stock.tracking')
picking = self.browse(cr, uid, ids, context=context)[0].picking_id
if picking:
last_track = [line.tracking_id.id for line in picking.move_lines if line.tracking_id]
if not last_track:
last_track = tracking_obj.create(cr, uid, {}, context=context)
else:
last_track.sort()
last_track = last_track[-1]
self.write(cr, uid, ids, {'tracking_id': last_track})
return True
#
# Cancel move => cancel others move and pickings
#
def action_cancel(self, cr, uid, ids, context=None):
""" Cancels the moves and if all moves are cancelled it cancels the picking.
@return: True
"""
        if not ids:
return True
if context is None:
context = {}
pickings = set()
for move in self.browse(cr, uid, ids, context=context):
if move.state in ('confirmed', 'waiting', 'assigned', 'draft'):
if move.picking_id:
pickings.add(move.picking_id.id)
if move.move_dest_id and move.move_dest_id.state == 'waiting':
self.write(cr, uid, [move.move_dest_id.id], {'state': 'confirmed'}, context=context)
if context.get('call_unlink',False) and move.move_dest_id.picking_id:
wf_service = netsvc.LocalService("workflow")
wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr)
self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False}, context=context)
if not context.get('call_unlink',False):
for pick in self.pool.get('stock.picking').browse(cr, uid, list(pickings), context=context):
if all(move.state == 'cancel' for move in pick.move_lines):
self.pool.get('stock.picking').write(cr, uid, [pick.id], {'state': 'cancel'}, context=context)
wf_service = netsvc.LocalService("workflow")
for id in ids:
wf_service.trg_trigger(uid, 'stock.move', id, cr)
return True
def _get_accounting_data_for_valuation(self, cr, uid, move, context=None):
"""
Return the accounts and journal to use to post Journal Entries for the real-time
valuation of the move.
:param context: context dictionary that can explicitly mention the company to consider via the 'force_company' key
        :raise: osv.except_osv() if any mandatory account or journal is not defined.
        """
        product_obj = self.pool.get('product.product')
accounts = product_obj.get_product_accounts(cr, uid, move.product_id.id, context)
if move.location_id.valuation_out_account_id:
acc_src = move.location_id.valuation_out_account_id.id
else:
acc_src = accounts['stock_account_input']
if move.location_dest_id.valuation_in_account_id:
acc_dest = move.location_dest_id.valuation_in_account_id.id
else:
acc_dest = accounts['stock_account_output']
acc_valuation = accounts.get('property_stock_valuation_account_id', False)
journal_id = accounts['stock_journal']
if acc_dest == acc_valuation:
raise osv.except_osv(_('Error!'), _('Cannot create Journal Entry, Output Account of this product and Valuation account on category of this product are same.'))
if acc_src == acc_valuation:
raise osv.except_osv(_('Error!'), _('Cannot create Journal Entry, Input Account of this product and Valuation account on category of this product are same.'))
if not acc_src:
raise osv.except_osv(_('Error!'), _('Please define stock input account for this product or its category: "%s" (id: %d)') % \
(move.product_id.name, move.product_id.id,))
if not acc_dest:
raise osv.except_osv(_('Error!'), _('Please define stock output account for this product or its category: "%s" (id: %d)') % \
(move.product_id.name, move.product_id.id,))
if not journal_id:
raise osv.except_osv(_('Error!'), _('Please define journal on the product category: "%s" (id: %d)') % \
(move.product_id.categ_id.name, move.product_id.categ_id.id,))
if not acc_valuation:
raise osv.except_osv(_('Error!'), _('Please define inventory valuation account on the product category: "%s" (id: %d)') % \
(move.product_id.categ_id.name, move.product_id.categ_id.id,))
return journal_id, acc_src, acc_dest, acc_valuation
def _get_reference_accounting_values_for_valuation(self, cr, uid, move, context=None):
"""
Return the reference amount and reference currency representing the inventory valuation for this move.
These reference values should possibly be converted before being posted in Journals to adapt to the primary
and secondary currencies of the relevant accounts.
"""
product_uom_obj = self.pool.get('product.uom')
# by default the reference currency is that of the move's company
reference_currency_id = move.company_id.currency_id.id
default_uom = move.product_id.uom_id.id
qty = product_uom_obj._compute_qty(cr, uid, move.product_uom.id, move.product_qty, default_uom)
# if product is set to average price and a specific value was entered in the picking wizard,
# we use it
if move.product_id.cost_method == 'average' and move.price_unit:
reference_amount = qty * move.price_unit
reference_currency_id = move.price_currency_id.id or reference_currency_id
# Otherwise we default to the company's valuation price type, considering that the values of the
# valuation field are expressed in the default currency of the move's company.
else:
if context is None:
context = {}
currency_ctx = dict(context, currency_id = move.company_id.currency_id.id)
amount_unit = move.product_id.price_get('standard_price', context=currency_ctx)[move.product_id.id]
reference_amount = amount_unit * qty
return reference_amount, reference_currency_id
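    # Hedged numeric sketch of the valuation amount above for an 'average'
    # cost product: 5 units at a wizard price of 12.0 give a reference amount
    # of 60.0 in the move company's currency. Values are assumptions.
    def _example_reference_amount(self):
        qty, price_unit = 5.0, 12.0
        reference_amount = qty * price_unit  # -> 60.0
        return reference_amount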
def _create_product_valuation_moves(self, cr, uid, move, context=None):
"""
        Generate the appropriate accounting moves if the product being moved is subject
to real_time valuation tracking, and the source or destination location is
a transit location or is outside of the company.
"""
if move.product_id.valuation == 'real_time': # FIXME: product valuation should perhaps be a property?
if context is None:
context = {}
src_company_ctx = dict(context,force_company=move.location_id.company_id.id)
dest_company_ctx = dict(context,force_company=move.location_dest_id.company_id.id)
account_moves = []
# Outgoing moves (or cross-company output part)
if move.location_id.company_id \
and (move.location_id.usage == 'internal' and move.location_dest_id.usage != 'internal'\
or move.location_id.company_id != move.location_dest_id.company_id):
journal_id, acc_src, acc_dest, acc_valuation = self._get_accounting_data_for_valuation(cr, uid, move, src_company_ctx)
reference_amount, reference_currency_id = self._get_reference_accounting_values_for_valuation(cr, uid, move, src_company_ctx)
#returning goods to supplier
if move.location_dest_id.usage == 'supplier':
account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_valuation, acc_src, reference_amount, reference_currency_id, context))]
else:
account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_valuation, acc_dest, reference_amount, reference_currency_id, context))]
# Incoming moves (or cross-company input part)
if move.location_dest_id.company_id \
and (move.location_id.usage != 'internal' and move.location_dest_id.usage == 'internal'\
or move.location_id.company_id != move.location_dest_id.company_id):
journal_id, acc_src, acc_dest, acc_valuation = self._get_accounting_data_for_valuation(cr, uid, move, dest_company_ctx)
reference_amount, reference_currency_id = self._get_reference_accounting_values_for_valuation(cr, uid, move, src_company_ctx)
#goods return from customer
if move.location_id.usage == 'customer':
account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_dest, acc_valuation, reference_amount, reference_currency_id, context))]
else:
account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_src, acc_valuation, reference_amount, reference_currency_id, context))]
move_obj = self.pool.get('account.move')
for j_id, move_lines in account_moves:
move_obj.create(cr, uid,
{
'journal_id': j_id,
'line_id': move_lines,
'ref': move.picking_id and move.picking_id.name}, context=context)
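    # Hedged sketch of the posting direction used above, in terms of
    # _create_account_move_line(move, src_account, dest_account, ...): the
    # source account is credited and the destination account is debited, so
    # an outgoing internal -> customer move credits the valuation account and
    # debits the stock output account. Account ids below are placeholders.
    def _example_outgoing_posting(self):
        acc_valuation, acc_output = 101, 102  # hypothetical account ids
        credit_account, debit_account = acc_valuation, acc_output
        return credit_account, debit_account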
def action_done(self, cr, uid, ids, context=None):
""" Makes the move done and if all moves are done, it will finish the picking.
@return:
"""
picking_ids = []
move_ids = []
wf_service = netsvc.LocalService("workflow")
if context is None:
context = {}
todo = []
for move in self.browse(cr, uid, ids, context=context):
if move.state=="draft":
todo.append(move.id)
if todo:
self.action_confirm(cr, uid, todo, context=context)
todo = []
for move in self.browse(cr, uid, ids, context=context):
if move.state in ['done','cancel']:
continue
move_ids.append(move.id)
if move.picking_id:
picking_ids.append(move.picking_id.id)
if move.move_dest_id.id and (move.state != 'done'):
# Downstream move should only be triggered if this move is the last pending upstream move
other_upstream_move_ids = self.search(cr, uid, [('id','!=',move.id),('state','not in',['done','cancel']),
('move_dest_id','=',move.move_dest_id.id)], context=context)
if not other_upstream_move_ids:
self.write(cr, uid, [move.id], {'move_history_ids': [(4, move.move_dest_id.id)]})
if move.move_dest_id.state in ('waiting', 'confirmed'):
self.force_assign(cr, uid, [move.move_dest_id.id], context=context)
if move.move_dest_id.picking_id:
wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr)
if move.move_dest_id.auto_validate:
self.action_done(cr, uid, [move.move_dest_id.id], context=context)
self._create_product_valuation_moves(cr, uid, move, context=context)
if move.state not in ('confirmed','done','assigned'):
todo.append(move.id)
if todo:
self.action_confirm(cr, uid, todo, context=context)
self.write(cr, uid, move_ids, {'state': 'done', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context)
for id in move_ids:
wf_service.trg_trigger(uid, 'stock.move', id, cr)
for pick_id in picking_ids:
wf_service.trg_write(uid, 'stock.picking', pick_id, cr)
return True
def _create_account_move_line(self, cr, uid, move, src_account_id, dest_account_id, reference_amount, reference_currency_id, context=None):
"""
Generate the account.move.line values to post to track the stock valuation difference due to the
processing of the given stock move.
"""
# prepare default values considering that the destination accounts have the reference_currency_id as their main currency
partner_id = (move.picking_id.partner_id and self.pool.get('res.partner')._find_accounting_partner(move.picking_id.partner_id).id) or False
debit_line_vals = {
'name': move.name,
'product_id': move.product_id and move.product_id.id or False,
'quantity': move.product_qty,
'ref': move.picking_id and move.picking_id.name or False,
'date': time.strftime('%Y-%m-%d'),
'partner_id': partner_id,
'debit': reference_amount,
'account_id': dest_account_id,
}
credit_line_vals = {
'name': move.name,
'product_id': move.product_id and move.product_id.id or False,
'quantity': move.product_qty,
'ref': move.picking_id and move.picking_id.name or False,
'date': time.strftime('%Y-%m-%d'),
'partner_id': partner_id,
'credit': reference_amount,
'account_id': src_account_id,
}
        # If we are posting to accounts in a different currency, provide the values in both currencies
        # when compatible with the optional secondary currency on the account.
        # Financial accounts only accept amounts in a secondary currency if the account has no secondary
        # currency of its own, or if it is the same as that of the amount being posted.
account_obj = self.pool.get('account.account')
src_acct, dest_acct = account_obj.browse(cr, uid, [src_account_id, dest_account_id], context=context)
src_main_currency_id = src_acct.company_id.currency_id.id
dest_main_currency_id = dest_acct.company_id.currency_id.id
cur_obj = self.pool.get('res.currency')
if reference_currency_id != src_main_currency_id:
# fix credit line:
credit_line_vals['credit'] = cur_obj.compute(cr, uid, reference_currency_id, src_main_currency_id, reference_amount, context=context)
if (not src_acct.currency_id) or src_acct.currency_id.id == reference_currency_id:
credit_line_vals.update(currency_id=reference_currency_id, amount_currency=-reference_amount)
if reference_currency_id != dest_main_currency_id:
# fix debit line:
debit_line_vals['debit'] = cur_obj.compute(cr, uid, reference_currency_id, dest_main_currency_id, reference_amount, context=context)
if (not dest_acct.currency_id) or dest_acct.currency_id.id == reference_currency_id:
debit_line_vals.update(currency_id=reference_currency_id, amount_currency=reference_amount)
return [(0, 0, debit_line_vals), (0, 0, credit_line_vals)]
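    # Hedged sketch: the (0, 0, vals) triples returned above are the standard
    # ORM one2many "create" commands, consumed by the 'line_id' field of the
    # account.move created in _create_product_valuation_moves(). The amounts
    # and account ids below are illustrative.
    def _example_move_line_commands(self):
        debit_line = {'debit': 60.0, 'credit': 0.0, 'account_id': 102}
        credit_line = {'credit': 60.0, 'debit': 0.0, 'account_id': 101}
        return [(0, 0, debit_line), (0, 0, credit_line)]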
def unlink(self, cr, uid, ids, context=None):
if context is None:
context = {}
ctx = context.copy()
for move in self.browse(cr, uid, ids, context=context):
if move.state != 'draft' and not ctx.get('call_unlink', False):
raise osv.except_osv(_('User Error!'), _('You can only delete draft moves.'))
return super(stock_move, self).unlink(
cr, uid, ids, context=ctx)
# _create_lot function is not used anywhere
def _create_lot(self, cr, uid, ids, product_id, prefix=False):
""" Creates production lot
@return: Production lot id
"""
prodlot_obj = self.pool.get('stock.production.lot')
prodlot_id = prodlot_obj.create(cr, uid, {'prefix': prefix, 'product_id': product_id})
return prodlot_id
def action_scrap(self, cr, uid, ids, quantity, location_id, context=None):
""" Move the scrap/damaged product into scrap location
@param cr: the database cursor
@param uid: the user id
@param ids: ids of stock move object to be scrapped
@param quantity : specify scrap qty
@param location_id : specify scrap location
@param context: context arguments
        @return: Scrapped lines
        """
        # the quantity should be expressed in the move's UoM
if quantity <= 0:
raise osv.except_osv(_('Warning!'), _('Please provide a positive quantity to scrap.'))
res = []
for move in self.browse(cr, uid, ids, context=context):
source_location = move.location_id
if move.state == 'done':
source_location = move.location_dest_id
if source_location.usage != 'internal':
#restrict to scrap from a virtual location because it's meaningless and it may introduce errors in stock ('creating' new products from nowhere)
raise osv.except_osv(_('Error!'), _('Forbidden operation: it is not allowed to scrap products from a virtual location.'))
            move_qty = move.product_qty
            # guard against zero-quantity moves to avoid a ZeroDivisionError
            uos_qty = move_qty and (quantity / move_qty * move.product_uos_qty) or 0.0
default_val = {
'location_id': source_location.id,
'product_qty': quantity,
'product_uos_qty': uos_qty,
'state': move.state,
'scrapped': True,
'location_dest_id': location_id,
'tracking_id': move.tracking_id.id,
'prodlot_id': move.prodlot_id.id,
}
new_move = self.copy(cr, uid, move.id, default_val)
res += [new_move]
product_obj = self.pool.get('product.product')
for product in product_obj.browse(cr, uid, [move.product_id.id], context=context):
if move.picking_id:
uom = product.uom_id.name if product.uom_id else ''
message = _("%s %s %s has been <b>moved to</b> scrap.") % (quantity, uom, product.name)
move.picking_id.message_post(body=message)
self.action_done(cr, uid, res, context=context)
return res
# action_split function is not used anywhere
# FIXME: deprecate this method
def action_split(self, cr, uid, ids, quantity, split_by_qty=1, prefix=False, with_lot=True, context=None):
""" Split Stock Move lines into production lot which specified split by quantity.
@param cr: the database cursor
@param uid: the user id
@param ids: ids of stock move object to be splited
@param split_by_qty : specify split by qty
@param prefix : specify prefix of production lot
@param with_lot : if true, prodcution lot will assign for split line otherwise not.
@param context: context arguments
@return: Splited move lines
"""
if context is None:
context = {}
if quantity <= 0:
            raise osv.except_osv(_('Warning!'), _('Please provide a proper quantity.'))
res = []
for move in self.browse(cr, uid, ids, context=context):
if split_by_qty <= 0 or quantity == 0:
return res
uos_qty = split_by_qty / move.product_qty * move.product_uos_qty
quantity_rest = quantity % split_by_qty
            # the remainder's UoS quantity must be proportional to quantity_rest,
            # not to split_by_qty
            uos_qty_rest = quantity_rest / move.product_qty * move.product_uos_qty
update_val = {
'product_qty': split_by_qty,
'product_uos_qty': uos_qty,
}
for idx in range(int(quantity//split_by_qty)):
if not idx and move.product_qty<=quantity:
current_move = move.id
else:
current_move = self.copy(cr, uid, move.id, {'state': move.state})
res.append(current_move)
if with_lot:
update_val['prodlot_id'] = self._create_lot(cr, uid, [current_move], move.product_id.id)
self.write(cr, uid, [current_move], update_val)
if quantity_rest > 0:
idx = int(quantity//split_by_qty)
update_val['product_qty'] = quantity_rest
update_val['product_uos_qty'] = uos_qty_rest
if not idx and move.product_qty<=quantity:
current_move = move.id
else:
current_move = self.copy(cr, uid, move.id, {'state': move.state})
res.append(current_move)
if with_lot:
update_val['prodlot_id'] = self._create_lot(cr, uid, [current_move], move.product_id.id)
self.write(cr, uid, [current_move], update_val)
return res
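    # Hedged worked example of the split arithmetic above: splitting 7 units
    # by 2 yields three full moves of 2 units plus a remainder move of 1 unit
    # (quantity % split_by_qty). Pure-python sketch, no database access.
    def _example_split_counts(self):
        quantity, split_by_qty = 7.0, 2.0
        full_moves = int(quantity // split_by_qty)  # -> 3
        quantity_rest = quantity % split_by_qty     # -> 1.0
        return full_moves, quantity_rest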
def action_consume(self, cr, uid, ids, quantity, location_id=False, context=None):
""" Consumed product with specific quatity from specific source location
@param cr: the database cursor
@param uid: the user id
@param ids: ids of stock move object to be consumed
@param quantity : specify consume quantity
@param location_id : specify source location
@param context: context arguments
@return: Consumed lines
"""
        # the quantity should be expressed in the move's UoM
if context is None:
context = {}
if quantity <= 0:
            raise osv.except_osv(_('Warning!'), _('Please provide a proper quantity.'))
res = []
for move in self.browse(cr, uid, ids, context=context):
move_qty = move.product_qty
if move_qty <= 0:
raise osv.except_osv(_('Error!'), _('Cannot consume a move with negative or zero quantity.'))
quantity_rest = move.product_qty
quantity_rest -= quantity
uos_qty_rest = quantity_rest / move_qty * move.product_uos_qty
if quantity_rest <= 0:
quantity_rest = 0
uos_qty_rest = 0
quantity = move.product_qty
uos_qty = quantity / move_qty * move.product_uos_qty
if float_compare(quantity_rest, 0, precision_rounding=move.product_id.uom_id.rounding):
default_val = {
'product_qty': quantity,
'product_uos_qty': uos_qty,
'state': move.state,
'location_id': location_id or move.location_id.id,
}
current_move = self.copy(cr, uid, move.id, default_val)
res += [current_move]
update_val = {}
update_val['product_qty'] = quantity_rest
update_val['product_uos_qty'] = uos_qty_rest
self.write(cr, uid, [move.id], update_val)
else:
quantity_rest = quantity
uos_qty_rest = uos_qty
res += [move.id]
update_val = {
'product_qty' : quantity_rest,
'product_uos_qty' : uos_qty_rest,
'location_id': location_id or move.location_id.id,
}
self.write(cr, uid, [move.id], update_val)
self.action_done(cr, uid, res, context=context)
return res
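    # Hedged worked example of the consume arithmetic above: consuming 3 of a
    # 10-unit move (with a UoS quantity of 20) leaves 7 units / 14 UoS on the
    # original move. Pure-python sketch with illustrative values.
    def _example_consume_remainder(self):
        move_qty, product_uos_qty, quantity = 10.0, 20.0, 3.0
        quantity_rest = move_qty - quantity                        # -> 7.0
        uos_qty_rest = quantity_rest / move_qty * product_uos_qty  # -> 14.0
        return quantity_rest, uos_qty_rest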
# FIXME: needs refactoring, this code is partially duplicated in stock_picking.do_partial()!
def do_partial(self, cr, uid, ids, partial_datas, context=None):
""" Makes partial pickings and moves done.
@param partial_datas: Dictionary containing details of partial picking
like partner_id, delivery_date, delivery
moves with product_id, product_qty, uom
"""
res = {}
picking_obj = self.pool.get('stock.picking')
product_obj = self.pool.get('product.product')
currency_obj = self.pool.get('res.currency')
uom_obj = self.pool.get('product.uom')
wf_service = netsvc.LocalService("workflow")
if context is None:
context = {}
complete, too_many, too_few = [], [], []
move_product_qty = {}
prodlot_ids = {}
for move in self.browse(cr, uid, ids, context=context):
if move.state in ('done', 'cancel'):
continue
partial_data = partial_datas.get('move%s'%(move.id), False)
assert partial_data, _('Missing partial picking data for move #%s.') % (move.id)
product_qty = partial_data.get('product_qty',0.0)
move_product_qty[move.id] = product_qty
product_uom = partial_data.get('product_uom',False)
product_price = partial_data.get('product_price',0.0)
product_currency = partial_data.get('product_currency',False)
prodlot_ids[move.id] = partial_data.get('prodlot_id')
if move.product_qty == product_qty:
complete.append(move)
elif move.product_qty > product_qty:
too_few.append(move)
else:
too_many.append(move)
# Average price computation
if (move.picking_id.type == 'in') and (move.product_id.cost_method == 'average'):
product = product_obj.browse(cr, uid, move.product_id.id)
move_currency_id = move.company_id.currency_id.id
context['currency_id'] = move_currency_id
qty = uom_obj._compute_qty(cr, uid, product_uom, product_qty, product.uom_id.id)
if qty > 0:
new_price = currency_obj.compute(cr, uid, product_currency,
move_currency_id, product_price, round=False)
new_price = uom_obj._compute_price(cr, uid, product_uom, new_price,
product.uom_id.id)
if product.qty_available <= 0:
new_std_price = new_price
else:
# Get the standard price
amount_unit = product.price_get('standard_price', context=context)[product.id]
new_std_price = ((amount_unit * product.qty_available)\
+ (new_price * qty))/(product.qty_available + qty)
product_obj.write(cr, uid, [product.id],{'standard_price': new_std_price})
# Record the values that were chosen in the wizard, so they can be
# used for inventory valuation if real-time valuation is enabled.
self.write(cr, uid, [move.id],
{'price_unit': product_price,
'price_currency_id': product_currency,
})
for move in too_few:
product_qty = move_product_qty[move.id]
if product_qty != 0:
defaults = {
'product_qty' : product_qty,
'product_uos_qty': product_qty,
'picking_id' : move.picking_id.id,
'state': 'assigned',
'move_dest_id': False,
'price_unit': move.price_unit,
}
prodlot_id = prodlot_ids[move.id]
if prodlot_id:
defaults.update(prodlot_id=prodlot_id)
new_move = self.copy(cr, uid, move.id, defaults)
complete.append(self.browse(cr, uid, new_move))
self.write(cr, uid, [move.id],
{
'product_qty': move.product_qty - product_qty,
'product_uos_qty': move.product_qty - product_qty,
'prodlot_id': False,
'tracking_id': False,
})
for move in too_many:
self.write(cr, uid, [move.id],
{
'product_qty': move.product_qty,
'product_uos_qty': move.product_qty,
})
complete.append(move)
for move in complete:
if prodlot_ids.get(move.id):
self.write(cr, uid, [move.id],{'prodlot_id': prodlot_ids.get(move.id)})
self.action_done(cr, uid, [move.id], context=context)
            if move.picking_id.id:
# TOCHECK : Done picking if all moves are done
cr.execute("""
SELECT move.id FROM stock_picking pick
RIGHT JOIN stock_move move ON move.picking_id = pick.id AND move.state = %s
WHERE pick.id = %s""",
('done', move.picking_id.id))
res = cr.fetchall()
if len(res) == len(move.picking_id.move_lines):
picking_obj.action_move(cr, uid, [move.picking_id.id])
wf_service.trg_validate(uid, 'stock.picking', move.picking_id.id, 'button_done', cr)
return [move.id for move in complete]
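    # Hedged numeric sketch of the moving-average price update used above:
    # 10 units on hand at 5.0 plus 5 units received at 8.0 give a new
    # standard price of (10*5 + 5*8) / 15 = 6.0. Values are illustrative.
    def _example_average_price(self):
        qty_available, amount_unit = 10.0, 5.0
        qty, new_price = 5.0, 8.0
        return ((amount_unit * qty_available)
                + (new_price * qty)) / (qty_available + qty)  # -> 6.0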
stock_move()
class stock_inventory(osv.osv):
_name = "stock.inventory"
_description = "Inventory"
_columns = {
'name': fields.char('Inventory Reference', size=64, required=True, readonly=True, states={'draft': [('readonly', False)]}),
'date': fields.datetime('Creation Date', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'date_done': fields.datetime('Date done'),
'inventory_line_id': fields.one2many('stock.inventory.line', 'inventory_id', 'Inventories', readonly=True, states={'draft': [('readonly', False)]}),
'move_ids': fields.many2many('stock.move', 'stock_inventory_move_rel', 'inventory_id', 'move_id', 'Created Moves'),
'state': fields.selection( (('draft', 'Draft'), ('cancel','Cancelled'), ('confirm','Confirmed'), ('done', 'Done')), 'Status', readonly=True, select=True),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True, readonly=True, states={'draft':[('readonly',False)]}),
}
_defaults = {
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'state': 'draft',
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c)
}
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default = default.copy()
default.update({'move_ids': [], 'date_done': False})
return super(stock_inventory, self).copy(cr, uid, id, default, context=context)
def _inventory_line_hook(self, cr, uid, inventory_line, move_vals):
""" Creates a stock move from an inventory line
@param inventory_line:
@param move_vals:
@return:
"""
return self.pool.get('stock.move').create(cr, uid, move_vals)
def action_done(self, cr, uid, ids, context=None):
""" Finish the inventory
@return: True
"""
if context is None:
context = {}
move_obj = self.pool.get('stock.move')
for inv in self.browse(cr, uid, ids, context=context):
move_obj.action_done(cr, uid, [x.id for x in inv.move_ids], context=context)
self.write(cr, uid, [inv.id], {'state':'done', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S')}, context=context)
return True
def action_confirm(self, cr, uid, ids, context=None):
""" Confirm the inventory and writes its finished date
@return: True
"""
if context is None:
context = {}
        # to perform the correct inventory corrections we need to analyze stock location by
        # location, never recursively, so we use a special context
product_context = dict(context, compute_child=False)
location_obj = self.pool.get('stock.location')
for inv in self.browse(cr, uid, ids, context=context):
move_ids = []
for line in inv.inventory_line_id:
pid = line.product_id.id
product_context.update(uom=line.product_uom.id, to_date=inv.date, date=inv.date, prodlot_id=line.prod_lot_id.id)
amount = location_obj._product_get(cr, uid, line.location_id.id, [pid], product_context)[pid]
change = line.product_qty - amount
lot_id = line.prod_lot_id.id
if change:
location_id = line.product_id.property_stock_inventory.id
value = {
'name': _('INV:') + (line.inventory_id.name or ''),
'product_id': line.product_id.id,
'product_uom': line.product_uom.id,
'prodlot_id': lot_id,
'date': inv.date,
}
if change > 0:
value.update( {
'product_qty': change,
'location_id': location_id,
'location_dest_id': line.location_id.id,
})
else:
value.update( {
'product_qty': -change,
'location_id': line.location_id.id,
'location_dest_id': location_id,
})
move_ids.append(self._inventory_line_hook(cr, uid, line, value))
self.write(cr, uid, [inv.id], {'state': 'confirm', 'move_ids': [(6, 0, move_ids)]})
self.pool.get('stock.move').action_confirm(cr, uid, move_ids, context=context)
return True
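    # Hedged sketch of the correction rule above: a positive difference
    # (counted > theoretical) moves goods *into* the inventoried location
    # from the product's inventory-loss location; a negative difference does
    # the opposite. Quantities are illustrative.
    def _example_inventory_change(self):
        counted, theoretical = 12.0, 10.0
        change = counted - theoretical  # -> +2.0: goods enter the location
        return change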
def action_cancel_draft(self, cr, uid, ids, context=None):
""" Cancels the stock move and change inventory state to draft.
@return: True
"""
for inv in self.browse(cr, uid, ids, context=context):
self.pool.get('stock.move').action_cancel(cr, uid, [x.id for x in inv.move_ids], context=context)
self.write(cr, uid, [inv.id], {'state':'draft'}, context=context)
return True
def action_cancel_inventory(self, cr, uid, ids, context=None):
""" Cancels both stock move and inventory
@return: True
"""
move_obj = self.pool.get('stock.move')
account_move_obj = self.pool.get('account.move')
for inv in self.browse(cr, uid, ids, context=context):
move_obj.action_cancel(cr, uid, [x.id for x in inv.move_ids], context=context)
for move in inv.move_ids:
account_move_ids = account_move_obj.search(cr, uid, [('name', '=', move.name)])
if account_move_ids:
account_move_data_l = account_move_obj.read(cr, uid, account_move_ids, ['state'], context=context)
for account_move in account_move_data_l:
if account_move['state'] == 'posted':
raise osv.except_osv(_('User Error!'),
_('In order to cancel this inventory, you must first unpost related journal entries.'))
account_move_obj.unlink(cr, uid, [account_move['id']], context=context)
self.write(cr, uid, [inv.id], {'state': 'cancel'}, context=context)
return True
stock_inventory()
class stock_inventory_line(osv.osv):
_name = "stock.inventory.line"
_description = "Inventory Line"
_rec_name = "inventory_id"
_columns = {
'inventory_id': fields.many2one('stock.inventory', 'Inventory', ondelete='cascade', select=True),
'location_id': fields.many2one('stock.location', 'Location', required=True),
'product_id': fields.many2one('product.product', 'Product', required=True, select=True),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure')),
'company_id': fields.related('inventory_id','company_id',type='many2one',relation='res.company',string='Company',store=True, select=True, readonly=True),
'prod_lot_id': fields.many2one('stock.production.lot', 'Serial Number', domain="[('product_id','=',product_id)]"),
'state': fields.related('inventory_id','state',type='char',string='Status',readonly=True),
}
def _default_stock_location(self, cr, uid, context=None):
try:
location_model, location_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'stock_location_stock')
with tools.mute_logger('openerp.osv.orm'):
self.pool.get('stock.location').check_access_rule(cr, uid, [location_id], 'read', context=context)
except (orm.except_orm, ValueError):
location_id = False
return location_id
_defaults = {
'location_id': _default_stock_location
}
def on_change_product_id(self, cr, uid, ids, location_id, product, uom=False, to_date=False):
""" Changes UoM and name if product_id changes.
@param location_id: Location id
@param product: Changed product_id
@param uom: UoM product
@return: Dictionary of changed values
"""
if not product:
return {'value': {'product_qty': 0.0, 'product_uom': False, 'prod_lot_id': False}}
obj_product = self.pool.get('product.product').browse(cr, uid, product)
uom = uom or obj_product.uom_id.id
amount = self.pool.get('stock.location')._product_get(cr, uid, location_id, [product], {'uom': uom, 'to_date': to_date, 'compute_child': False})[product]
result = {'product_qty': amount, 'product_uom': uom, 'prod_lot_id': False}
return {'value': result}
stock_inventory_line()
#----------------------------------------------------------
# Stock Warehouse
#----------------------------------------------------------
class stock_warehouse(osv.osv):
_name = "stock.warehouse"
_description = "Warehouse"
_columns = {
'name': fields.char('Name', size=128, required=True, select=True),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True),
'partner_id': fields.many2one('res.partner', 'Owner Address'),
'lot_input_id': fields.many2one('stock.location', 'Location Input', required=True, domain=[('usage','<>','view')]),
'lot_stock_id': fields.many2one('stock.location', 'Location Stock', required=True, domain=[('usage','=','internal')]),
'lot_output_id': fields.many2one('stock.location', 'Location Output', required=True, domain=[('usage','<>','view')]),
}
def _default_lot_input_stock_id(self, cr, uid, context=None):
try:
lot_input_stock_model, lot_input_stock_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'stock_location_stock')
with tools.mute_logger('openerp.osv.orm'):
self.pool.get('stock.location').check_access_rule(cr, uid, [lot_input_stock_id], 'read', context=context)
except (ValueError, orm.except_orm):
            # the user does not have read access on the location or it does not exist
lot_input_stock_id = False
return lot_input_stock_id
def _default_lot_output_id(self, cr, uid, context=None):
try:
lot_output_model, lot_output_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'stock_location_output')
with tools.mute_logger('openerp.osv.orm'):
self.pool.get('stock.location').check_access_rule(cr, uid, [lot_output_id], 'read', context=context)
except (ValueError, orm.except_orm):
            # the user does not have read access on the location or it does not exist
lot_output_id = False
return lot_output_id
_defaults = {
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c),
'lot_input_id': _default_lot_input_stock_id,
'lot_stock_id': _default_lot_input_stock_id,
'lot_output_id': _default_lot_output_id,
}
stock_warehouse()
#----------------------------------------------------------
# "Empty" Classes that are used to vary from the original stock.picking (that are dedicated to the internal pickings)
# in order to offer a different usability with different views, labels, available reports/wizards...
#----------------------------------------------------------
class stock_picking_in(osv.osv):
_name = "stock.picking.in"
_inherit = "stock.picking"
_table = "stock_picking"
_description = "Incoming Shipments"
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
return self.pool.get('stock.picking').search(cr, user, args, offset, limit, order, context, count)
def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
return self.pool.get('stock.picking').read(cr, uid, ids, fields=fields, context=context, load=load)
def check_access_rights(self, cr, uid, operation, raise_exception=True):
        # override in order to redirect the check of access rights to the stock.picking object
return self.pool.get('stock.picking').check_access_rights(cr, uid, operation, raise_exception=raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
        # override in order to redirect the check of access rules to the stock.picking object
return self.pool.get('stock.picking').check_access_rule(cr, uid, ids, operation, context=context)
def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
        # override in order to trigger the workflow of stock.picking at the end of create, write and unlink operations
        # instead of its own workflow (which does not exist)
return self.pool.get('stock.picking')._workflow_trigger(cr, uid, ids, trigger, context=context)
def _workflow_signal(self, cr, uid, ids, signal, context=None):
        # override in order to fire the workflow signal on the given stock.picking workflow instance
        # instead of its own workflow (which does not exist)
return self.pool.get('stock.picking')._workflow_signal(cr, uid, ids, signal, context=context)
def message_post(self, *args, **kwargs):
"""Post the message on stock.picking to be able to see it in the form view when using the chatter"""
return self.pool.get('stock.picking').message_post(*args, **kwargs)
def message_subscribe(self, *args, **kwargs):
"""Send the subscribe action on stock.picking model as it uses _name in request"""
return self.pool.get('stock.picking').message_subscribe(*args, **kwargs)
def message_unsubscribe(self, *args, **kwargs):
"""Send the unsubscribe action on stock.picking model to match with subscribe"""
return self.pool.get('stock.picking').message_unsubscribe(*args, **kwargs)
def default_get(self, cr, uid, fields_list, context=None):
# merge defaults from stock.picking with possible defaults defined on stock.picking.in
defaults = self.pool['stock.picking'].default_get(cr, uid, fields_list, context=context)
in_defaults = super(stock_picking_in, self).default_get(cr, uid, fields_list, context=context)
defaults.update(in_defaults)
return defaults
_columns = {
'backorder_id': fields.many2one('stock.picking.in', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
'state': fields.selection(
[('draft', 'Draft'),
('auto', 'Waiting Another Operation'),
('confirmed', 'Waiting Availability'),
('assigned', 'Ready to Receive'),
('done', 'Received'),
('cancel', 'Cancelled'),],
'Status', readonly=True, select=True,
help="""* Draft: not confirmed yet and will not be scheduled until confirmed\n
* Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
* Waiting Availability: still waiting for the availability of products\n
* Ready to Receive: products reserved, simply waiting for confirmation.\n
* Received: has been processed, can't be modified or cancelled anymore\n
* Cancelled: has been cancelled, can't be confirmed anymore"""),
}
_defaults = {
'type': 'in',
}
class stock_picking_out(osv.osv):
_name = "stock.picking.out"
_inherit = "stock.picking"
_table = "stock_picking"
_description = "Delivery Orders"
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
return self.pool.get('stock.picking').search(cr, user, args, offset, limit, order, context, count)
def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
return self.pool.get('stock.picking').read(cr, uid, ids, fields=fields, context=context, load=load)
def check_access_rights(self, cr, uid, operation, raise_exception=True):
        # override in order to redirect the check of access rights to the stock.picking object
return self.pool.get('stock.picking').check_access_rights(cr, uid, operation, raise_exception=raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
        # override in order to redirect the check of access rules to the stock.picking object
return self.pool.get('stock.picking').check_access_rule(cr, uid, ids, operation, context=context)
def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
        # override in order to trigger the workflow of stock.picking at the end of create, write and unlink operations
        # instead of its own workflow (which does not exist)
return self.pool.get('stock.picking')._workflow_trigger(cr, uid, ids, trigger, context=context)
def _workflow_signal(self, cr, uid, ids, signal, context=None):
        # override in order to fire the workflow signal on the given stock.picking workflow instance
        # instead of its own workflow (which does not exist)
return self.pool.get('stock.picking')._workflow_signal(cr, uid, ids, signal, context=context)
def message_post(self, *args, **kwargs):
"""Post the message on stock.picking to be able to see it in the form view when using the chatter"""
return self.pool.get('stock.picking').message_post(*args, **kwargs)
def message_subscribe(self, *args, **kwargs):
"""Send the subscribe action on stock.picking model as it uses _name in request"""
return self.pool.get('stock.picking').message_subscribe(*args, **kwargs)
def message_unsubscribe(self, *args, **kwargs):
"""Send the unsubscribe action on stock.picking model to match with subscribe"""
return self.pool.get('stock.picking').message_unsubscribe(*args, **kwargs)
def default_get(self, cr, uid, fields_list, context=None):
# merge defaults from stock.picking with possible defaults defined on stock.picking.out
defaults = self.pool['stock.picking'].default_get(cr, uid, fields_list, context=context)
out_defaults = super(stock_picking_out, self).default_get(cr, uid, fields_list, context=context)
defaults.update(out_defaults)
return defaults
_columns = {
'backorder_id': fields.many2one('stock.picking.out', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
'state': fields.selection(
[('draft', 'Draft'),
('auto', 'Waiting Another Operation'),
('confirmed', 'Waiting Availability'),
('assigned', 'Ready to Deliver'),
('done', 'Delivered'),
('cancel', 'Cancelled'),],
'Status', readonly=True, select=True,
help="""* Draft: not confirmed yet and will not be scheduled until confirmed\n
* Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
* Waiting Availability: still waiting for the availability of products\n
* Ready to Deliver: products reserved, simply waiting for confirmation.\n
* Delivered: has been processed, can't be modified or cancelled anymore\n
* Cancelled: has been cancelled, can't be confirmed anymore"""),
}
_defaults = {
'type': 'out',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| alanjw/GreenOpenERP-Win-X86 | openerp/addons/stock/stock.py | Python | agpl-3.0 | 163,768 |
from django import forms
# future use | DemocracyFoundation/Epitome | Agora/forms.py | Python | agpl-3.0 | 40 |
"""
Block Depth Transformer
"""
from __future__ import absolute_import
from openedx.core.djangoapps.content.block_structure.transformer import BlockStructureTransformer
class BlockDepthTransformer(BlockStructureTransformer):
"""
Keep track of the depth of each block within the block structure. In case
of multiple paths to a given node (in a DAG), use the shallowest depth.
"""
WRITE_VERSION = 1
READ_VERSION = 1
BLOCK_DEPTH = 'block_depth'
def __init__(self, requested_depth=None):
self.requested_depth = requested_depth
@classmethod
def name(cls):
return "blocks_api:block_depth"
@classmethod
def get_block_depth(cls, block_structure, block_key):
"""
Return the precalculated depth of a block within the block_structure:
Arguments:
block_structure: a BlockStructure instance
block_key: the key of the block whose depth we want to know
Returns:
int
"""
return block_structure.get_transformer_block_field(
block_key,
cls,
cls.BLOCK_DEPTH,
)
def transform(self, usage_info, block_structure):
"""
Mutates block_structure based on the given usage_info.
"""
for block_key in block_structure.topological_traversal():
parents = block_structure.get_parents(block_key)
if parents:
block_depth = min(
self.get_block_depth(block_structure, parent_key)
for parent_key in parents
) + 1
else:
block_depth = 0
block_structure.set_transformer_block_field(
block_key,
self,
self.BLOCK_DEPTH,
block_depth
)
if self.requested_depth is not None:
block_structure.remove_block_traversal(
lambda block_key: self.get_block_depth(block_structure, block_key) > self.requested_depth
)
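# Illustrative sketch (assumption: plain dicts stand in for a real
# BlockStructure): the shallowest-depth rule implemented above is
# depth = min(parent depths) + 1, so in a DAG the shortest path wins.
if __name__ == '__main__':
    parents = {'root': [], 'a': ['root'], 'b': ['root', 'a']}
    depth = {}
    for key in ('root', 'a', 'b'):  # keys listed in topological order
        depth[key] = min(depth[p] for p in parents[key]) + 1 if parents[key] else 0
    print(depth)  # {'root': 0, 'a': 1, 'b': 1}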
| ESOedX/edx-platform | lms/djangoapps/course_api/blocks/transformers/block_depth.py | Python | agpl-3.0 | 2,059 |
# Copyright 2018 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import ast
from odoo import api, exceptions, models, _
class MailComposeMessage(models.TransientModel):
_inherit = 'mail.compose.message'
@api.model
def _get_priorities(self):
"""
Load priorities from parameters.
:return: dict
"""
key = 'mail.sending.job.priorities'
try:
priorities = ast.literal_eval(
self.env['ir.config_parameter'].sudo().get_param(
key, default='{}'))
# Catch the exception to raise an understandable error message
except (ValueError, SyntaxError):
raise exceptions.UserError(
_("Error to load the system parameter (%s) "
"of priorities") % key)
# As literal_eval can evaluate the string to any type, check that we
# really got a dict
if not isinstance(priorities, dict):
raise exceptions.UserError(
_("Error to load the system parameter (%s) of priorities.\n"
"Invalid dictionary") % key)
return priorities
@api.multi
def send_mail(self, auto_commit=False):
"""
Set a priority on subsequently generated mail.mail records, using the
priorities defined in the configuration.
:return: dict/action
"""
active_ids = self.env.context.get('active_ids')
default_priority = self.env.context.get('default_mail_job_priority')
if active_ids and not default_priority:
priorities = self._get_priorities()
size = len(active_ids)
limits = [lim for lim in priorities if lim <= size]
if limits:
prio = priorities.get(max(limits))
self = self.with_context(default_mail_job_priority=prio)
return super().send_mail(auto_commit=auto_commit)
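# Illustrative sketch (the parameter value "{1: 30, 100: 20, 1000: 10}" is an
# assumption, not shipped configuration): how send_mail() above resolves the
# 'mail.sending.job.priorities' mapping -- keys are record-count thresholds,
# values the job priority applied once that many records are selected.
if __name__ == '__main__':
    priorities = {1: 30, 100: 20, 1000: 10}
    size = 250  # pretend 250 records were selected
    limits = [lim for lim in priorities if lim <= size]  # [1, 100]
    print(priorities.get(max(limits)))  # -> 20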
| mozaik-association/mozaik | mail_job_priority/wizards/mail_compose_message.py | Python | agpl-3.0 | 1,920 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# res_company
# Copyright (c) 2013 Codeback Software S.L. (http://codeback.es)
# @author: Miguel García <miguel@codeback.es>
# @author: Javier Fuentes <javier@codeback.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from datetime import datetime, timedelta
from openerp.tools.translate import _
class res_company(osv.osv):
"""añadimos los nuevos campos"""
_name = "res.company"
_inherit = "res.company"
_columns = {
'web_discount': fields.float('Web discount (%)'),
}
| codeback/openerp-cbk_company_web_discount | res_company.py | Python | agpl-3.0 | 1,385 |
# Copyright (c) 2016 Sebastian Kanis
# This file is part of pi-led-control.
# pi-led-control is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# pi-led-control is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with pi-led-control. If not, see <http://www.gnu.org/licenses/>.
import datetime
import logging
from server.programs.abstractprogram import AbstractProgram
class ScheduledProgram(AbstractProgram):
def __init__(self, program, timeOfDay):
super().__init__()
self._program = program
self._timeOfDay = timeOfDay
def run(self):
now = datetime.datetime.now()
secondsInCurrentDay = now.hour * 3600 + now.minute * 60 + now.second
if secondsInCurrentDay < self._timeOfDay:
sleepDuration = self._timeOfDay - secondsInCurrentDay
else:
sleepDuration = self._timeOfDay + 3600 * 24 - secondsInCurrentDay
logging.getLogger("main").info("sleeping for " + str(sleepDuration) + " seconds")
self._waitIfNotStopped(sleepDuration)
self._program.run()
def setThreadStopEvent(self, threadStopEvent):
self.threadStopEvent = threadStopEvent
self._program.setThreadStopEvent(threadStopEvent)
def setColorSetter(self, colorSetter):
self._colorSetter = colorSetter
self._program.setColorSetter(colorSetter)
def getCurrentColor(self):
return self._program.getCurrentColor()
def setLastColor(self, lastColor):
self._program.setLastColor(lastColor)
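# Worked example (standalone sketch, not used by the class): the wrap-around
# rule in run() above -- if the target time of day has already passed, sleep
# until the same time tomorrow.
if __name__ == '__main__':
    seconds_in_current_day = 20 * 3600  # now: 20:00
    time_of_day = 6 * 3600              # target: 06:00
    if seconds_in_current_day < time_of_day:
        sleep_duration = time_of_day - seconds_in_current_day
    else:
        sleep_duration = time_of_day + 3600 * 24 - seconds_in_current_day
    print(sleep_duration)  # 36000 seconds, i.e. 10 hours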
| s0riak/pi-led-control | src/server/programs/scheduledprogram.py | Python | agpl-3.0 | 1,953 |
from pathlib import Path
from inxs.cli import main as _main
from tests import equal_documents
def main(*args):
_args = ()
for arg in args:
if isinstance(arg, Path):
_args += (str(arg),)
else:
_args += (arg,)
_main(_args)
# TODO case-study with this use-case
def test_mods_to_tei(datadir):
main("--inplace", datadir / "mods_to_tei.py", datadir / "mods_to_tei.xml")
assert equal_documents(datadir / "mods_to_tei.xml", datadir / "mods_to_tei_exp.xml")
| funkyfuture/inxs | tests/test_cli.py | Python | agpl-3.0 | 515 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import django_pgjson.fields
import django.utils.timezone
import django.db.models.deletion
import djorm_pgarray.fields
import taiga.projects.history.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('users', '0002_auto_20140903_0916'),
]
operations = [
migrations.CreateModel(
name='Membership',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('is_owner', models.BooleanField(default=False)),
('email', models.EmailField(max_length=255, null=True, default=None, verbose_name='email', blank=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='creado el')),
('token', models.CharField(max_length=60, null=True, default=None, verbose_name='token', blank=True)),
('invited_by_id', models.IntegerField(null=True, blank=True)),
],
options={
'ordering': ['project', 'user__full_name', 'user__username', 'user__email', 'email'],
'verbose_name_plural': 'memberships',
'permissions': (('view_membership', 'Can view membership'),),
'verbose_name': 'membership',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('tags', djorm_pgarray.fields.TextArrayField(dbtype='text', verbose_name='tags')),
('name', models.CharField(max_length=250, unique=True, verbose_name='name')),
('slug', models.SlugField(max_length=250, unique=True, verbose_name='slug', blank=True)),
('description', models.TextField(verbose_name='description')),
('created_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='created date')),
('modified_date', models.DateTimeField(verbose_name='modified date')),
('total_milestones', models.IntegerField(null=True, default=0, verbose_name='total of milestones', blank=True)),
('total_story_points', models.FloatField(default=0, verbose_name='total story points')),
('is_backlog_activated', models.BooleanField(default=True, verbose_name='active backlog panel')),
('is_kanban_activated', models.BooleanField(default=False, verbose_name='active kanban panel')),
('is_wiki_activated', models.BooleanField(default=True, verbose_name='active wiki panel')),
('is_issues_activated', models.BooleanField(default=True, verbose_name='active issues panel')),
('videoconferences', models.CharField(max_length=250, null=True, choices=[('appear-in', 'AppearIn'), ('talky', 'Talky'), ('jitsi', 'Jitsi')], verbose_name='videoconference system', blank=True)),
('videoconferences_salt', models.CharField(max_length=250, null=True, verbose_name='videoconference room salt', blank=True)),
('anon_permissions', djorm_pgarray.fields.TextArrayField(choices=[('view_project', 'View project'), ('view_milestones', 'View milestones'), ('view_us', 'View user stories'), ('view_tasks', 'View tasks'), ('view_issues', 'View issues'), ('view_wiki_pages', 'View wiki pages'), ('view_wiki_links', 'View wiki links')], dbtype='text', default=[], verbose_name='anonymous permissions')),
('public_permissions', djorm_pgarray.fields.TextArrayField(choices=[('view_project', 'View project'), ('view_milestones', 'View milestones'), ('view_us', 'View user stories'), ('view_issues', 'View issues'), ('vote_issues', 'Vote issues'), ('view_tasks', 'View tasks'), ('view_wiki_pages', 'View wiki pages'), ('view_wiki_links', 'View wiki links'), ('request_membership', 'Request membership'), ('add_us_to_project', 'Add user story to project'), ('add_comments_to_us', 'Add comments to user stories'), ('add_comments_to_task', 'Add comments to tasks'), ('add_issue', 'Add issues'), ('add_comments_issue', 'Add comments to issues'), ('add_wiki_page', 'Add wiki page'), ('modify_wiki_page', 'Modify wiki page'), ('add_wiki_link', 'Add wiki link'), ('modify_wiki_link', 'Modify wiki link')], dbtype='text', default=[], verbose_name='user permissions')),
('is_private', models.BooleanField(default=False, verbose_name='is private')),
('tags_colors', djorm_pgarray.fields.TextArrayField(dbtype='text', dimension=2, default=[], null=False, verbose_name='tags colors')),
],
options={
'ordering': ['name'],
'verbose_name_plural': 'projects',
'permissions': (('view_project', 'Can view project'),),
'verbose_name': 'project',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='project',
name='members',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, related_name='projects', verbose_name='members', through='projects.Membership'),
preserve_default=True,
),
migrations.AddField(
model_name='project',
name='owner',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL, related_name='owned_projects', verbose_name='owner'),
preserve_default=True,
),
migrations.AddField(
model_name='membership',
name='user',
field=models.ForeignKey(blank=True, default=None, to=settings.AUTH_USER_MODEL, null=True, related_name='memberships'),
preserve_default=True,
),
migrations.AddField(
model_name='membership',
name='project',
field=models.ForeignKey(default=1, to='projects.Project', related_name='memberships'),
preserve_default=False,
),
migrations.AlterUniqueTogether(
name='membership',
unique_together=set([('user', 'project')]),
),
migrations.AddField(
model_name='membership',
name='role',
field=models.ForeignKey(related_name='memberships', to='users.Role', default=1),
preserve_default=False,
),
]
| 19kestier/taiga-back | taiga/projects/migrations/0001_initial.py | Python | agpl-3.0 | 6,634 |
#!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
class TestHPCP(TestCase):
def testEmpty(self):
hpcp = HPCP()([], [])
self.assertEqualVector(hpcp, [0.]*12)
def testZeros(self):
hpcp = HPCP()([0]*10, [0]*10)
self.assertEqualVector(hpcp, [0.]*12)
def testSin440(self):
# Tests whether a real audio signal of one pure tone gets read as a
# single semitone activation, and gets read into the right pcp bin
sampleRate = 44100
audio = MonoLoader(filename = join(testdata.audio_dir, 'generated/synthesised/sin440_0db.wav'),
sampleRate = sampleRate)()
speaks = SpectralPeaks(sampleRate = sampleRate,
maxPeaks = 1,
maxFrequency = sampleRate/2,
minFrequency = 0,
magnitudeThreshold = 0,
orderBy = 'magnitude')
(freqs, mags) = speaks(Spectrum()(audio))
hpcp = HPCP()(freqs, mags)
self.assertEqualVector(hpcp, [1.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.])
def testAllSemitones(self):
# Tests whether a spectral peak output of 12 consecutive semitones
# yields a HPCP of all 1's
tonic = 440
freqs = [(tonic * 2**(x/12.)) for x in range(12)]
mags = [1] * 12
hpcp = HPCP()(freqs, mags)
self.assertEqualVector(hpcp, [1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.])
def testSubmediantPosition(self):
# Make sure that the submediant of a key based on 440 is in the
# correct location (submediant was randomly selected from all the
# tones)
tonic = 440
submediant = tonic * 2**(9./12.)
hpcp = HPCP()([submediant], [1])
self.assertEqualVector(hpcp, [0.,0.,0.,0.,0.,0.,0.,0.,0.,1.,0.,0.])
def testMaxShifted(self):
# Tests whether a HPCP reading with only the dominant semitone
# activated is correctly shifted so that the dominant is at the
# position 0
tonic = 440
dominant = tonic * 2**(7./12.)
hpcp = HPCP(maxShifted=True)([dominant], [1])
self.assertEqualVector(hpcp, [1.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.])
def chordHelper(self, half_steps, tunning, strength):
notes = [tunning*(2.**(half_steps[i]/12.)) for i in range(len(half_steps))]
hpcp = HPCP(maxShifted=False)([notes[0], notes[1], notes[2]], strength)
for i in range(len(hpcp)):
if i in half_steps: self.assertTrue(hpcp[i]>0)
elif (i - 12) in half_steps: self.assertTrue(hpcp[i]>0)
else: self.assertEqual(hpcp[i], 0)
def testChord(self):
tunning = 440
AMajor = [0, 4, 7] # AMajor = A4-C#5-E5
self.chordHelper(AMajor, tunning, [1,1,1])
CMajor = [3, -4, -2] # CMajor = C5-F4-G4
self.chordHelper(CMajor, tunning, [1,1,1])
CMajor = [-4, 3, -2] # CMajor = C5-F4-G4
self.chordHelper(CMajor, tunning, [1,0.5,0.2])
CMajor = [-4, -2, 3] # CMajor = C5-F4-G4
self.chordHelper(CMajor, tunning, [1,0.5,0.2])
CMajor = [3, 8, 10] # CMajor = C5-F5-G5
self.chordHelper(CMajor, tunning, [1,0.5,0.2])
AMinor = [0, 3, 7] # AMinor = A4-C5-E5
self.chordHelper(AMinor, tunning, [1,0.5,0.2])
CMinor = [3, 6, 10] # CMinor = C5-E5-G5
self.chordHelper(CMinor, tunning, [1,0.5,0.2])
# Test of various parameter logical bounds
def testLowFrequency(self):
hpcp = HPCP(minFrequency=100, maxFrequency=1000)([99], [1])
self.assertEqualVector(hpcp, [0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.])
def testHighFrequency(self):
hpcp = HPCP(minFrequency=100, maxFrequency=1000)([1001], [1])
self.assertEqualVector(hpcp, [0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.])
def testSmallMinRange(self):
self.assertConfigureFails(HPCP(), {'minFrequency':1, 'splitFrequency':200})
def testSmallMaxRange(self):
self.assertConfigureFails(HPCP(), {'maxFrequency':1199, 'splitFrequency':1000})
def testSmallMinMaxRange(self):
self.assertConfigureFails(HPCP(), {'bandPreset':False, 'maxFrequency':200, 'minFrequency':1})
def testSizeNonmultiple12(self):
self.assertConfigureFails(HPCP(), {'size':13})
def testHarmonics(self):
# Regression test for the 'harmonics' parameter
tone = 100. # arbitrary frequency [Hz]
freqs = [tone, tone*2, tone*3, tone*4]
mags = [1]*4
hpcpAlg = HPCP(minFrequency=50, maxFrequency=500, bandPreset=False, harmonics=3)
hpcp = hpcpAlg(freqs, mags)
expected = [0., 0., 0., 0.1340538263, 0., 0.2476127148, 0., 0., 0., 0., 1., 0.]
self.assertAlmostEqualVector(hpcp, expected, 1e-4)
def testRegression(self):
# Just makes sure algorithm does not crash on a real data source. This
# test is not really looking for correctness. Maybe consider revising
# it.
inputSize = 512
sampleRate = 44100
audio = MonoLoader(filename = join(testdata.audio_dir, join('recorded', 'musicbox.wav')),
sampleRate = sampleRate)()
fc = FrameCutter(frameSize = inputSize,
hopSize = inputSize)
windowingAlg = Windowing(type = 'blackmanharris62')
specAlg = Spectrum(size=inputSize)
sPeaksAlg = SpectralPeaks(sampleRate = sampleRate,
maxFrequency = sampleRate/2,
minFrequency = 0,
orderBy = 'magnitude')
hpcpAlg = HPCP(minFrequency=50, maxFrequency=500, bandPreset=False, harmonics=3)
frame = fc(audio)
while len(frame) != 0:
spectrum = specAlg(windowingAlg(frame))
(freqs, mags) = sPeaksAlg(spectrum)
hpcp = hpcpAlg(freqs,mags)
self.assertTrue(not any(numpy.isnan(hpcp)))
self.assertTrue(not any(numpy.isinf(hpcp)))
frame = fc(audio)
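# Worked example (for reference, not an extra test): the equal-tempered
# spacing used in the tests above is f = tonic * 2**(n/12.) for n half-steps.
# Seven half-steps (a perfect fifth) above A4 = 440 Hz:
#   440 * 2**(7/12.) ~= 659.26 Hz (E5)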
suite = allTests(TestHPCP)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| arseneyr/essentia | test/src/unittest/spectral/test_hpcp.py | Python | agpl-3.0 | 7,101 |
# -*- coding: utf-8 -*-
# Copyright 2016 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'OAuth2 Disable Login with Odoo.com',
'version': '10.0.1.0.0',
'category': 'Tools',
'author': 'Onestein',
'license': 'AGPL-3',
'depends': ['auth_oauth'],
'data': [
'data/auth_oauth_data.xml',
],
}
| VitalPet/addons-onestein | auth_oauth_disable_login_with_odoo/__manifest__.py | Python | agpl-3.0 | 394 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-11-22 07:11
from __future__ import unicode_literals
from django.core.management.sql import emit_post_migrate_signal
from django.db import migrations
def add_executive_group(apps, schema_editor):
# create group
db_alias = schema_editor.connection.alias
emit_post_migrate_signal(1, False, db_alias)
Group = apps.get_model('auth', 'Group')
Permission = apps.get_model('auth', 'Permission')
executive_group, created = Group.objects.get_or_create(name='executive')
if created:
# Learning unit
can_access_learningunit = Permission.objects.get(codename='can_access_learningunit')
executive_group.permissions.add(can_access_learningunit)
class Migration(migrations.Migration):
dependencies = [
('base', '0207_auto_20171220_1035'),
]
operations = [
migrations.RunPython(add_executive_group, elidable=True),
]
| uclouvain/OSIS-Louvain | base/migrations/0208_create_role_executive.py | Python | agpl-3.0 | 951 |
# -*- coding: utf-8 -*-
from openerp import models, fields
class AccountBankStatementLine(models.Model):
_inherit = "account.bank.statement.line"
name = fields.Char(
string='Memo',
required=False,
default="",
)
| houssine78/addons | account_bank_statement_line_memo/models/models.py | Python | agpl-3.0 | 251 |
# -*- coding: utf-8 -*-
# Etalage -- Open Data POIs portal
# By: Emmanuel Raviart <eraviart@easter-eggs.com>
#
# Copyright (C) 2011, 2012 Easter-eggs
# http://gitorious.org/infos-pratiques/etalage
#
# This file is part of Etalage.
#
# Etalage is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Etalage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Context loaded and saved in WSGI requests"""
import gettext
import webob
from . import conf
__all__ = ['Ctx', 'null_ctx']
class Ctx(object):
_parent = None
default_values = dict(
_lang = None,
_scopes = UnboundLocalError,
_translator = None,
base_categories_slug = None,
category_tags_slug = None,
container_base_url = None,
distance = None, # Max distance in km
gadget_id = None,
hide_directory = False,
req = None,
subscriber = None,
)
env_keys = ('_lang', '_scopes', '_translator')
def __init__(self, req = None):
if req is not None:
self.req = req
etalage_env = req.environ.get('etalage', {})
for key in object.__getattribute__(self, 'env_keys'):
value = etalage_env.get(key)
if value is not None:
setattr(self, key, value)
def __getattribute__(self, name):
try:
return object.__getattribute__(self, name)
except AttributeError:
parent = object.__getattribute__(self, '_parent')
if parent is None:
default_values = object.__getattribute__(self, 'default_values')
if name in default_values:
return default_values[name]
raise
return getattr(parent, name)
@property
def _(self):
return self.translator.ugettext
def blank_req(self, path, environ = None, base_url = None, headers = None, POST = None, **kw):
env = environ.copy() if environ else {}
etalage_env = env.setdefault('etalage', {})
for key in self.env_keys:
value = getattr(self, key)
if value is not None:
etalage_env[key] = value
return webob.Request.blank(path, environ = env, base_url = base_url, headers = headers, POST = POST, **kw)
def get_containing(self, name, depth = 0):
"""Return the n-th (n = ``depth``) context containing attribute named ``name``."""
ctx_dict = object.__getattribute__(self, '__dict__')
if name in ctx_dict:
if depth <= 0:
return self
depth -= 1
parent = ctx_dict.get('_parent')
if parent is None:
return None
return parent.get_containing(name, depth = depth)
def get_inherited(self, name, default = UnboundLocalError, depth = 1):
ctx = self.get_containing(name, depth = depth)
if ctx is None:
if default is UnboundLocalError:
raise AttributeError('Attribute %s not found in %s' % (name, self))
return default
return object.__getattribute__(ctx, name)
def iter(self):
yield self
parent = object.__getattribute__(self, '_parent')
if parent is not None:
for ancestor in parent.iter():
yield ancestor
def iter_containing(self, name):
ctx_dict = object.__getattribute__(self, '__dict__')
if name in ctx_dict:
yield self
parent = ctx_dict.get('_parent')
if parent is not None:
for ancestor in parent.iter_containing(name):
yield ancestor
def iter_inherited(self, name):
for ctx in self.iter_containing(name):
yield object.__getattribute__(ctx, name)
def lang_del(self):
del self._lang
if self.req is not None and self.req.environ.get('etalage') is not None \
and '_lang' in self.req.environ['etalage']:
del self.req.environ['etalage']['_lang']
def lang_get(self):
if self._lang is None:
# self._lang = self.req.accept_language.best_matches('en-US') if self.req is not None else []
# Note: Don't forget to add country-less language code when only a "language-COUNTRY" code is given.
self._lang = ['fr-FR', 'fr']
if self.req is not None:
self.req.environ.setdefault('etalage', {})['_lang'] = self._lang
return self._lang
def lang_set(self, lang):
self._lang = lang
if self.req is not None:
self.req.environ.setdefault('etalage', {})['_lang'] = self._lang
# Reinitialize translator for new languages.
if self._translator is not None:
# Don't del self._translator, because attribute _translator can be defined in a parent.
self._translator = None
if self.req is not None and self.req.environ.get('etalage') is not None \
and '_translator' in self.req.environ['etalage']:
del self.req.environ['etalage']['_translator']
lang = property(lang_get, lang_set, lang_del)
def new(self, **kwargs):
ctx = Ctx()
ctx._parent = self
for name, value in kwargs.iteritems():
setattr(ctx, name, value)
return ctx
@property
def parent(self):
return object.__getattribute__(self, '_parent')
def scopes_del(self):
del self._scopes
if self.req is not None and self.req.environ.get('etalage') is not None \
and '_scopes' in self.req.environ['etalage']:
del self.req.environ['etalage']['_scopes']
def scopes_get(self):
return self._scopes
def scopes_set(self, scopes):
self._scopes = scopes
if self.req is not None:
self.req.environ.setdefault('etalage', {})['_scopes'] = scopes
scopes = property(scopes_get, scopes_set, scopes_del)
@property
def session(self):
return self.req.environ.get('beaker.session') if self.req is not None else None
@property
def translator(self):
"""Get a valid translator object from one or several languages names."""
if self._translator is None:
languages = self.lang
if not languages:
return gettext.NullTranslations()
if not isinstance(languages, list):
languages = [languages]
translator = gettext.NullTranslations()
i18n_dir_by_plugin_name = conf['i18n_dir_by_plugin_name'] or {}
for name, i18n_dir in [
('biryani', conf['biryani_i18n_dir']),
(conf['package_name'], conf['i18n_dir']),
] + sorted(i18n_dir_by_plugin_name.iteritems()):
if name is not None and i18n_dir is not None:
translator = new_translator(name, i18n_dir, languages, fallback = translator)
self._translator = translator
return self._translator
null_ctx = Ctx()
null_ctx.lang = ['fr-FR', 'fr']
def new_translator(domain, localedir, languages, fallback = None):
new = gettext.translation(domain, localedir, fallback = True, languages = languages)
if fallback is not None:
new.add_fallback(fallback)
return new
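# Usage sketch (standalone, no WSGI request involved): contexts created with
# new() inherit attribute values from their parent via __getattribute__.
if __name__ == '__main__':
    parent_ctx = Ctx()
    parent_ctx.gadget_id = 42
    child_ctx = parent_ctx.new(distance = 10)
    print(child_ctx.distance)   # 10, set on the child itself
    print(child_ctx.gadget_id)  # 42, inherited from the parent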
| Gentux/etalage | etalage/contexts.py | Python | agpl-3.0 | 7,875 |
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect, Http404
from django.db.models import Q
from django.contrib import messages
from cc.general.util import render
import cc.ripple.api as ripple
from cc.profile.models import Profile
from cc.relate.forms import EndorseForm, AcknowledgementForm
from cc.relate.models import Endorsement
from cc.feed.models import FeedItem
from cc.general.mail import send_notification
from django.utils.translation import ugettext as _
MESSAGES = {
'endorsement_saved': _("Endorsement saved."),
'endorsement_deleted': _("Endorsement deleted."),
'acknowledgement_sent': _("Acknowledgement sent."),
}
@login_required
@render()
def endorse_user(request, recipient_username):
recipient = get_object_or_404(Profile, user__username=recipient_username)
if recipient == request.profile:
raise Http404()
try:
endorsement = Endorsement.objects.get(
endorser=request.profile, recipient=recipient)
except Endorsement.DoesNotExist:
endorsement = None
if request.method == 'POST':
if 'delete' in request.POST and endorsement:
endorsement.delete()
messages.info(request, MESSAGES['endorsement_deleted'])
return HttpResponseRedirect(
endorsement.recipient.get_absolute_url())
form = EndorseForm(request.POST, instance=endorsement,
endorser=request.profile, recipient=recipient)
if form.is_valid():
is_new = endorsement is None
endorsement = form.save()
if is_new:
send_endorsement_notification(endorsement)
messages.info(request, MESSAGES['endorsement_saved'])
return HttpResponseRedirect(endorsement.get_absolute_url())
else:
form = EndorseForm(instance=endorsement, endorser=request.profile,
recipient=recipient)
profile = recipient # For profile_base.html.
return locals()
def send_endorsement_notification(endorsement):
subject = _("%s has endorsed you on Villages.cc") % endorsement.endorser
send_notification(subject, endorsement.endorser, endorsement.recipient,
'endorsement_notification_email.txt',
{'endorsement': endorsement})
@login_required
@render()
def endorsement(request, endorsement_id):
endorsement = get_object_or_404(Endorsement, pk=endorsement_id)
return locals()
@login_required
@render()
def relationships(request):
accounts = ripple.get_user_accounts(request.profile)
return locals()
@login_required
@render()
def relationship(request, partner_username):
partner = get_object_or_404(Profile, user__username=partner_username)
if partner == request.profile:
raise Http404 # Can't have relationship with yourself.
account = request.profile.account(partner)
if account:
entries = account.entries
balance = account.balance
else:
entries = []
balance = 0
profile = partner # For profile_base.html.
return locals()
@login_required
@render()
def acknowledge_user(request, recipient_username):
recipient = get_object_or_404(Profile, user__username=recipient_username)
if recipient == request.profile:
raise Http404
# TODO: Don't recompute max_amount on form submit? Cache, or put in form
# as hidden field?
max_amount = ripple.max_payment(request.profile, recipient)
if request.method == 'POST':
form = AcknowledgementForm(request.POST, max_ripple=max_amount)
if form.is_valid():
acknowledgement = form.send_acknowledgement(
request.profile, recipient)
send_acknowledgement_notification(acknowledgement)
messages.info(request, MESSAGES['acknowledgement_sent'])
return HttpResponseRedirect(acknowledgement.get_absolute_url())
else:
form = AcknowledgementForm(max_ripple=max_amount, initial=request.GET)
can_ripple = max_amount > 0
profile = recipient # For profile_base.html.
return locals()
def send_acknowledgement_notification(acknowledgement):
subject = _("%s has acknowledged you on Villages.cc") % (
acknowledgement.payer)
send_notification(subject, acknowledgement.payer, acknowledgement.recipient,
'acknowledgement_notification_email.txt',
{'acknowledgement': acknowledgement})
@login_required
@render()
def view_acknowledgement(request, payment_id):
try:
payment = ripple.get_payment(payment_id)
except ripple.RipplePayment.DoesNotExist:
raise Http404
entries = payment.entries_for_user(request.profile)
if not entries:
raise Http404 # Non-participants don't get to see anything.
sent_entries = []
received_entries = []
for entry in entries:
if entry.amount < 0:
sent_entries.append(entry)
else:
received_entries.append(entry)
return locals()
| rfugger/villagescc | cc/relate/views.py | Python | agpl-3.0 | 5,122 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import waybill
import wizard
import travel
import vehicle
import requirement
import res_partner
import waybill_expense
import account_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| adhoc-dev/odoo-logistic | addons/logistic_x/__init__.py | Python | agpl-3.0 | 1,161 |
# Copyright 2015-2018 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
from . import mass_reconcile
from . import advanced_reconciliation
| OCA/bank-statement-reconcile | account_mass_reconcile_ref_deep_search/models/__init__.py | Python | agpl-3.0 | 171 |
# -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import invoice
| Elico-Corp/openerp-7.0 | sale_bom_split_anglo_saxon/__init__.py | Python | agpl-3.0 | 158 |
import factory
from .models import User
USER_PASSWORD = "2fast2furious"
class UserFactory(factory.DjangoModelFactory):
name = "John Doe"
email = factory.Sequence(lambda n: "john{}@example.com".format(n))
password = factory.PostGenerationMethodCall('set_password', USER_PASSWORD)
gender = "male"
class Meta:
model = User
| ballotify/django-backend | ballotify/apps/accounts/factories.py | Python | agpl-3.0 | 353 |
#!/usr/bin/env python
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t -*-
#
# NetProfile: Authentication routines
# © Copyright 2013-2014 Alex 'Unik' Unigovsky
#
# This file is part of NetProfile.
# NetProfile is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# NetProfile is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General
# Public License along with NetProfile. If not, see
# <http://www.gnu.org/licenses/>.
from __future__ import (
unicode_literals,
print_function,
absolute_import,
division
)
import hashlib
import random
import string
import time
from zope.interface import implementer
from pyramid.interfaces import IAuthenticationPolicy
from pyramid.security import (
Authenticated,
Everyone
)
class PluginPolicySelected(object):
def __init__(self, request, policy):
self.request = request
self.policy = policy
@implementer(IAuthenticationPolicy)
class PluginAuthenticationPolicy(object):
def __init__(self, default, routes=None):
self._default = default
if routes is None:
routes = {}
self._routes = routes
def add_plugin(self, route, policy):
self._routes[route] = policy
def match(self, request):
if hasattr(request, 'auth_policy'):
return request.auth_policy
cur = None
cur_len = 0
for route, plug in self._routes.items():
r_len = len(route)
if r_len <= cur_len:
continue
path = request.path
if route == path[:r_len]:
if len(path) > r_len:
if path[r_len:r_len + 1] != '/':
continue
cur = plug
cur_len = r_len
if cur:
request.auth_policy = cur
else:
request.auth_policy = self._default
request.registry.notify(PluginPolicySelected(request, request.auth_policy))
return request.auth_policy
def authenticated_userid(self, request):
return self.match(request).authenticated_userid(request)
def unauthenticated_userid(self, request):
return self.match(request).unauthenticated_userid(request)
def effective_principals(self, request):
return self.match(request).effective_principals(request)
def remember(self, request, principal, **kw):
return self.match(request).remember(request, principal, **kw)
def forget(self, request):
return self.match(request).forget(request)
_TOKEN_FILTER_MAP = (
[chr(n) for n in range(32)] +
[chr(127), '\\', '"']
)
_TOKEN_FILTER_MAP = dict.fromkeys((ord(char) for char in _TOKEN_FILTER_MAP), None)
def _filter_token(tok):
return str(tok).translate(_TOKEN_FILTER_MAP)
def _format_kvpairs(**kwargs):
return ', '.join('{0!s}="{1}"'.format(k, _filter_token(v)) for (k, v) in kwargs.items())
def _generate_nonce(ts, secret, salt=None, chars=string.hexdigits.upper()):
# TODO: Add IP-address to nonce
if not salt:
try:
rng = random.SystemRandom()
except NotImplementedError:
rng = random
salt = ''.join(rng.choice(chars) for i in range(16))
ctx = hashlib.md5(('%s:%s:%s' % (ts, salt, secret)).encode())
return ('%s:%s:%s' % (ts, salt, ctx.hexdigest()))
def _is_valid_nonce(nonce, secret):
comp = nonce.split(':')
if len(comp) != 3:
return False
calc_nonce = _generate_nonce(comp[0], secret, comp[1])
if nonce == calc_nonce:
return True
return False
def _generate_digest_challenge(ts, secret, realm, opaque, stale=False):
nonce = _generate_nonce(ts, secret)
return 'Digest %s' % (_format_kvpairs(
realm=realm,
qop='auth',
nonce=nonce,
opaque=opaque,
algorithm='MD5',
stale='true' if stale else 'false'
),)
def _add_www_authenticate(request, secret, realm):
resp = request.response
if not resp.www_authenticate:
resp.www_authenticate = _generate_digest_challenge(
round(time.time()),
secret, realm, 'NPDIGEST'
)
def _parse_authorization(request, secret, realm):
authz = request.authorization
if (not authz) or (len(authz) != 2) or (authz[0] != 'Digest'):
_add_www_authenticate(request, secret, realm)
return None
params = authz[1]
if 'algorithm' not in params:
params['algorithm'] = 'MD5'
for required in ('username', 'realm', 'nonce', 'uri', 'response', 'cnonce', 'nc', 'opaque'):
if (required not in params) or ((required == 'opaque') and (params['opaque'] != 'NPDIGEST')):
_add_www_authenticate(request, secret, realm)
return None
return params
@implementer(IAuthenticationPolicy)
class DigestAuthenticationPolicy(object):
def __init__(self, secret, callback, realm='Realm'):
self.secret = secret
self.callback = callback
self.realm = realm
def authenticated_userid(self, request):
params = _parse_authorization(request, self.secret, self.realm)
if params is None:
return None
if not _is_valid_nonce(params['nonce'], self.secret):
_add_www_authenticate(request, self.secret, self.realm)
return None
userid = params['username']
if self.callback(params, request) is not None:
return 'u:%s' % userid
_add_www_authenticate(request, self.secret, self.realm)
def unauthenticated_userid(self, request):
params = _parse_authorization(request, self.secret, self.realm)
if params is None:
return None
if not _is_valid_nonce(params['nonce'], self.secret):
_add_www_authenticate(request, self.secret, self.realm)
return None
return 'u:%s' % params['username']
def effective_principals(self, request):
creds = [Everyone]
params = _parse_authorization(request, self.secret, self.realm)
if params is None:
return creds
if not _is_valid_nonce(params['nonce'], self.secret):
_add_www_authenticate(request, self.secret, self.realm)
return creds
groups = self.callback(params, request)
if groups is None:
return creds
creds.append(Authenticated)
creds.append('u:%s' % params['username'])
creds.extend(groups)
return creds
def remember(self, request, principal, **kw):
return []
def forget(self, request):
return [('WWW-Authenticate', _generate_digest_challenge(
round(time.time()),
self.secret,
self.realm,
'NPDIGEST'
))]
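# Sanity sketch (illustrative only, not used by the policies): a nonce
# generated with one secret validates against that secret and no other.
if __name__ == '__main__':
    nonce = _generate_nonce(round(time.time()), 'some-secret')
    print(_is_valid_nonce(nonce, 'some-secret'))     # True
    print(_is_valid_nonce(nonce, 'another-secret'))  # False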
| annndrey/npui-unik | netprofile/netprofile/common/auth.py | Python | agpl-3.0 | 6,265 |
from . import models
from . import lroe
| factorlibre/l10n-spain | l10n_es_ticketbai_api_batuz/__init__.py | Python | agpl-3.0 | 40 |
#!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
from essentia.streaming import TCToTotal as sTCToTotal
class TestTCToTotal(TestCase):
def testEmpty(self):
gen = VectorInput([])
tcToTotal = sTCToTotal()
p = Pool()
gen.data >> tcToTotal.envelope
tcToTotal.TCToTotal >> (p, 'lowlevel.tctototal')
run(gen)
self.assertRaises(KeyError, lambda: p['lowlevel.tctototal'])
def testOneValue(self):
gen = VectorInput([1])
tcToTotal = sTCToTotal()
p = Pool()
gen.data >> tcToTotal.envelope
tcToTotal.TCToTotal >> (p, 'lowlevel.tctototal')
self.assertRaises(RuntimeError, lambda: run(gen))
def testRegression(self):
envelope = range(22050)
envelope.reverse()
envelope = range(22050) + envelope
gen = VectorInput(envelope)
tcToTotal = sTCToTotal()
p = Pool()
gen.data >> tcToTotal.envelope
tcToTotal.TCToTotal >> (p, 'lowlevel.tctototal')
run(gen)
self.assertAlmostEqual(p['lowlevel.tctototal'],
TCToTotal()(envelope))
suite = allTests(TestTCToTotal)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| arseneyr/essentia | test/src/unittest/sfx/test_tctototal_streaming.py | Python | agpl-3.0 | 2,098 |
# Copyright 2015 ACSONE SA/NV
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import fields, models
class PosConfig(models.Model):
_inherit = "pos.config"
account_analytic_id = fields.Many2one(
comodel_name="account.analytic.account", string="Analytic Account"
)
| OCA/account-analytic | pos_analytic_by_config/models/pos_config.py | Python | agpl-3.0 | 318 |
#!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
import os
from daemon import Daemon
from time import sleep
from StringIO import StringIO
from traceback import print_exc
from skarphedcore.configuration import Configuration
from skarphedcore.database import Database
from skarphedcore.core import Core
from skarphedcore.module import Module
from common.errors import OperationException
class Operation(object):
"""
Contains everything necessary to handle Operations
"""
STATUS_PENDING = 0
STATUS_ACTIVE = 1
STATUS_FAILED = 2
VALID_STORAGE_TYPES = ('int','bool','str','unicode')
def __init__(self, parent_id = None):
"""
"""
self._id = None
self._parent = parent_id
self._values = {}
@classmethod
def drop_operation(cls,operation_id):
"""
Drops an Operation, identified by its Operation Id, and
its children recursively.
Drop deletes the Operations from the database.
"""
db = Database()
stmnt = "SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS IN (0, 2) ;"
cur = db.query(stmnt,(operation_id,))
for row in cur.fetchallmap():
cls.drop_operation(row["OPE_ID"])
stmnt = "DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS IN (0, 2) ;"
db.query(stmnt,(operation_id,),commit=True)
@classmethod
def retry_operation(cls,operation_id):
"""
Resets the state of an operation and its children recursively to 0 (PENDING).
The operation is identified by the given operation Id.
"""
db = Database()
stmnt = "SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 2 ;"
cur = db.query(stmnt,(operation_id,))
for row in cur.fetchallmap():
cls.retry_operation(row["OPE_ID"])
stmnt = "UPDATE OPERATIONS SET OPE_STATUS = 0 WHERE OPE_ID = ? AND OPE_STATUS = 2 ;"
db.query(stmnt,(operation_id,),commit=True)
@classmethod
def cancel_operation(cls,operation_id):
"""
Cancels an Operation, identified by its Operation Id, and
its children recursively.
Cancel deletes the Operation from the database.
"""
db = Database()
stmnt = "SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 0 ;"
cur = db.query(stmnt,(operation_id,))
for row in cur.fetchallmap():
cls.cancel_operation(row["OPE_ID"])
stmnt = "DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS = 0 ;"
db.query(stmnt,(operation_id,),commit=True)
@classmethod
def restore_operation(cls, operation_record):
"""
Restores an Operation object stored in the database from a dataset consisting of
the operation's ID and the operation's TYPE:
For example: {"OPE_ID": 100, "OPE_TYPE": "TestOperation"}
Restores the Operation object's _values attribute from the data saved
in the DB table OPERATIONDATA
"""
classname = operation_record["OPE_TYPE"]
module = "" #TODO Implement modulename from database if Operation belongs to Module
is_operation_of_module = False
exec """
try:
type(%(class)s)
except NameError,e:
is_operation_of_module = True"""%{'class':classname}
if is_operation_of_module:
exec """
from %(module)s import %(class)s
operation = %(class)s()"""%{'class':classname,'module':module}
else:
exec """
operation = %(class)s()"""%{'class':classname}
operation.set_id(operation_record['OPE_ID'])
db = Database()
stmnt = "SELECT OPD_KEY, OPD_VALUE, OPD_TYPE FROM OPERATIONDATA WHERE OPD_OPE_ID = ? ;"
cur = db.query(stmnt,(operation_record["OPE_ID"],))
for row in cur.fetchallmap():
val = row["OPD_VALUE"]
exec """val = %s(val)"""%row["OPD_TYPE"]
operation.set_value(row["OPD_KEY"], val)
return operation
@classmethod
def process_children(cls, operation):
"""
Recursively executes the workloads of an Operation's child operations.
It catches exceptions raised in the workloads, sets the OPE_STATUS
to 2 (FAILED) if a catch occurs, then passes the exception on to the
higher layer.
If an Operation succeeds, its entry in the DB gets deleted.
"""
db = Database()
stmnt = "SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT = ? ORDER BY OPE_INVOKED ;"
stmnt_lock = "UPDATE OPERATIONS SET OPE_STATUS = 1 WHERE OPE_ID = ? ;"
cur = db.query(stmnt,(operation.get_id(),))
for row in cur.fetchallmap():
child_operation = cls.restore_operation(row)
db.query(stmnt_lock,(child_operation.get_id(),),commit=True)
try:
cls.process_children(child_operation)
child_operation.do_workload()
except Exception,e:
stmnt_err = "UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? ;"
db.query(stmnt_err,(int(row["OPE_ID"]),),commit=True)
#TODO GENERATE ERROR IN LOG
raise e
stmnt_delete = "DELETE FROM OPERATIONS WHERE OPE_ID = ?;"
db.query(stmnt_delete,(child_operation.get_id(),),commit=True)
@classmethod
def process_next(cls):
"""
Sets the status of the next toplevel operation to 1 (ACTIVE)
Fetches the next toplevel operation from the database and applies a
filesystem lock (<core.webpath>/scv_operating.lck) while it runs.
"""
db = Database()
configuration = Configuration()
if os.path.exists(configuration.get_entry("core.webpath")+"/scv_operating.lck"):
return False
lockfile = open(configuration.get_entry("core.webpath")+"/scv_operating.lck","w")
lockfile.close()
stmnt_lock = "UPDATE OPERATIONS SET OPE_STATUS = 1 \
WHERE OPE_ID IN ( \
SELECT OPE_ID FROM OPERATIONS \
WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 0 \
AND OPE_INVOKED = ( \
SELECT MIN(OPE_INVOKED) FROM OPERATIONS \
WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 0) \
) ;"
stmnt = "SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 1 ;"
db.query(stmnt_lock,commit=True)
cur = db.query(stmnt)
res = cur.fetchallmap()
if len(res) > 0:
operation = cls.restore_operation(res[0])
try:
cls.process_children(operation)
operation.do_workload()
except Exception, e:
stmnt_err = "UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? ;"
db.query(stmnt_err,(operation.get_id(),),commit=True)
error = StringIO()
print_exc(None,error)
Core().log(error.getvalue())
ret = True
else:
ret = False
stmnt_delete = "DELETE FROM OPERATIONS WHERE OPE_STATUS = 1 ;"
db.query(stmnt_delete,commit=True)
db.commit()
try:
os.unlink(configuration.get_entry("core.webpath")+"/scv_operating.lck")
except OSError, e:
raise OperationException(OperationException.get_msg(0))
return ret
@classmethod
def get_current_operations_for_gui(cls, operation_types=None):
"""
Returns all Operations in an associative array.
The array's indices are the operation IDs.
The objects contain all information about the operations,
including their data.
"""
db = Database()
#TODO CHECK HOW LISTS ARE HANDLED IN FDB
if operation_types is not None and type(operation_types) == list:
stmnt = "SELECT OPE_ID, OPE_OPE_PARENT, OPE_INVOKED, OPE_TYPE, OPE_STATUS FROM OPERATIONS WHERE OPE_TYPE IN (?) ORDER BY OPE_INVOKED ;"
cur = db.query(stmnt,(operation_types))
else:
stmnt = "SELECT OPE_ID, OPE_OPE_PARENT, OPE_INVOKED, OPE_TYPE, OPE_STATUS FROM OPERATIONS ORDER BY OPE_INVOKED ;"
cur = db.query(stmnt)
ret = {}
for row in cur.fetchallmap():
operation = cls.restore_operation(row)
custom_values = operation.get_values()
ret[row["OPE_ID"]] = {"id":row["OPE_ID"],
"parent":row["OPE_OPE_PARENT"],
"invoked":str(row["OPE_INVOKED"]),
"type":row["OPE_TYPE"],
"status":row["OPE_STATUS"],
"data":custom_values}
return ret
def get_values(self):
"""
trivial
"""
return self._values
def get_value(self,key):
"""
trivial
"""
return self._values[key]
def set_value(self,key,value):
"""
trivial
"""
self._values[key] = value
def set_parent(self,parent_id):
"""
trivial
"""
self._parent = parent_id
def get_parent(self):
"""
trivial
"""
return self._parent
def set_db_id(self):
"""
Get a new Operation Id from the Database and assign it to this
Operation if this Operation's id is null. Afterwards return the
new Id
"""
if self._id is None:
self._id = Database().get_seq_next('OPE_GEN')
return self._id
def set_id(self, nr):
"""
trivial
"""
self._id = nr
def get_id(self):
"""
trivial
"""
return self._id
def store(self):
"""
Stores this Operation to the database.
Also saves every user-defined value in _values as
long as it is a valid type.
"""
db = Database()
self.set_db_id()
stmnt = "UPDATE OR INSERT INTO OPERATIONS (OPE_ID, OPE_OPE_PARENT, OPE_INVOKED, OPE_TYPE) \
VALUES (?,?,CURRENT_TIMESTAMP,?) MATCHING (OPE_ID);"
db.query(stmnt,(self._id,self._parent,self.__class__.__name__),commit=True)
stmnt = "UPDATE OR INSERT INTO OPERATIONDATA (OPD_OPE_ID, OPD_KEY, OPD_VALUE, OPD_TYPE) \
VALUES ( ?, ?, ?, ?) MATCHING(OPD_OPE_ID,OPD_KEY);"
for key, value in self._values.items():
typ = str(type(value)).replace("<type '","",1).replace("'>","",1)
if typ not in Operation.VALID_STORAGE_TYPES:
continue
db.query(stmnt,(self._id,key,value,typ),commit=True)
def do_workload(self):
"""
This method must be overridden by inheriting classes.
The code inside this method will be executed when the
Operation is processed by Operation.process_next or
Operation.process_children
"""
pass
#MODULEINVOLVED
class ModuleOperation(Operation):
"""
Abstracts Operations that have to do with modules
"""
def __init__(self):
"""
trivial
"""
Operation.__init__(self)
def set_values(self,module):
"""
Sets this operation's values from module metadata
"""
if type(module) == dict:
self.set_value("name",module["name"])
self.set_value("hrname",module["hrname"])
self.set_value("version_major",module["version_major"])
self.set_value("version_minor",module["version_minor"])
self.set_value("revision",module["revision"])
if module.has_key("signature"):
self.set_value("signature",module["signature"])
elif module.__class__.__name__ == "Module":
pass #TODO IMPLEMENT / DISCUSS AFTER IMPLEMENTING MODULE-SUBSYSTEM
def get_meta(self):
"""
trivial
"""
return self._values
@classmethod
def get_currently_processed_modules(cls):
"""
Returns an array of ModuleOperation objects that are
currently listed in the queue.
"""
db = Database()
stmnt = "SELECT OPE_ID, OPE_OPE_PARENT, OPE_TYPE FROM OPERATIONS \
WHERE OPE_TYPE = 'ModuleInstallOperation' \
or OPE_TYPE = 'ModuleUninstallOperation' ;"
cur = db.query(stmnt);
ret = []
for row in cur.fetchallmap():
ret.append(Operation.restore_operation(row).get_meta())
return ret
def optimize_queue(self):
"""
abstract
"""
pass
#MODULEINVOLVED
class ModuleInstallOperation(ModuleOperation):
"""
Manages the process to install a module on this server
"""
def __init__(self):
"""
trivial
"""
ModuleOperation.__init__(self)
def do_workload(self):
"""
tell the module manager to install a specific module.
"""
Module.install_module(self.get_meta())
def optimize_queue(self):
"""
optimizes the queue.
"""
pass #TODO Implement
#MODULEINVOLVED
class ModuleUninstallOperation(ModuleOperation):
"""
Manages the process to uninstall a module from this server
"""
def __init__(self):
"""
trivial
"""
ModuleOperation.__init__(self)
def do_workload(self):
"""
tell the module manager to uninstall a specific module.
"""
module = Module.get_module_by_name(self._values["name"])
module_manager.uninstall_module(module)
def optimize_queue(self):
"""
optimizes the queue.
"""
pass #TODO Implement
#MODULEINVOLVED
class ModuleUpdateOperation(ModuleOperation):
"""
Manages the process to update a module on this server
"""
def __init__(self):
"""
trivial
"""
ModuleOperation.__init__(self)
def do_workload(self):
"""
tell the module manager to update a specific module.
"""
module = Module.get_module_by_name(self._values["name"])
module_manager.update_module(module)
def optimize_queue(self):
"""
optimizes the queue.
"""
pass #TODO Implement
class FailOperation(Operation):
"""
For unittest purposes: An Operation that always fails
"""
def __init__(self):
"""
trivial
"""
Operation.__init__(self)
def do_workload(self):
"""
simply fail
"""
raise Exception("Failoperation failed")
class TestOperation(Operation):
"""
For unittest purposes: An Operation that always succeeds
"""
def __init__(self):
"""
trivial
"""
Operation.__init__(self)
def do_workload(self):
"""
simply succeed
"""
pass
class OperationDaemon(Daemon):
"""
This is the daemon that runs to actually execute the scheduled operations
"""
def __init__(self, pidfile):
"""
Initialize the daemon
"""
Daemon.__init__(self,pidfile)
def stop(self):
configuration = Configuration()
if os.path.exists(configuration.get_entry("core.webpath")+"/scv_operating.lck"):
os.remove(configuration.get_entry("core.webpath")+"/scv_operating.lck")
Daemon.stop(self)
def run(self):
"""
Do work if there is work to do, otherwise check every two seconds for new work.
"""
while True:
while Operation.process_next():
pass
sleep(2)
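# Usage sketch (assumes a configured Database and the OPERATIONS schema):
# queue an operation, then drain the queue the way OperationDaemon.run does.
if __name__ == '__main__':
    op = TestOperation()
    op.set_value('note', 'smoke test')  # only int/bool/str/unicode persist
    op.store()
    while Operation.process_next():
        pass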
| skarphed/skarphed | core/lib/operation.py | Python | agpl-3.0 | 16,724 |
"""
Application file for the code snippets app.
"""
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class SnippetsConfig(AppConfig):
"""
Application configuration class for the code snippets app.
"""
name = 'apps.snippets'
verbose_name = _('Code snippets')
| TamiaLab/carnetdumaker | apps/snippets/apps.py | Python | agpl-3.0 | 323 |
# -*- encoding: utf-8 -*-
from . import res_partner_bank
from . import account_bank_statement_import
| StefanRijnhart/bank-statement-import | account_bank_statement_import/__init__.py | Python | agpl-3.0 | 102 |
"""
Tests course_creators.admin.py.
"""
from django.test import TestCase
from django.contrib.auth.models import User
from django.contrib.admin.sites import AdminSite
from django.http import HttpRequest
import mock
from course_creators.admin import CourseCreatorAdmin
from course_creators.models import CourseCreator
from django.core import mail
from student.roles import CourseCreatorRole
from student import auth
def mock_render_to_string(template_name, context):
"""Return a string that encodes template_name and context"""
return str((template_name, context))
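# For example, mock_render_to_string('emails/foo.txt', {'x': 1}) returns
# "('emails/foo.txt', {'x': 1})", so tests can assert on "rendered" output
# without loading real templates.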
class CourseCreatorAdminTest(TestCase):
"""
Tests for course creator admin.
"""
def setUp(self):
""" Test case setup """
super(CourseCreatorAdminTest, self).setUp()
self.user = User.objects.create_user('test_user', 'test_user+courses@edx.org', 'foo')
self.table_entry = CourseCreator(user=self.user)
self.table_entry.save()
self.admin = User.objects.create_user('Mark', 'admin+courses@edx.org', 'foo')
self.admin.is_staff = True
self.request = HttpRequest()
self.request.user = self.admin
self.creator_admin = CourseCreatorAdmin(self.table_entry, AdminSite())
self.studio_request_email = 'mark@marky.mark'
self.enable_creator_group_patch = {
"ENABLE_CREATOR_GROUP": True,
"STUDIO_REQUEST_EMAIL": self.studio_request_email
}
@mock.patch('course_creators.admin.render_to_string', mock.Mock(side_effect=mock_render_to_string, autospec=True))
@mock.patch('django.contrib.auth.models.User.email_user')
def test_change_status(self, email_user):
"""
Tests that updates to state impact the creator group maintained in authz.py and that e-mails are sent.
"""
def change_state_and_verify_email(state, is_creator):
""" Changes user state, verifies creator status, and verifies e-mail is sent based on transition """
self._change_state(state)
self.assertEqual(is_creator, auth.user_has_role(self.user, CourseCreatorRole()))
context = {'studio_request_email': self.studio_request_email}
if state == CourseCreator.GRANTED:
template = 'emails/course_creator_granted.txt'
elif state == CourseCreator.DENIED:
template = 'emails/course_creator_denied.txt'
else:
template = 'emails/course_creator_revoked.txt'
email_user.assert_called_with(
mock_render_to_string('emails/course_creator_subject.txt', context),
mock_render_to_string(template, context),
self.studio_request_email
)
with mock.patch.dict('django.conf.settings.FEATURES', self.enable_creator_group_patch):
# User is initially unrequested.
self.assertFalse(auth.user_has_role(self.user, CourseCreatorRole()))
change_state_and_verify_email(CourseCreator.GRANTED, True)
change_state_and_verify_email(CourseCreator.DENIED, False)
change_state_and_verify_email(CourseCreator.GRANTED, True)
change_state_and_verify_email(CourseCreator.PENDING, False)
change_state_and_verify_email(CourseCreator.GRANTED, True)
change_state_and_verify_email(CourseCreator.UNREQUESTED, False)
change_state_and_verify_email(CourseCreator.DENIED, False)
@mock.patch('course_creators.admin.render_to_string', mock.Mock(side_effect=mock_render_to_string, autospec=True))
def test_mail_admin_on_pending(self):
"""
Tests that the admin account is notified when a user is in the 'pending' state.
"""
def check_admin_message_state(state, expect_sent_to_admin, expect_sent_to_user):
""" Changes user state and verifies e-mail sent to admin address only when pending. """
mail.outbox = []
self._change_state(state)
# If a message is sent to the user about course creator status change, it will be the first
# message sent. Admin message will follow.
base_num_emails = 1 if expect_sent_to_user else 0
if expect_sent_to_admin:
context = {'user_name': "test_user", 'user_email': u'test_user+courses@edx.org'}
self.assertEquals(base_num_emails + 1, len(mail.outbox), 'Expected admin message to be sent')
sent_mail = mail.outbox[base_num_emails]
self.assertEquals(
mock_render_to_string('emails/course_creator_admin_subject.txt', context),
sent_mail.subject
)
self.assertEquals(
mock_render_to_string('emails/course_creator_admin_user_pending.txt', context),
sent_mail.body
)
self.assertEquals(self.studio_request_email, sent_mail.from_email)
self.assertEqual([self.studio_request_email], sent_mail.to)
else:
self.assertEquals(base_num_emails, len(mail.outbox))
with mock.patch.dict('django.conf.settings.FEATURES', self.enable_creator_group_patch):
# E-mail message should be sent to admin only when new state is PENDING, regardless of what
# previous state was (unless previous state was already PENDING).
# E-mail message sent to user only on transition into and out of GRANTED state.
check_admin_message_state(CourseCreator.UNREQUESTED, expect_sent_to_admin=False, expect_sent_to_user=False)
check_admin_message_state(CourseCreator.PENDING, expect_sent_to_admin=True, expect_sent_to_user=False)
check_admin_message_state(CourseCreator.GRANTED, expect_sent_to_admin=False, expect_sent_to_user=True)
check_admin_message_state(CourseCreator.DENIED, expect_sent_to_admin=False, expect_sent_to_user=True)
check_admin_message_state(CourseCreator.GRANTED, expect_sent_to_admin=False, expect_sent_to_user=True)
check_admin_message_state(CourseCreator.PENDING, expect_sent_to_admin=True, expect_sent_to_user=True)
check_admin_message_state(CourseCreator.PENDING, expect_sent_to_admin=False, expect_sent_to_user=False)
check_admin_message_state(CourseCreator.DENIED, expect_sent_to_admin=False, expect_sent_to_user=True)
def _change_state(self, state):
""" Helper method for changing state """
self.table_entry.state = state
self.creator_admin.save_model(self.request, self.table_entry, None, True)
def test_add_permission(self):
"""
Tests that staff cannot add entries
"""
self.assertFalse(self.creator_admin.has_add_permission(self.request))
def test_delete_permission(self):
"""
Tests that staff cannot delete entries
"""
self.assertFalse(self.creator_admin.has_delete_permission(self.request))
def test_change_permission(self):
"""
Tests that only staff can change entries
"""
self.assertTrue(self.creator_admin.has_change_permission(self.request))
self.request.user = self.user
self.assertFalse(self.creator_admin.has_change_permission(self.request))
| nttks/edx-platform | cms/djangoapps/course_creators/tests/test_admin.py | Python | agpl-3.0 | 7,332 |
import unittest
from app import read_config
class ConfigFileReaderTest(unittest.TestCase):
def test_read(self):
config = read_config('config')
self.assertEqual(config['cmus_host'], 'raspberry')
self.assertEqual(config['cmus_passwd'], 'PaSsWd')
self.assertEqual(config['app_host'], 'localhost')
self.assertEqual(config['app_port'], '8080')
if __name__ == '__main__':
unittest.main()
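# A sketch of the 'config' fixture this test reads; the exact syntax depends
# on read_config(), but the keys and values the assertions expect are:
#
#     cmus_host = raspberry
#     cmus_passwd = PaSsWd
#     app_host = localhost
#     app_port = 8080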
| jboynyc/cmus_app | tests.py | Python | agpl-3.0 | 433 |
from django.conf.urls.defaults import *
import frontend.views as frontend_views
import codewiki.views
import codewiki.viewsuml
from django.contrib.syndication.views import feed as feed_view
from django.views.generic import date_based, list_detail
from django.views.generic.simple import direct_to_template
from django.contrib import admin
import django.contrib.auth.views as auth_views
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponsePermanentRedirect
admin.autodiscover()
# Need to move this somewhere more useful and try to make it less hacky but
# seems to be the easiest way unfortunately.
from django.contrib.auth.models import User
User._meta.ordering = ['username']
from frontend.feeds import LatestCodeObjects, LatestCodeObjectsBySearchTerm, LatestCodeObjectsByTag, LatestViewObjects, LatestScraperObjects
feeds = {
'all_code_objects': LatestCodeObjects,
'all_scrapers': LatestScraperObjects,
'all_views': LatestViewObjects,
'latest_code_objects_by_search_term': LatestCodeObjectsBySearchTerm,
'latest_code_objects_by_tag': LatestCodeObjectsByTag,
}
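# With the 'feeds' urlpattern below, a URL such as /feeds/all_scrapers/ is
# served by LatestScraperObjects via this dictionary.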
urlpatterns = patterns('',
url(r'^$', frontend_views.frontpage, name="frontpage"),
    # redirects from old version (would clash if you happen to have a scraper whose name is "list"!)
(r'^scrapers/list/$', lambda request: HttpResponseRedirect(reverse('scraper_list_wiki_type', args=['scraper']))),
url(r'^', include('codewiki.urls')),
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name="logout"),
url(r'^accounts/', include('registration.urls')),
url(r'^accounts/resend_activation_email/', frontend_views.resend_activation_email, name="resend_activation_email"),
url(r'^captcha/', include('captcha.urls')),
url(r'^attachauth', codewiki.views.attachauth),
# allows direct viewing of the django tables
url(r'^admin/', include(admin.site.urls)),
# favicon
(r'^favicon\.ico$', 'django.views.generic.simple.redirect_to', {'url': '/media/images/favicon.ico'}),
# RSS feeds
url(r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}, name='feeds'),
# API
(r'^api/', include('api.urls', namespace='foo', app_name='api')),
# Status
url(r'^status/$', codewiki.viewsuml.status, name='status'),
# Documentation
(r'^docs/', include('documentation.urls')),
# Robots.txt
(r'^robots.txt$', direct_to_template, {'template': 'robots.txt', 'mimetype': 'text/plain'}),
# pdf cropper technology
(r'^cropper/', include('cropper.urls')),
# froth
(r'^froth/', include('froth.urls')),
# static media server for the dev sites / local dev
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_DIR, 'show_indexes':True}),
url(r'^media-admin/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ADMIN_DIR, 'show_indexes':True}),
#Rest of the site
url(r'^', include('frontend.urls')),
# redirects from old version
(r'^editor/$', lambda request: HttpResponseRedirect('/scrapers/new/python?template=tutorial_python_trivial')),
(r'^scrapers/show/(?P<short_name>[\w_\-]+)/(?:data/|map-only/)?$',
lambda request, short_name: HttpResponseRedirect(reverse('code_overview', args=['scraper', short_name]))),
)
| rossjones/ScraperWikiX | web/urls.py | Python | agpl-3.0 | 3,556 |
#!/usr/bin/env python
import sys
import gobject
import dbus.mainloop.glib
dbus.mainloop.glib.DBusGMainLoop(set_as_default = True)
import telepathy
DBUS_PROPERTIES = 'org.freedesktop.DBus.Properties'
def get_registry():
reg = telepathy.client.ManagerRegistry()
reg.LoadManagers()
return reg
def get_connection_manager(reg):
cm = reg.GetManager('bluewire')
return cm
class Action(object):
def __init__(self):
self._action = None
def queue_action(self):
pass
def append_action(self, action):
assert self._action is None
self._action = action
def get_next_action(self):
assert self._action is not None
return self._action
def _on_done(self):
if self._action is None:
return
self._action.queue_action()
def _on_error(self, error):
print error
def _on_generic_message(self, *args):
pass
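# Usage sketch: actions form a singly linked chain; each action's _on_done
# queues its successor on the GLib main loop, e.g.
#
#     first = DummyAction()
#     first.append_action(QuitLoop(loop))
#     first.queue_action()  # runs first, which then queues QuitLoop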
class DummyAction(Action):
def queue_action(self):
gobject.idle_add(self._on_done)
class QuitLoop(Action):
def __init__(self, loop):
super(QuitLoop, self).__init__()
self._loop = loop
def queue_action(self):
self._loop.quit()
class DisplayParams(Action):
def __init__(self, cm):
super(DisplayParams, self).__init__()
self._cm = cm
def queue_action(self):
self._cm[telepathy.interfaces.CONN_MGR_INTERFACE].GetParameters(
            'bluetooth',
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, params):
print "Connection Parameters:"
for name, flags, signature, default in params:
print "\t%s (%s)" % (name, signature),
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_REQUIRED:
print "required",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_REGISTER:
print "register",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_SECRET:
print "secret",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_DBUS_PROPERTY:
print "dbus-property",
if flags & telepathy.constants.CONN_MGR_PARAM_FLAG_HAS_DEFAULT:
print "has-default(%s)" % default,
print ""
super(DisplayParams, self)._on_done()
class RequestConnection(Action):
def __init__(self, cm, username, password, forward):
super(RequestConnection, self).__init__()
self._cm = cm
self._conn = None
self._serviceName = None
self._username = username
self._password = password
self._forward = forward
@property
def conn(self):
return self._conn
@property
def serviceName(self):
return self._serviceName
def queue_action(self):
self._cm[telepathy.server.CONNECTION_MANAGER].RequestConnection(
'bluetooth",
{
'account': self._username,
'password': self._password,
'forward': self._forward,
},
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, busName, objectPath):
self._serviceName = busName
self._conn = telepathy.client.Connection(busName, objectPath)
super(RequestConnection, self)._on_done()
class Connect(Action):
def __init__(self, connAction):
super(Connect, self).__init__()
self._connAction = connAction
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].connect_to_signal(
'StatusChanged',
self._on_change,
)
self._connAction.conn[telepathy.server.CONNECTION].Connect(
reply_handler = self._on_generic_message,
error_handler = self._on_error,
)
def _on_done(self):
super(Connect, self)._on_done()
def _on_change(self, status, reason):
if status == telepathy.constants.CONNECTION_STATUS_DISCONNECTED:
print "Disconnected!"
self._conn = None
elif status == telepathy.constants.CONNECTION_STATUS_CONNECTED:
print "Connected"
self._on_done()
elif status == telepathy.constants.CONNECTION_STATUS_CONNECTING:
print "Connecting"
else:
print "Status: %r" % status
class SimplePresenceOptions(Action):
def __init__(self, connAction):
super(SimplePresenceOptions, self).__init__()
self._connAction = connAction
def queue_action(self):
self._connAction.conn[DBUS_PROPERTIES].Get(
telepathy.server.CONNECTION_INTERFACE_SIMPLE_PRESENCE,
'Statuses',
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, statuses):
print "\tAvailable Statuses"
for (key, value) in statuses.iteritems():
print "\t\t - %s" % key
super(SimplePresenceOptions, self)._on_done()
class NullHandle(object):
@property
def handle(self):
return 0
@property
def handles(self):
return []
class UserHandle(Action):
def __init__(self, connAction):
super(UserHandle, self).__init__()
self._connAction = connAction
self._handle = None
@property
def handle(self):
return self._handle
@property
def handles(self):
return [self._handle]
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].GetSelfHandle(
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, handle):
self._handle = handle
super(UserHandle, self)._on_done()
class RequestHandle(Action):
def __init__(self, connAction, handleType, handleNames):
super(RequestHandle, self).__init__()
self._connAction = connAction
self._handle = None
self._handleType = handleType
self._handleNames = handleNames
@property
def handle(self):
return self._handle
@property
def handles(self):
return [self._handle]
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].RequestHandles(
self._handleType,
self._handleNames,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, handles):
self._handle = handles[0]
super(RequestHandle, self)._on_done()
class RequestChannel(Action):
def __init__(self, connAction, handleAction, channelType, handleType):
super(RequestChannel, self).__init__()
self._connAction = connAction
self._handleAction = handleAction
self._channel = None
self._channelType = channelType
self._handleType = handleType
@property
def channel(self):
return self._channel
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].RequestChannel(
self._channelType,
self._handleType,
self._handleAction.handle,
True,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, channelObjectPath):
self._channel = telepathy.client.Channel(self._connAction.serviceName, channelObjectPath)
super(RequestChannel, self)._on_done()
class EnsureChannel(Action):
def __init__(self, connAction, channelType, handleType, handleId):
super(EnsureChannel, self).__init__()
self._connAction = connAction
self._channel = None
self._channelType = channelType
self._handleType = handleType
self._handleId = handleId
self._handle = None
@property
def channel(self):
return self._channel
@property
def handle(self):
return self._handle
@property
def handles(self):
return [self._handle]
def queue_action(self):
properties = {
telepathy.server.CHANNEL_INTERFACE+".ChannelType": self._channelType,
telepathy.server.CHANNEL_INTERFACE+".TargetHandleType": self._handleType,
telepathy.server.CHANNEL_INTERFACE+".TargetID": self._handleId,
}
self._connAction.conn[telepathy.server.CONNECTION_INTERFACE_REQUESTS].EnsureChannel(
properties,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, yours, channelObjectPath, properties):
print "Create?", not not yours
print "Path:", channelObjectPath
print "Properties:", properties
self._channel = telepathy.client.Channel(self._connAction.serviceName, channelObjectPath)
self._handle = properties[telepathy.server.CHANNEL_INTERFACE+".TargetHandle"]
super(EnsureChannel, self)._on_done()
class CloseChannel(Action):
def __init__(self, connAction, chanAction):
super(CloseChannel, self).__init__()
self._connAction = connAction
self._chanAction = chanAction
self._handles = []
def queue_action(self):
self._chanAction.channel[telepathy.server.CHANNEL].Close(
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self):
super(CloseChannel, self)._on_done()
class ContactHandles(Action):
def __init__(self, connAction, chanAction):
super(ContactHandles, self).__init__()
self._connAction = connAction
self._chanAction = chanAction
self._handles = []
@property
def handles(self):
return self._handles
def queue_action(self):
self._chanAction.channel[DBUS_PROPERTIES].Get(
telepathy.server.CHANNEL_INTERFACE_GROUP,
'Members',
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, handles):
self._handles = list(handles)
super(ContactHandles, self)._on_done()
class SimplePresenceStatus(Action):
def __init__(self, connAction, handleAction):
super(SimplePresenceStatus, self).__init__()
self._connAction = connAction
self._handleAction = handleAction
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION_INTERFACE_SIMPLE_PRESENCE].GetPresences(
self._handleAction.handles,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, aliases):
print "\tPresences:"
for hid, (presenceType, presence, presenceMessage) in aliases.iteritems():
print "\t\t%s:" % hid, presenceType, presence, presenceMessage
super(SimplePresenceStatus, self)._on_done()
class SetSimplePresence(Action):
def __init__(self, connAction, status, message):
super(SetSimplePresence, self).__init__()
self._connAction = connAction
self._status = status
self._message = message
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION_INTERFACE_SIMPLE_PRESENCE].SetPresence(
self._status,
self._message,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self):
super(SetSimplePresence, self)._on_done()
class Aliases(Action):
def __init__(self, connAction, handleAction):
super(Aliases, self).__init__()
self._connAction = connAction
self._handleAction = handleAction
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION_INTERFACE_ALIASING].RequestAliases(
self._handleAction.handles,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self, aliases):
print "\tAliases:"
for h, alias in zip(self._handleAction.handles, aliases):
print "\t\t", h, alias
super(Aliases, self)._on_done()
class Call(Action):
def __init__(self, connAction, chanAction, handleAction):
super(Call, self).__init__()
self._connAction = connAction
self._chanAction = chanAction
self._handleAction = handleAction
def queue_action(self):
self._chanAction.channel[telepathy.server.CHANNEL_TYPE_STREAMED_MEDIA].RequestStreams(
self._handleAction.handle,
[telepathy.constants.MEDIA_STREAM_TYPE_AUDIO],
reply_handler = self._on_done,
error_handler = self._on_error,
)
    def _on_done(self, streams):
        print "Call started"
        super(Call, self)._on_done()
class SendText(Action):
def __init__(self, connAction, chanAction, handleAction, messageType, message):
super(SendText, self).__init__()
self._connAction = connAction
self._chanAction = chanAction
self._handleAction = handleAction
self._messageType = messageType
self._message = message
def queue_action(self):
self._chanAction.channel[telepathy.server.CHANNEL_TYPE_TEXT].Send(
self._messageType,
self._message,
reply_handler = self._on_done,
error_handler = self._on_error,
)
def _on_done(self,):
print "Message sent"
super(SendText, self)._on_done()
class Sleep(Action):
def __init__(self, length):
super(Sleep, self).__init__()
self._length = length
def queue_action(self):
gobject.timeout_add(self._length, self._on_done)
class Block(Action):
def __init__(self):
super(Block, self).__init__()
def queue_action(self):
print "Blocking"
    def _on_done(self):
        # Deliberately never chains to the next action; Block stalls the queue.
        pass
class Disconnect(Action):
def __init__(self, connAction):
super(Disconnect, self).__init__()
self._connAction = connAction
def queue_action(self):
self._connAction.conn[telepathy.server.CONNECTION].Disconnect(
reply_handler = self._on_done,
error_handler = self._on_error,
)
if __name__ == '__main__':
loop = gobject.MainLoop()
reg = get_registry()
cm = get_connection_manager(reg)
nullHandle = NullHandle()
dummy = DummyAction()
firstAction = dummy
lastAction = dummy
if True:
dp = DisplayParams(cm)
lastAction.append_action(dp)
lastAction = lastAction.get_next_action()
if True:
username = sys.argv[1]
password = sys.argv[2]
forward = sys.argv[3]
reqcon = RequestConnection(cm, username, password, forward)
lastAction.append_action(reqcon)
lastAction = lastAction.get_next_action()
if False:
reqcon = RequestConnection(cm, username, password, forward)
lastAction.append_action(reqcon)
lastAction = lastAction.get_next_action()
con = Connect(reqcon)
lastAction.append_action(con)
lastAction = lastAction.get_next_action()
if True:
spo = SimplePresenceOptions(reqcon)
lastAction.append_action(spo)
lastAction = lastAction.get_next_action()
if True:
uh = UserHandle(reqcon)
lastAction.append_action(uh)
lastAction = lastAction.get_next_action()
ua = Aliases(reqcon, uh)
lastAction.append_action(ua)
lastAction = lastAction.get_next_action()
sps = SimplePresenceStatus(reqcon, uh)
lastAction.append_action(sps)
lastAction = lastAction.get_next_action()
if False:
setdnd = SetSimplePresence(reqcon, "dnd", "")
lastAction.append_action(setdnd)
lastAction = lastAction.get_next_action()
sps = SimplePresenceStatus(reqcon, uh)
lastAction.append_action(sps)
lastAction = lastAction.get_next_action()
setdnd = SetSimplePresence(reqcon, "available", "")
lastAction.append_action(setdnd)
lastAction = lastAction.get_next_action()
sps = SimplePresenceStatus(reqcon, uh)
lastAction.append_action(sps)
lastAction = lastAction.get_next_action()
if False:
sl = Sleep(10 * 1000)
lastAction.append_action(sl)
lastAction = lastAction.get_next_action()
if False:
rclh = RequestHandle(reqcon, telepathy.HANDLE_TYPE_LIST, ["subscribe"])
lastAction.append_action(rclh)
lastAction = lastAction.get_next_action()
rclc = RequestChannel(
reqcon,
rclh,
telepathy.CHANNEL_TYPE_CONTACT_LIST,
telepathy.HANDLE_TYPE_LIST,
)
lastAction.append_action(rclc)
lastAction = lastAction.get_next_action()
ch = ContactHandles(reqcon, rclc)
lastAction.append_action(ch)
lastAction = lastAction.get_next_action()
ca = Aliases(reqcon, ch)
lastAction.append_action(ca)
lastAction = lastAction.get_next_action()
if True:
accountNumber = sys.argv[4]
enChan = EnsureChannel(reqcon, telepathy.CHANNEL_TYPE_TEXT, telepathy.HANDLE_TYPE_CONTACT, accountNumber)
lastAction.append_action(enChan)
lastAction = lastAction.get_next_action()
sendDebugtext = SendText(reqcon, enChan, enChan, telepathy.CHANNEL_TEXT_MESSAGE_TYPE_NORMAL, "Boo!")
lastAction.append_action(sendDebugtext)
lastAction = lastAction.get_next_action()
if False:
rch = RequestHandle(reqcon, telepathy.HANDLE_TYPE_CONTACT, ["18005558355"]) #(1-800-555-TELL)
lastAction.append_action(rch)
lastAction = lastAction.get_next_action()
# making a phone call
if True:
smHandle = rch
smHandleType = telepathy.HANDLE_TYPE_CONTACT
else:
smHandle = nullHandle
smHandleType = telepathy.HANDLE_TYPE_NONE
rsmc = RequestChannel(
reqcon,
smHandle,
telepathy.CHANNEL_TYPE_STREAMED_MEDIA,
smHandleType,
)
lastAction.append_action(rsmc)
lastAction = lastAction.get_next_action()
if False:
call = Call(reqcon, rsmc, rch)
lastAction.append_action(call)
lastAction = lastAction.get_next_action()
# sending a text
rtc = RequestChannel(
reqcon,
rch,
telepathy.CHANNEL_TYPE_TEXT,
smHandleType,
)
lastAction.append_action(rtc)
lastAction = lastAction.get_next_action()
if True:
closechan = CloseChannel(reqcon, rtc)
lastAction.append_action(closechan)
lastAction = lastAction.get_next_action()
rtc = RequestChannel(
reqcon,
rch,
telepathy.CHANNEL_TYPE_TEXT,
smHandleType,
)
lastAction.append_action(rtc)
lastAction = lastAction.get_next_action()
if False:
sendtext = SendText(reqcon, rtc, rch, telepathy.CHANNEL_TEXT_MESSAGE_TYPE_NORMAL, "Boo!")
lastAction.append_action(sendtext)
lastAction = lastAction.get_next_action()
if False:
bl = Block()
lastAction.append_action(bl)
lastAction = lastAction.get_next_action()
if False:
sl = Sleep(30 * 1000)
lastAction.append_action(sl)
lastAction = lastAction.get_next_action()
dis = Disconnect(reqcon)
lastAction.append_action(dis)
lastAction = lastAction.get_next_action()
quitter = QuitLoop(loop)
lastAction.append_action(quitter)
lastAction = lastAction.get_next_action()
firstAction.queue_action()
loop.run()
| epage/telepathy-bluewire | hand_tests/generic.py | Python | lgpl-2.1 | 17,072 |
import json
import etcd
from tendrl.gluster_bridge.atoms.volume.set import Set
class SetVolumeOption(object):
def __init__(self, api_job):
super(SetVolumeOption, self).__init__()
self.api_job = api_job
        self.atom = Set
def start(self):
attributes = json.loads(self.api_job['attributes'].decode('utf-8'))
vol_name = attributes['volname']
option = attributes['option_name']
option_value = attributes['option_value']
self.atom().start(vol_name, option, option_value)
self.api_job['status'] = "finished"
etcd.Client().write(self.api_job['request_id'],
json.dumps(self.api_job))
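# Sketch of the api_job mapping this flow expects (inferred from the code
# above; the concrete values are illustrative):
#
#     {
#         'request_id': '/queue/<job-id>',
#         'status': 'processing',
#         'attributes': b'{"volname": "vol1", "option_name": "...", "option_value": "..."}'
#     }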
| shtripat/gluster_bridge | tendrl/gluster_bridge/flows/set_volume_option.py | Python | lgpl-2.1 | 705 |
import sys
import time
sleep = time.sleep
if sys.platform == 'win32':
time = time.clock
else:
time = time.time
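# A minimal usage sketch (added for illustration): measure a short sleep with
# the platform-appropriate timer selected above.
if __name__ == '__main__':
    start = time()
    sleep(0.1)
    print('slept for %.3fs' % (time() - start))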
| egbertbouman/tribler-g | Tribler/Core/DecentralizedTracking/pymdht/core/ptime.py | Python | lgpl-2.1 | 124 |
# Authors: David Goodger; Gunnar Schwant
# Contact: goodger@users.sourceforge.net
# Revision: $Revision: 21817 $
# Date: $Date: 2005-07-21 13:39:57 -0700 (Thu, 21 Jul 2005) $
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
German language mappings for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
labels = {
'author': 'Autor',
'authors': 'Autoren',
'organization': 'Organisation',
'address': 'Adresse',
'contact': 'Kontakt',
'version': 'Version',
'revision': 'Revision',
'status': 'Status',
'date': 'Datum',
'dedication': 'Widmung',
'copyright': 'Copyright',
'abstract': 'Zusammenfassung',
'attention': 'Achtung!',
'caution': 'Vorsicht!',
'danger': '!GEFAHR!',
'error': 'Fehler',
'hint': 'Hinweis',
'important': 'Wichtig',
'note': 'Bemerkung',
'tip': 'Tipp',
'warning': 'Warnung',
'contents': 'Inhalt'}
"""Mapping of node class name to label text."""
bibliographic_fields = {
'autor': 'author',
'autoren': 'authors',
'organisation': 'organization',
'adresse': 'address',
'kontakt': 'contact',
'version': 'version',
'revision': 'revision',
'status': 'status',
'datum': 'date',
'copyright': 'copyright',
'widmung': 'dedication',
'zusammenfassung': 'abstract'}
"""German (lowcased) to canonical name mapping for bibliographic fields."""
author_separators = [';', ',']
"""List of separator strings for the 'Authors' bibliographic field. Tried in
order."""
| garinh/cs | docs/support/docutils/languages/de.py | Python | lgpl-2.1 | 1,814 |
# Orca
#
# Copyright 2005-2009 Sun Microsystems Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
"""Displays a GUI for the user to set Orca preferences."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2005-2009 Sun Microsystems Inc."
__license__ = "LGPL"
import os
from gi.repository import Gdk
from gi.repository import GLib
from gi.repository import Gtk
from gi.repository import GObject
from gi.repository import Pango
import pyatspi
import time
from . import acss
from . import debug
from . import guilabels
from . import messages
from . import orca
from . import orca_gtkbuilder
from . import orca_gui_profile
from . import orca_state
from . import orca_platform
from . import settings
from . import settings_manager
from . import input_event
from . import keybindings
from . import pronunciation_dict
from . import braille
from . import speech
from . import speechserver
from . import text_attribute_names
_settingsManager = settings_manager.getManager()
try:
import louis
except ImportError:
louis = None
from .orca_platform import tablesdir
if louis and not tablesdir:
louis = None
(HANDLER, DESCRIP, MOD_MASK1, MOD_USED1, KEY1, CLICK_COUNT1, OLDTEXT1, \
TEXT1, MODIF, EDITABLE) = list(range(10))
(NAME, IS_SPOKEN, IS_BRAILLED, VALUE) = list(range(4))
(ACTUAL, REPLACEMENT) = list(range(2))
# Must match the order of voice types in the GtkBuilder file.
#
(DEFAULT, UPPERCASE, HYPERLINK, SYSTEM) = list(range(4))
# Must match the order that the timeFormatCombo is populated.
#
(TIME_FORMAT_LOCALE, TIME_FORMAT_12_HM, TIME_FORMAT_12_HMS, TIME_FORMAT_24_HMS,
TIME_FORMAT_24_HMS_WITH_WORDS, TIME_FORMAT_24_HM,
TIME_FORMAT_24_HM_WITH_WORDS) = list(range(7))
# Must match the order that the dateFormatCombo is populated.
#
(DATE_FORMAT_LOCALE, DATE_FORMAT_NUMBERS_DM, DATE_FORMAT_NUMBERS_MD,
DATE_FORMAT_NUMBERS_DMY, DATE_FORMAT_NUMBERS_MDY, DATE_FORMAT_NUMBERS_YMD,
DATE_FORMAT_FULL_DM, DATE_FORMAT_FULL_MD, DATE_FORMAT_FULL_DMY,
DATE_FORMAT_FULL_MDY, DATE_FORMAT_FULL_YMD, DATE_FORMAT_ABBREVIATED_DM,
DATE_FORMAT_ABBREVIATED_MD, DATE_FORMAT_ABBREVIATED_DMY,
DATE_FORMAT_ABBREVIATED_MDY, DATE_FORMAT_ABBREVIATED_YMD) = list(range(16))
class OrcaSetupGUI(orca_gtkbuilder.GtkBuilderWrapper):
def __init__(self, fileName, windowName, prefsDict):
"""Initialize the Orca configuration GUI.
Arguments:
- fileName: name of the GtkBuilder file.
- windowName: name of the component to get from the GtkBuilder file.
- prefsDict: dictionary of preferences to use during initialization
"""
orca_gtkbuilder.GtkBuilderWrapper.__init__(self, fileName, windowName)
self.prefsDict = prefsDict
self._defaultProfile = ['Default', 'default']
# Initialize variables to None to keep pylint happy.
#
self.bbindings = None
self.cellRendererText = None
self.defaultVoice = None
self.disableKeyGrabPref = None
self.getTextAttributesView = None
self.hyperlinkVoice = None
self.initializingSpeech = None
self.kbindings = None
self.keyBindingsModel = None
self.keyBindView = None
self.newBinding = None
self.pendingKeyBindings = None
self.planeCellRendererText = None
self.pronunciationModel = None
self.pronunciationView = None
self.screenHeight = None
self.screenWidth = None
self.speechFamiliesChoice = None
self.speechFamiliesChoices = None
self.speechFamiliesModel = None
self.speechLanguagesChoice = None
self.speechLanguagesChoices = None
self.speechLanguagesModel = None
self.speechFamilies = []
self.speechServersChoice = None
self.speechServersChoices = None
self.speechServersModel = None
self.speechSystemsChoice = None
self.speechSystemsChoices = None
self.speechSystemsModel = None
self.systemVoice = None
self.uppercaseVoice = None
self.window = None
self.workingFactories = None
self.savedGain = None
self.savedPitch = None
self.savedRate = None
self._isInitialSetup = False
self.selectedFamilyChoices = {}
self.selectedLanguageChoices = {}
self.profilesCombo = None
self.profilesComboModel = None
self.startingProfileCombo = None
self._capturedKey = []
self.script = None
def init(self, script):
"""Initialize the Orca configuration GUI. Read the users current
set of preferences and set the GUI state to match. Setup speech
support and populate the combo box lists on the Speech Tab pane
accordingly.
"""
self.script = script
# Restore the default rate/pitch/gain,
# in case the user played with the sliders.
#
try:
voices = _settingsManager.getSetting('voices')
defaultVoice = voices[settings.DEFAULT_VOICE]
except KeyError:
defaultVoice = {}
try:
self.savedGain = defaultVoice[acss.ACSS.GAIN]
except KeyError:
self.savedGain = 10.0
try:
self.savedPitch = defaultVoice[acss.ACSS.AVERAGE_PITCH]
except KeyError:
self.savedPitch = 5.0
try:
self.savedRate = defaultVoice[acss.ACSS.RATE]
except KeyError:
self.savedRate = 50.0
# ***** Key Bindings treeview initialization *****
self.keyBindView = self.get_widget("keyBindingsTreeview")
if self.keyBindView.get_columns():
for column in self.keyBindView.get_columns():
self.keyBindView.remove_column(column)
self.keyBindingsModel = Gtk.TreeStore(
GObject.TYPE_STRING, # Handler name
GObject.TYPE_STRING, # Human Readable Description
GObject.TYPE_STRING, # Modifier mask 1
GObject.TYPE_STRING, # Used Modifiers 1
GObject.TYPE_STRING, # Modifier key name 1
GObject.TYPE_STRING, # Click count 1
GObject.TYPE_STRING, # Original Text of the Key Binding Shown 1
GObject.TYPE_STRING, # Text of the Key Binding Shown 1
GObject.TYPE_BOOLEAN, # Key Modified by User
GObject.TYPE_BOOLEAN) # Row with fields editable or not
self.planeCellRendererText = Gtk.CellRendererText()
self.cellRendererText = Gtk.CellRendererText()
self.cellRendererText.set_property("ellipsize", Pango.EllipsizeMode.END)
        # HANDLER - invisible column
#
column = Gtk.TreeViewColumn("Handler",
self.planeCellRendererText,
text=HANDLER)
column.set_resizable(True)
column.set_visible(False)
column.set_sort_column_id(HANDLER)
self.keyBindView.append_column(column)
# DESCRIP
#
column = Gtk.TreeViewColumn(guilabels.KB_HEADER_FUNCTION,
self.cellRendererText,
text=DESCRIP)
column.set_resizable(True)
column.set_min_width(380)
column.set_sort_column_id(DESCRIP)
self.keyBindView.append_column(column)
        # MOD_MASK1 - invisible column
#
column = Gtk.TreeViewColumn("Mod.Mask 1",
self.planeCellRendererText,
text=MOD_MASK1)
column.set_visible(False)
column.set_resizable(True)
column.set_sort_column_id(MOD_MASK1)
self.keyBindView.append_column(column)
        # MOD_USED1 - invisible column
#
column = Gtk.TreeViewColumn("Use Mod.1",
self.planeCellRendererText,
text=MOD_USED1)
column.set_visible(False)
column.set_resizable(True)
column.set_sort_column_id(MOD_USED1)
self.keyBindView.append_column(column)
        # KEY1 - invisible column
#
column = Gtk.TreeViewColumn("Key1",
self.planeCellRendererText,
text=KEY1)
column.set_resizable(True)
column.set_visible(False)
column.set_sort_column_id(KEY1)
self.keyBindView.append_column(column)
        # CLICK_COUNT1 - invisible column
#
column = Gtk.TreeViewColumn("ClickCount1",
self.planeCellRendererText,
text=CLICK_COUNT1)
column.set_resizable(True)
column.set_visible(False)
column.set_sort_column_id(CLICK_COUNT1)
self.keyBindView.append_column(column)
        # OLDTEXT1 - invisible column which will store a copy of the
# original keybinding in TEXT1 prior to the Apply or OK
# buttons being pressed. This will prevent automatic
# resorting each time a cell is edited.
#
column = Gtk.TreeViewColumn("OldText1",
self.planeCellRendererText,
text=OLDTEXT1)
column.set_resizable(True)
column.set_visible(False)
column.set_sort_column_id(OLDTEXT1)
self.keyBindView.append_column(column)
# TEXT1
#
rendererText = Gtk.CellRendererText()
rendererText.connect("editing-started",
self.editingKey,
self.keyBindingsModel)
rendererText.connect("editing-canceled",
self.editingCanceledKey)
rendererText.connect('edited',
self.editedKey,
self.keyBindingsModel,
MOD_MASK1, MOD_USED1, KEY1, CLICK_COUNT1, TEXT1)
column = Gtk.TreeViewColumn(guilabels.KB_HEADER_KEY_BINDING,
rendererText,
text=TEXT1,
editable=EDITABLE)
column.set_resizable(True)
column.set_sort_column_id(OLDTEXT1)
self.keyBindView.append_column(column)
# MODIF
#
rendererToggle = Gtk.CellRendererToggle()
rendererToggle.connect('toggled',
self.keyModifiedToggle,
self.keyBindingsModel,
MODIF)
column = Gtk.TreeViewColumn(guilabels.KB_MODIFIED,
rendererToggle,
active=MODIF,
activatable=EDITABLE)
column.set_resizable(True)
column.set_sort_column_id(MODIF)
self.keyBindView.append_column(column)
        # EDITABLE - invisible column
#
rendererToggle = Gtk.CellRendererToggle()
rendererToggle.set_property('activatable', False)
column = Gtk.TreeViewColumn("Modified",
rendererToggle,
active=EDITABLE)
column.set_visible(False)
column.set_resizable(True)
column.set_sort_column_id(EDITABLE)
self.keyBindView.append_column(column)
# Populates the treeview with all the keybindings:
#
self._populateKeyBindings()
self.window = self.get_widget("orcaSetupWindow")
self._setKeyEchoItems()
self.speechSystemsModel = \
self._initComboBox(self.get_widget("speechSystems"))
self.speechServersModel = \
self._initComboBox(self.get_widget("speechServers"))
self.speechLanguagesModel = \
self._initComboBox(self.get_widget("speechLanguages"))
self.speechFamiliesModel = \
self._initComboBox(self.get_widget("speechFamilies"))
self._initSpeechState()
# TODO - JD: Will this ever be the case??
self._isInitialSetup = \
not os.path.exists(_settingsManager.getPrefsDir())
appPage = self.script.getAppPreferencesGUI()
if appPage:
label = Gtk.Label(label=self.script.app.name)
self.get_widget("notebook").append_page(appPage, label)
self._initGUIState()
def _getACSSForVoiceType(self, voiceType):
"""Return the ACSS value for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
Returns the voice dictionary for the given voice type.
"""
if voiceType == DEFAULT:
voiceACSS = self.defaultVoice
elif voiceType == UPPERCASE:
voiceACSS = self.uppercaseVoice
elif voiceType == HYPERLINK:
voiceACSS = self.hyperlinkVoice
elif voiceType == SYSTEM:
voiceACSS = self.systemVoice
else:
voiceACSS = self.defaultVoice
return voiceACSS
def writeUserPreferences(self):
"""Write out the user's generic Orca preferences.
"""
pronunciationDict = self.getModelDict(self.pronunciationModel)
keyBindingsDict = self.getKeyBindingsModelDict(self.keyBindingsModel)
self.prefsDict.update(self.script.getPreferencesFromGUI())
_settingsManager.saveSettings(self.script,
self.prefsDict,
pronunciationDict,
keyBindingsDict)
def _getKeyValueForVoiceType(self, voiceType, key, useDefault=True):
"""Look for the value of the given key in the voice dictionary
for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
- key: the key to look for in the voice dictionary.
- useDefault: if True, and the key isn't found for the given voice
type, the look for it in the default voice dictionary
as well.
Returns the value of the given key, or None if it's not set.
"""
if voiceType == DEFAULT:
voice = self.defaultVoice
elif voiceType == UPPERCASE:
voice = self.uppercaseVoice
if key not in voice:
if not useDefault:
return None
voice = self.defaultVoice
elif voiceType == HYPERLINK:
voice = self.hyperlinkVoice
if key not in voice:
if not useDefault:
return None
voice = self.defaultVoice
elif voiceType == SYSTEM:
voice = self.systemVoice
if key not in voice:
if not useDefault:
return None
voice = self.defaultVoice
else:
voice = self.defaultVoice
if key in voice:
return voice[key]
else:
return None
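    # For example, _getKeyValueForVoiceType(UPPERCASE, acss.ACSS.RATE) returns
    # the uppercase voice's rate if set, otherwise falls back to the default
    # voice's rate (unless useDefault is False).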
def _getFamilyNameForVoiceType(self, voiceType):
"""Gets the name of the voice family for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
Returns the name of the voice family for the given voice type,
or None if not set.
"""
familyName = None
family = self._getKeyValueForVoiceType(voiceType, acss.ACSS.FAMILY)
if family and speechserver.VoiceFamily.NAME in family:
familyName = family[speechserver.VoiceFamily.NAME]
return familyName
def _setFamilyNameForVoiceType(self, voiceType, name, language, dialect, variant):
"""Sets the name of the voice family for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
- name: the name of the voice family to set.
- language: the locale of the voice family to set.
        - dialect: the dialect of the voice family to set.
        - variant: the variant of the voice family to set.
        """
family = self._getKeyValueForVoiceType(voiceType,
acss.ACSS.FAMILY,
False)
voiceACSS = self._getACSSForVoiceType(voiceType)
if family:
family[speechserver.VoiceFamily.NAME] = name
family[speechserver.VoiceFamily.LANG] = language
family[speechserver.VoiceFamily.DIALECT] = dialect
family[speechserver.VoiceFamily.VARIANT] = variant
else:
voiceACSS[acss.ACSS.FAMILY] = {}
voiceACSS[acss.ACSS.FAMILY][speechserver.VoiceFamily.NAME] = name
voiceACSS[acss.ACSS.FAMILY][speechserver.VoiceFamily.LANG] = language
voiceACSS[acss.ACSS.FAMILY][speechserver.VoiceFamily.DIALECT] = dialect
voiceACSS[acss.ACSS.FAMILY][speechserver.VoiceFamily.VARIANT] = variant
voiceACSS['established'] = True
#settings.voices[voiceType] = voiceACSS
def _getRateForVoiceType(self, voiceType):
"""Gets the speaking rate value for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
Returns the rate value for the given voice type, or None if
not set.
"""
return self._getKeyValueForVoiceType(voiceType, acss.ACSS.RATE)
def _setRateForVoiceType(self, voiceType, value):
"""Sets the speaking rate value for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
- value: the rate value to set.
"""
voiceACSS = self._getACSSForVoiceType(voiceType)
voiceACSS[acss.ACSS.RATE] = value
voiceACSS['established'] = True
#settings.voices[voiceType] = voiceACSS
def _getPitchForVoiceType(self, voiceType):
"""Gets the pitch value for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
Returns the pitch value for the given voice type, or None if
not set.
"""
return self._getKeyValueForVoiceType(voiceType,
acss.ACSS.AVERAGE_PITCH)
def _setPitchForVoiceType(self, voiceType, value):
"""Sets the pitch value for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
- value: the pitch value to set.
"""
voiceACSS = self._getACSSForVoiceType(voiceType)
voiceACSS[acss.ACSS.AVERAGE_PITCH] = value
voiceACSS['established'] = True
#settings.voices[voiceType] = voiceACSS
def _getVolumeForVoiceType(self, voiceType):
"""Gets the volume (gain) value for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
Returns the volume (gain) value for the given voice type, or
None if not set.
"""
return self._getKeyValueForVoiceType(voiceType, acss.ACSS.GAIN)
def _setVolumeForVoiceType(self, voiceType, value):
"""Sets the volume (gain) value for the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
- value: the volume (gain) value to set.
"""
voiceACSS = self._getACSSForVoiceType(voiceType)
voiceACSS[acss.ACSS.GAIN] = value
voiceACSS['established'] = True
#settings.voices[voiceType] = voiceACSS
def _setVoiceSettingsForVoiceType(self, voiceType):
"""Sets the family, rate, pitch and volume GUI components based
on the given voice type.
Arguments:
- voiceType: one of DEFAULT, UPPERCASE, HYPERLINK, SYSTEM
"""
familyName = self._getFamilyNameForVoiceType(voiceType)
self._setSpeechFamiliesChoice(familyName)
rate = self._getRateForVoiceType(voiceType)
if rate is not None:
self.get_widget("rateScale").set_value(rate)
else:
self.get_widget("rateScale").set_value(50.0)
pitch = self._getPitchForVoiceType(voiceType)
if pitch is not None:
self.get_widget("pitchScale").set_value(pitch)
else:
self.get_widget("pitchScale").set_value(5.0)
volume = self._getVolumeForVoiceType(voiceType)
if volume is not None:
self.get_widget("volumeScale").set_value(volume)
else:
self.get_widget("volumeScale").set_value(10.0)
def _setSpeechFamiliesChoice(self, familyName):
"""Sets the active item in the families ("Person:") combo box
to the given family name.
Arguments:
- familyName: the family name to use to set the active combo box item.
"""
if len(self.speechFamilies) == 0:
return
languageSet = False
familySet = False
for family in self.speechFamilies:
name = family[speechserver.VoiceFamily.NAME]
if name == familyName:
lang = family[speechserver.VoiceFamily.LANG]
dialect = family[speechserver.VoiceFamily.DIALECT]
if dialect:
language = lang + '-' + dialect
else:
language = lang
i = 0
for languageChoice in self.speechLanguagesChoices:
if languageChoice == language:
self.get_widget("speechLanguages").set_active(i)
self.speechLanguagesChoice = self.speechLanguagesChoices[i]
languageSet = True
self._setupFamilies()
i = 0
for familyChoice in self.speechFamiliesChoices:
name = familyChoice[speechserver.VoiceFamily.NAME]
if name == familyName:
self.get_widget("speechFamilies").set_active(i)
self.speechFamiliesChoice = self.speechFamiliesChoices[i]
familySet = True
break
i += 1
break
i += 1
break
if not languageSet:
debug.println(debug.LEVEL_FINEST,
"Could not find speech language match for %s" \
% familyName)
self.get_widget("speechLanguages").set_active(0)
self.speechLanguagesChoice = self.speechLanguagesChoices[0]
if languageSet:
self.selectedLanguageChoices[self.speechServersChoice] = i
if not familySet:
debug.println(debug.LEVEL_FINEST,
"Could not find speech family match for %s" \
% familyName)
self.get_widget("speechFamilies").set_active(0)
self.speechFamiliesChoice = self.speechFamiliesChoices[0]
if familySet:
self.selectedFamilyChoices[self.speechServersChoice,
self.speechLanguagesChoice] = i
def _setupFamilies(self):
"""Gets the list of voice variants for the current speech server and
current language.
If there are variants, get the information associated with
each voice variant and add an entry for it to the variants
GtkComboBox list.
"""
self.speechFamiliesModel.clear()
currentLanguage = self.speechLanguagesChoice
i = 0
self.speechFamiliesChoices = []
for family in self.speechFamilies:
lang = family[speechserver.VoiceFamily.LANG]
dialect = family[speechserver.VoiceFamily.DIALECT]
if dialect:
language = lang + '-' + dialect
else:
language = lang
if language != currentLanguage:
continue
name = family[speechserver.VoiceFamily.NAME]
self.speechFamiliesChoices.append(family)
self.speechFamiliesModel.append((i, name))
i += 1
if i == 0:
debug.println(debug.LEVEL_SEVERE, "No speech family was available for %s." % str(currentLanguage))
debug.printStack(debug.LEVEL_FINEST)
self.speechFamiliesChoice = None
return
        # If the user manually selected a family for the current speech
        # server, that choice is restored. Otherwise the first family
        # (usually the default one) is selected.
        #
selectedIndex = 0
if (self.speechServersChoice, self.speechLanguagesChoice) \
in self.selectedFamilyChoices:
selectedIndex = self.selectedFamilyChoices[self.speechServersChoice,
self.speechLanguagesChoice]
self.get_widget("speechFamilies").set_active(selectedIndex)
def _setSpeechLanguagesChoice(self, languageName):
"""Sets the active item in the languages ("Language:") combo box
to the given language name.
Arguments:
- languageName: the language name to use to set the active combo box item.
"""
print("setSpeechLanguagesChoice")
if len(self.speechLanguagesChoices) == 0:
return
valueSet = False
i = 0
for language in self.speechLanguagesChoices:
if language == languageName:
self.get_widget("speechLanguages").set_active(i)
self.speechLanguagesChoice = self.speechLanguagesChoices[i]
valueSet = True
break
i += 1
if not valueSet:
debug.println(debug.LEVEL_FINEST,
"Could not find speech language match for %s" \
% languageName)
self.get_widget("speechLanguages").set_active(0)
self.speechLanguagesChoice = self.speechLanguagesChoices[0]
if valueSet:
self.selectedLanguageChoices[self.speechServersChoice] = i
self._setupFamilies()
def _setupVoices(self):
"""Gets the list of voices for the current speech server.
If there are families, get the information associated with
each voice family and add an entry for it to the families
GtkComboBox list.
"""
self.speechLanguagesModel.clear()
self.speechFamilies = self.speechServersChoice.getVoiceFamilies()
self.speechLanguagesChoices = []
if len(self.speechFamilies) == 0:
debug.println(debug.LEVEL_SEVERE, "No speech voice was available.")
debug.printStack(debug.LEVEL_FINEST)
self.speechLanguagesChoice = None
return
done = {}
i = 0
for family in self.speechFamilies:
lang = family[speechserver.VoiceFamily.LANG]
dialect = family[speechserver.VoiceFamily.DIALECT]
if (lang,dialect) in done:
continue
done[lang,dialect] = True
if dialect:
language = lang + '-' + dialect
else:
language = lang
# TODO: get translated language name from CLDR or such
msg = language
if msg == "":
# Unsupported locale
msg = "default language"
self.speechLanguagesChoices.append(language)
self.speechLanguagesModel.append((i, msg))
i += 1
        # If the user manually selected a language for the current speech
        # server, that choice is restored. Otherwise the first language
        # (usually the default one) is selected.
        #
selectedIndex = 0
if self.speechServersChoice in self.selectedLanguageChoices:
selectedIndex = self.selectedLanguageChoices[self.speechServersChoice]
self.get_widget("speechLanguages").set_active(selectedIndex)
if self.initializingSpeech:
self.speechLanguagesChoice = self.speechLanguagesChoices[selectedIndex]
self._setupFamilies()
# The family name will be selected as part of selecting the
# voice type. Whenever the families change, we'll reset the
# voice type selection to the first one ("Default").
#
comboBox = self.get_widget("voiceTypesCombo")
types = [guilabels.SPEECH_VOICE_TYPE_DEFAULT,
guilabels.SPEECH_VOICE_TYPE_UPPERCASE,
guilabels.SPEECH_VOICE_TYPE_HYPERLINK,
guilabels.SPEECH_VOICE_TYPE_SYSTEM]
self.populateComboBox(comboBox, types)
comboBox.set_active(DEFAULT)
voiceType = comboBox.get_active()
self._setVoiceSettingsForVoiceType(voiceType)
def _setSpeechServersChoice(self, serverInfo):
"""Sets the active item in the speech servers combo box to the
given server.
Arguments:
- serverInfo: the speech server to use to set the active combo
box item.
"""
if len(self.speechServersChoices) == 0:
return
# We'll fallback to whatever we happen to be using in the event
# that this preference has never been set.
#
if not serverInfo:
serverInfo = speech.getInfo()
valueSet = False
i = 0
for server in self.speechServersChoices:
if serverInfo == server.getInfo():
self.get_widget("speechServers").set_active(i)
self.speechServersChoice = server
valueSet = True
break
i += 1
if not valueSet:
debug.println(debug.LEVEL_FINEST,
"Could not find speech server match for %s" \
% repr(serverInfo))
self.get_widget("speechServers").set_active(0)
self.speechServersChoice = self.speechServersChoices[0]
self._setupVoices()
def _setupSpeechServers(self):
"""Gets the list of speech servers for the current speech factory.
If there are servers, get the information associated with each
speech server and add an entry for it to the speechServers
GtkComboBox list. Set the current choice to be the first item.
"""
self.speechServersModel.clear()
self.speechServersChoices = \
self.speechSystemsChoice.SpeechServer.getSpeechServers()
if len(self.speechServersChoices) == 0:
debug.println(debug.LEVEL_SEVERE, "Speech not available.")
debug.printStack(debug.LEVEL_FINEST)
self.speechServersChoice = None
self.speechLanguagesChoices = []
self.speechLanguagesChoice = None
self.speechFamiliesChoices = []
self.speechFamiliesChoice = None
return
i = 0
for server in self.speechServersChoices:
name = server.getInfo()[0]
self.speechServersModel.append((i, name))
i += 1
self._setSpeechServersChoice(self.prefsDict["speechServerInfo"])
debug.println(
debug.LEVEL_FINEST,
"orca_gui_prefs._setupSpeechServers: speechServersChoice: %s" \
% self.speechServersChoice.getInfo())
def _setSpeechSystemsChoice(self, systemName):
"""Set the active item in the speech systems combo box to the
given system name.
Arguments:
- systemName: the speech system name to use to set the active combo
box item.
"""
systemName = systemName.strip("'")
if len(self.speechSystemsChoices) == 0:
self.speechSystemsChoice = None
return
valueSet = False
i = 0
for speechSystem in self.speechSystemsChoices:
name = speechSystem.__name__
if name.endswith(systemName):
self.get_widget("speechSystems").set_active(i)
self.speechSystemsChoice = self.speechSystemsChoices[i]
valueSet = True
break
i += 1
if not valueSet:
debug.println(debug.LEVEL_FINEST,
"Could not find speech system match for %s" \
% systemName)
self.get_widget("speechSystems").set_active(0)
self.speechSystemsChoice = self.speechSystemsChoices[0]
self._setupSpeechServers()
def _setupSpeechSystems(self, factories):
"""Sets up the speech systems combo box and sets the selection
to the preferred speech system.
Arguments:
-factories: the list of known speech factories (working or not)
"""
self.speechSystemsModel.clear()
self.workingFactories = []
for factory in factories:
try:
servers = factory.SpeechServer.getSpeechServers()
if len(servers):
self.workingFactories.append(factory)
except:
debug.printException(debug.LEVEL_FINEST)
self.speechSystemsChoices = []
if len(self.workingFactories) == 0:
debug.println(debug.LEVEL_SEVERE, "Speech not available.")
debug.printStack(debug.LEVEL_FINEST)
self.speechSystemsChoice = None
self.speechServersChoices = []
self.speechServersChoice = None
self.speechLanguagesChoices = []
self.speechLanguagesChoice = None
self.speechFamiliesChoices = []
self.speechFamiliesChoice = None
return
i = 0
for workingFactory in self.workingFactories:
self.speechSystemsChoices.append(workingFactory)
name = workingFactory.SpeechServer.getFactoryName()
self.speechSystemsModel.append((i, name))
i += 1
if self.prefsDict["speechServerFactory"]:
self._setSpeechSystemsChoice(self.prefsDict["speechServerFactory"])
else:
self.speechSystemsChoice = None
debug.println(
debug.LEVEL_FINEST,
"orca_gui_prefs._setupSpeechSystems: speechSystemsChoice: %s" \
% self.speechSystemsChoice)
def _initSpeechState(self):
"""Initialize the various speech components.
"""
voices = self.prefsDict["voices"]
self.defaultVoice = acss.ACSS(voices.get(settings.DEFAULT_VOICE))
self.uppercaseVoice = acss.ACSS(voices.get(settings.UPPERCASE_VOICE))
self.hyperlinkVoice = acss.ACSS(voices.get(settings.HYPERLINK_VOICE))
self.systemVoice = acss.ACSS(voices.get(settings.SYSTEM_VOICE))
# Just a note on general naming pattern:
#
# * = The name of the combobox
        # *Model = the name of the combobox model
# *Choices = the Orca/speech python objects
# *Choice = a value from *Choices
#
# Where * = speechSystems, speechServers, speechLanguages, speechFamilies
#
factories = _settingsManager.getSpeechServerFactories()
if len(factories) == 0 or not self.prefsDict.get('enableSpeech', True):
self.workingFactories = []
self.speechSystemsChoice = None
self.speechServersChoices = []
self.speechServersChoice = None
self.speechLanguagesChoices = []
self.speechLanguagesChoice = None
self.speechFamiliesChoices = []
self.speechFamiliesChoice = None
return
try:
speech.init()
except:
self.workingFactories = []
self.speechSystemsChoice = None
self.speechServersChoices = []
self.speechServersChoice = None
self.speechLanguagesChoices = []
self.speechLanguagesChoice = None
self.speechFamiliesChoices = []
self.speechFamiliesChoice = None
return
# This cascades into systems->servers->voice_type->families...
#
self.initializingSpeech = True
self._setupSpeechSystems(factories)
self.initializingSpeech = False
def _setSpokenTextAttributes(self, view, setAttributes,
state, moveToTop=False):
"""Given a set of spoken text attributes, update the model used by the
text attribute tree view.
Arguments:
- view: the text attribute tree view.
- setAttributes: the list of spoken text attributes to update.
- state: the state (True or False) that they all should be set to.
- moveToTop: if True, move these attributes to the top of the list.
"""
model = view.get_model()
view.set_model(None)
[attrList, attrDict] = \
self.script.utilities.stringToKeysAndDict(setAttributes)
[allAttrList, allAttrDict] = self.script.utilities.stringToKeysAndDict(
_settingsManager.getSetting('allTextAttributes'))
for i in range(0, len(attrList)):
for path in range(0, len(allAttrList)):
localizedKey = text_attribute_names.getTextAttributeName(
attrList[i], self.script)
localizedValue = text_attribute_names.getTextAttributeName(
attrDict[attrList[i]], self.script)
if localizedKey == model[path][NAME]:
thisIter = model.get_iter(path)
model.set_value(thisIter, NAME, localizedKey)
model.set_value(thisIter, IS_SPOKEN, state)
model.set_value(thisIter, VALUE, localizedValue)
if moveToTop:
thisIter = model.get_iter(path)
otherIter = model.get_iter(i)
model.move_before(thisIter, otherIter)
break
view.set_model(model)
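# Minimal sketch of the assumed preference-string format parsed above:
# stringToKeysAndDict("size:; weight:400") would yield
# attrList == ['size', 'weight'] and
# attrDict == {'size': '', 'weight': '400'} (values hypothetical).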
def _setBrailledTextAttributes(self, view, setAttributes, state):
"""Given a set of brailled text attributes, update the model used
by the text attribute tree view.
Arguments:
- view: the text attribute tree view.
- setAttributes: the list of brailled text attributes to update.
- state: the state (True or False) that they all should be set to.
"""
model = view.get_model()
view.set_model(None)
[attrList, attrDict] = \
self.script.utilities.stringToKeysAndDict(setAttributes)
[allAttrList, allAttrDict] = self.script.utilities.stringToKeysAndDict(
_settingsManager.getSetting('allTextAttributes'))
for i in range(0, len(attrList)):
for path in range(0, len(allAttrList)):
localizedKey = text_attribute_names.getTextAttributeName(
attrList[i], self.script)
if localizedKey == model[path][NAME]:
thisIter = model.get_iter(path)
model.set_value(thisIter, IS_BRAILLED, state)
break
view.set_model(model)
def _getAppNameForAttribute(self, attributeName):
"""Converts the given Atk attribute name into the application's
equivalent. This is necessary because an application or toolkit
(e.g. Gecko) might invent entirely new names for the same text
attributes.
Arguments:
- attributeName: The name of the text attribute
Returns the application's equivalent name if found or attributeName
otherwise.
"""
return self.script.utilities.getAppNameForAttribute(attributeName)
def _updateTextDictEntry(self):
"""The user has updated the text attribute list in some way. Update
the "enabledSpokenTextAttributes" and "enabledBrailledTextAttributes"
preference strings to reflect the current state of the corresponding
text attribute lists.
"""
model = self.getTextAttributesView.get_model()
spokenAttrStr = ""
brailledAttrStr = ""
noRows = model.iter_n_children(None)
for path in range(0, noRows):
localizedKey = model[path][NAME]
key = text_attribute_names.getTextAttributeKey(localizedKey)
# Convert the normalized, Atk attribute name back into what
# the app/toolkit uses.
#
key = self._getAppNameForAttribute(key)
localizedValue = model[path][VALUE]
value = text_attribute_names.getTextAttributeKey(localizedValue)
if model[path][IS_SPOKEN]:
spokenAttrStr += key + ":" + value + "; "
if model[path][IS_BRAILLED]:
brailledAttrStr += key + ":" + value + "; "
self.prefsDict["enabledSpokenTextAttributes"] = spokenAttrStr
self.prefsDict["enabledBrailledTextAttributes"] = brailledAttrStr
def contractedBrailleToggled(self, checkbox):
grid = self.get_widget('contractionTableGrid')
grid.set_sensitive(checkbox.get_active())
self.prefsDict["enableContractedBraille"] = checkbox.get_active()
def contractionTableComboChanged(self, combobox):
model = combobox.get_model()
myIter = combobox.get_active_iter()
self.prefsDict["brailleContractionTable"] = model[myIter][1]
def flashPersistenceToggled(self, checkbox):
grid = self.get_widget('flashMessageDurationGrid')
grid.set_sensitive(not checkbox.get_active())
self.prefsDict["flashIsPersistent"] = checkbox.get_active()
def textAttributeSpokenToggled(self, cell, path, model):
"""The user has toggled the state of one of the text attribute
checkboxes to be spoken. Update our model to reflect this, then
update the "enabledSpokenTextAttributes" preference string.
Arguments:
- cell: the cell that changed.
- path: the path of that cell.
- model: the model that the cell is part of.
"""
thisIter = model.get_iter(path)
model.set(thisIter, IS_SPOKEN, not model[path][IS_SPOKEN])
self._updateTextDictEntry()
def textAttributeBrailledToggled(self, cell, path, model):
"""The user has toggled the state of one of the text attribute
checkboxes to be brailled. Update our model to reflect this,
then update the "enabledBrailledTextAttributes" preference string.
Arguments:
- cell: the cell that changed.
- path: the path of that cell.
- model: the model that the cell is part of.
"""
thisIter = model.get_iter(path)
model.set(thisIter, IS_BRAILLED, not model[path][IS_BRAILLED])
self._updateTextDictEntry()
def textAttrValueEdited(self, cell, path, new_text, model):
"""The user has edited the value of one of the text attributes.
Update our model to reflect this, then update the
"enabledSpokenTextAttributes" and "enabledBrailledTextAttributes"
preference strings.
Arguments:
- cell: the cell that changed.
- path: the path of that cell.
- new_text: the new text attribute value string.
- model: the model that the cell is part of.
"""
thisIter = model.get_iter(path)
model.set(thisIter, VALUE, new_text)
self._updateTextDictEntry()
def textAttrCursorChanged(self, widget):
"""Set the search column in the text attribute tree view
depending upon which column the user currently has the cursor in.
"""
[path, focusColumn] = self.getTextAttributesView.get_cursor()
if focusColumn:
noColumns = len(self.getTextAttributesView.get_columns())
for i in range(0, noColumns):
col = self.getTextAttributesView.get_column(i)
if focusColumn == col:
self.getTextAttributesView.set_search_column(i)
break
def _createTextAttributesTreeView(self):
"""Create the text attributes tree view. The view is the
textAttributesTreeView GtkTreeView widget. The view will consist
of a list containing three columns:
IS_SPOKEN - a checkbox whose state indicates whether this text
attribute will be spoken or not.
NAME - the text attribute name.
VALUE - if set, (and this attributes is enabled for speaking),
then this attribute will be spoken unless it equals
this value.
"""
self.getTextAttributesView = self.get_widget("textAttributesTreeView")
if self.getTextAttributesView.get_columns():
for column in self.getTextAttributesView.get_columns():
self.getTextAttributesView.remove_column(column)
model = Gtk.ListStore(GObject.TYPE_STRING,
GObject.TYPE_BOOLEAN,
GObject.TYPE_BOOLEAN,
GObject.TYPE_STRING)
# Initially setup the list store model based on the values of all
# the known text attributes.
#
[allAttrList, allAttrDict] = self.script.utilities.stringToKeysAndDict(
_settingsManager.getSetting('allTextAttributes'))
for i in range(0, len(allAttrList)):
thisIter = model.append()
localizedKey = text_attribute_names.getTextAttributeName(
allAttrList[i], self.script)
localizedValue = text_attribute_names.getTextAttributeName(
allAttrDict[allAttrList[i]], self.script)
model.set_value(thisIter, NAME, localizedKey)
model.set_value(thisIter, IS_SPOKEN, False)
model.set_value(thisIter, IS_BRAILLED, False)
model.set_value(thisIter, VALUE, localizedValue)
self.getTextAttributesView.set_model(model)
# Attribute Name column (NAME).
column = Gtk.TreeViewColumn(guilabels.TEXT_ATTRIBUTE_NAME)
column.set_min_width(250)
column.set_resizable(True)
renderer = Gtk.CellRendererText()
column.pack_end(renderer, True)
column.add_attribute(renderer, 'text', NAME)
self.getTextAttributesView.insert_column(column, 0)
# Attribute Speak column (IS_SPOKEN).
speakAttrColumnLabel = guilabels.PRESENTATION_SPEAK
column = Gtk.TreeViewColumn(speakAttrColumnLabel)
renderer = Gtk.CellRendererToggle()
column.pack_start(renderer, False)
column.add_attribute(renderer, 'active', IS_SPOKEN)
renderer.connect("toggled",
self.textAttributeSpokenToggled,
model)
self.getTextAttributesView.insert_column(column, 1)
column.clicked()
# Attribute Mark in Braille column (IS_BRAILLED).
markAttrColumnLabel = guilabels.PRESENTATION_MARK_IN_BRAILLE
column = Gtk.TreeViewColumn(markAttrColumnLabel)
renderer = Gtk.CellRendererToggle()
column.pack_start(renderer, False)
column.add_attribute(renderer, 'active', IS_BRAILLED)
renderer.connect("toggled",
self.textAttributeBrailledToggled,
model)
self.getTextAttributesView.insert_column(column, 2)
column.clicked()
# Attribute Value column (VALUE)
column = Gtk.TreeViewColumn(guilabels.PRESENTATION_PRESENT_UNLESS)
renderer = Gtk.CellRendererText()
renderer.set_property('editable', True)
column.pack_end(renderer, True)
column.add_attribute(renderer, 'text', VALUE)
renderer.connect("edited", self.textAttrValueEdited, model)
self.getTextAttributesView.insert_column(column, 4)
# Check all the enabled (spoken) text attributes.
#
self._setSpokenTextAttributes(
self.getTextAttributesView,
_settingsManager.getSetting('enabledSpokenTextAttributes'),
True, True)
# Check all the enabled (brailled) text attributes.
#
self._setBrailledTextAttributes(
self.getTextAttributesView,
_settingsManager.getSetting('enabledBrailledTextAttributes'),
True)
# Connect a handler for when the user changes columns within the
# view, so that we can adjust the search column for item lookups.
#
self.getTextAttributesView.connect("cursor_changed",
self.textAttrCursorChanged)
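# Note: the NAME, IS_SPOKEN, IS_BRAILLED and VALUE constants used above
# are presumed to index the four ListStore columns created here, i.e.
# the (str, bool, bool, str) layout of the model.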
def pronActualValueEdited(self, cell, path, new_text, model):
"""The user has edited the value of one of the actual strings in
the pronunciation dictionary. Update our model to reflect this.
Arguments:
- cell: the cell that changed.
- path: the path of that cell.
- new_text: the new pronunciation dictionary actual string.
- model: the model that the cell is part of.
"""
thisIter = model.get_iter(path)
model.set(thisIter, ACTUAL, new_text)
def pronReplacementValueEdited(self, cell, path, new_text, model):
"""The user has edited the value of one of the replacement strings
in the pronunciation dictionary. Update our model to reflect this.
Arguments:
- cell: the cell that changed.
- path: the path of that cell.
- new_text: the new pronunciation dictionary replacement string.
- model: the model that the cell is part of.
"""
thisIter = model.get_iter(path)
model.set(thisIter, REPLACEMENT, new_text)
def pronunciationFocusChange(self, widget, event, isFocused):
"""Callback for the pronunciation tree's focus-{in,out}-event signal."""
_settingsManager.setSetting('usePronunciationDictionary', not isFocused)
def pronunciationCursorChanged(self, widget):
"""Set the search column in the pronunciation dictionary tree view
depending upon which column the user currently has the cursor in.
"""
[path, focusColumn] = self.pronunciationView.get_cursor()
if focusColumn:
noColumns = len(self.pronunciationView.get_columns())
for i in range(0, noColumns):
col = self.pronunciationView.get_column(i)
if focusColumn == col:
self.pronunciationView.set_search_column(i)
break
def _createPronunciationTreeView(self):
"""Create the pronunciation dictionary tree view. The view is the
pronunciationTreeView GtkTreeView widget. The view will consist
of a list containing two columns:
ACTUAL - the actual text string (word).
REPLACEMENT - the string that is used to pronounce that word.
"""
self.pronunciationView = self.get_widget("pronunciationTreeView")
if self.pronunciationView.get_columns():
for column in self.pronunciationView.get_columns():
self.pronunciationView.remove_column(column)
model = Gtk.ListStore(GObject.TYPE_STRING,
GObject.TYPE_STRING)
# Initially setup the list store model based on the values of all
# existing entries in the pronunciation dictionary -- unless it's
# the default script.
#
if not self.script.app:
_profile = self.prefsDict.get('activeProfile')[1]
pronDict = _settingsManager.getPronunciations(_profile)
else:
pronDict = pronunciation_dict.pronunciation_dict
for pronKey in sorted(pronDict.keys()):
thisIter = model.append()
try:
actual, replacement = pronDict[pronKey]
except:
# Try to do something sensible for the previous format of
# pronunciation dictionary entries. See bug #464754 for
# more details.
#
actual = pronKey
replacement = pronDict[pronKey]
model.set(thisIter,
ACTUAL, actual,
REPLACEMENT, replacement)
self.pronunciationView.set_model(model)
# Pronunciation Dictionary actual string (word) column (ACTUAL).
column = Gtk.TreeViewColumn(guilabels.DICTIONARY_ACTUAL_STRING)
column.set_min_width(250)
column.set_resizable(True)
renderer = Gtk.CellRendererText()
renderer.set_property('editable', True)
column.pack_end(renderer, True)
column.add_attribute(renderer, 'text', ACTUAL)
renderer.connect("edited", self.pronActualValueEdited, model)
self.pronunciationView.insert_column(column, 0)
# Pronunciation Dictionary replacement string column (REPLACEMENT)
column = Gtk.TreeViewColumn(guilabels.DICTIONARY_REPLACEMENT_STRING)
renderer = Gtk.CellRendererText()
renderer.set_property('editable', True)
column.pack_end(renderer, True)
column.add_attribute(renderer, 'text', REPLACEMENT)
renderer.connect("edited", self.pronReplacementValueEdited, model)
self.pronunciationView.insert_column(column, 1)
self.pronunciationModel = model
# Connect a handler for when the user changes columns within the
# view, so that we can adjust the search column for item lookups.
#
self.pronunciationView.connect("cursor_changed",
self.pronunciationCursorChanged)
self.pronunciationView.connect(
"focus_in_event", self.pronunciationFocusChange, True)
self.pronunciationView.connect(
"focus_out_event", self.pronunciationFocusChange, False)
def _initGUIState(self):
"""Adjust the settings of the various components on the
configuration GUI depending upon the user's preferences.
"""
prefs = self.prefsDict
# Speech pane.
#
enable = prefs["enableSpeech"]
self.get_widget("speechSupportCheckButton").set_active(enable)
self.get_widget("speechOptionsGrid").set_sensitive(enable)
enable = prefs["onlySpeakDisplayedText"]
self.get_widget("onlySpeakDisplayedTextCheckButton").set_active(enable)
self.get_widget("contextOptionsGrid").set_sensitive(not enable)
if prefs["verbalizePunctuationStyle"] == \
settings.PUNCTUATION_STYLE_NONE:
self.get_widget("noneButton").set_active(True)
elif prefs["verbalizePunctuationStyle"] == \
settings.PUNCTUATION_STYLE_SOME:
self.get_widget("someButton").set_active(True)
elif prefs["verbalizePunctuationStyle"] == \
settings.PUNCTUATION_STYLE_MOST:
self.get_widget("mostButton").set_active(True)
else:
self.get_widget("allButton").set_active(True)
if prefs["speechVerbosityLevel"] == settings.VERBOSITY_LEVEL_BRIEF:
self.get_widget("speechBriefButton").set_active(True)
else:
self.get_widget("speechVerboseButton").set_active(True)
self.get_widget("onlySpeakDisplayedTextCheckButton").set_active(
prefs["onlySpeakDisplayedText"])
self.get_widget("enableSpeechIndentationCheckButton").set_active(\
prefs["enableSpeechIndentation"])
self.get_widget("speakBlankLinesCheckButton").set_active(\
prefs["speakBlankLines"])
self.get_widget("speakMultiCaseStringsAsWordsCheckButton").set_active(\
prefs["speakMultiCaseStringsAsWords"])
self.get_widget("speakNumbersAsDigitsCheckButton").set_active(
prefs.get("speakNumbersAsDigits", settings.speakNumbersAsDigits))
self.get_widget("enableTutorialMessagesCheckButton").set_active(\
prefs["enableTutorialMessages"])
self.get_widget("enablePauseBreaksCheckButton").set_active(\
prefs["enablePauseBreaks"])
self.get_widget("enablePositionSpeakingCheckButton").set_active(\
prefs["enablePositionSpeaking"])
self.get_widget("enableMnemonicSpeakingCheckButton").set_active(\
prefs["enableMnemonicSpeaking"])
self.get_widget("speakMisspelledIndicatorCheckButton").set_active(
prefs.get("speakMisspelledIndicator", settings.speakMisspelledIndicator))
self.get_widget("speakDescriptionCheckButton").set_active(
prefs.get("speakDescription", settings.speakDescription))
self.get_widget("speakContextBlockquoteCheckButton").set_active(
prefs.get("speakContextBlockquote", settings.speakContextList))
self.get_widget("speakContextLandmarkCheckButton").set_active(
prefs.get("speakContextLandmark", settings.speakContextLandmark))
self.get_widget("speakContextNonLandmarkFormCheckButton").set_active(
prefs.get("speakContextNonLandmarkForm", settings.speakContextNonLandmarkForm))
self.get_widget("speakContextListCheckButton").set_active(
prefs.get("speakContextList", settings.speakContextList))
self.get_widget("speakContextPanelCheckButton").set_active(
prefs.get("speakContextPanel", settings.speakContextPanel))
self.get_widget("speakContextTableCheckButton").set_active(
prefs.get("speakContextTable", settings.speakContextTable))
enable = prefs.get("messagesAreDetailed", settings.messagesAreDetailed)
self.get_widget("messagesAreDetailedCheckButton").set_active(enable)
enable = prefs.get("useColorNames", settings.useColorNames)
self.get_widget("useColorNamesCheckButton").set_active(enable)
enable = prefs.get("readFullRowInGUITable", settings.readFullRowInGUITable)
self.get_widget("readFullRowInGUITableCheckButton").set_active(enable)
enable = prefs.get("readFullRowInDocumentTable", settings.readFullRowInDocumentTable)
self.get_widget("readFullRowInDocumentTableCheckButton").set_active(enable)
enable = prefs.get("readFullRowInSpreadSheet", settings.readFullRowInSpreadSheet)
self.get_widget("readFullRowInSpreadSheetCheckButton").set_active(enable)
style = prefs.get("capitalizationStyle", settings.capitalizationStyle)
combobox = self.get_widget("capitalizationStyle")
options = [guilabels.CAPITALIZATION_STYLE_NONE,
guilabels.CAPITALIZATION_STYLE_ICON,
guilabels.CAPITALIZATION_STYLE_SPELL]
self.populateComboBox(combobox, options)
if style == settings.CAPITALIZATION_STYLE_ICON:
value = guilabels.CAPITALIZATION_STYLE_ICON
elif style == settings.CAPITALIZATION_STYLE_SPELL:
value = guilabels.CAPITALIZATION_STYLE_SPELL
else:
value = guilabels.CAPITALIZATION_STYLE_NONE
combobox.set_active(options.index(value))
combobox2 = self.get_widget("dateFormatCombo")
sdtime = time.strftime
ltime = time.localtime
self.populateComboBox(combobox2,
[sdtime(messages.DATE_FORMAT_LOCALE, ltime()),
sdtime(messages.DATE_FORMAT_NUMBERS_DM, ltime()),
sdtime(messages.DATE_FORMAT_NUMBERS_MD, ltime()),
sdtime(messages.DATE_FORMAT_NUMBERS_DMY, ltime()),
sdtime(messages.DATE_FORMAT_NUMBERS_MDY, ltime()),
sdtime(messages.DATE_FORMAT_NUMBERS_YMD, ltime()),
sdtime(messages.DATE_FORMAT_FULL_DM, ltime()),
sdtime(messages.DATE_FORMAT_FULL_MD, ltime()),
sdtime(messages.DATE_FORMAT_FULL_DMY, ltime()),
sdtime(messages.DATE_FORMAT_FULL_MDY, ltime()),
sdtime(messages.DATE_FORMAT_FULL_YMD, ltime()),
sdtime(messages.DATE_FORMAT_ABBREVIATED_DM, ltime()),
sdtime(messages.DATE_FORMAT_ABBREVIATED_MD, ltime()),
sdtime(messages.DATE_FORMAT_ABBREVIATED_DMY, ltime()),
sdtime(messages.DATE_FORMAT_ABBREVIATED_MDY, ltime()),
sdtime(messages.DATE_FORMAT_ABBREVIATED_YMD, ltime())
])
indexdate = DATE_FORMAT_LOCALE
dateFormat = self.prefsDict["presentDateFormat"]
if dateFormat == messages.DATE_FORMAT_LOCALE:
indexdate = DATE_FORMAT_LOCALE
elif dateFormat == messages.DATE_FORMAT_NUMBERS_DM:
indexdate = DATE_FORMAT_NUMBERS_DM
elif dateFormat == messages.DATE_FORMAT_NUMBERS_MD:
indexdate = DATE_FORMAT_NUMBERS_MD
elif dateFormat == messages.DATE_FORMAT_NUMBERS_DMY:
indexdate = DATE_FORMAT_NUMBERS_DMY
elif dateFormat == messages.DATE_FORMAT_NUMBERS_MDY:
indexdate = DATE_FORMAT_NUMBERS_MDY
elif dateFormat == messages.DATE_FORMAT_NUMBERS_YMD:
indexdate = DATE_FORMAT_NUMBERS_YMD
elif dateFormat == messages.DATE_FORMAT_FULL_DM:
indexdate = DATE_FORMAT_FULL_DM
elif dateFormat == messages.DATE_FORMAT_FULL_MD:
indexdate = DATE_FORMAT_FULL_MD
elif dateFormat == messages.DATE_FORMAT_FULL_DMY:
indexdate = DATE_FORMAT_FULL_DMY
elif dateFormat == messages.DATE_FORMAT_FULL_MDY:
indexdate = DATE_FORMAT_FULL_MDY
elif dateFormat == messages.DATE_FORMAT_FULL_YMD:
indexdate = DATE_FORMAT_FULL_YMD
elif dateFormat == messages.DATE_FORMAT_ABBREVIATED_DM:
indexdate = DATE_FORMAT_ABBREVIATED_DM
elif dateFormat == messages.DATE_FORMAT_ABBREVIATED_MD:
indexdate = DATE_FORMAT_ABBREVIATED_MD
elif dateFormat == messages.DATE_FORMAT_ABBREVIATED_DMY:
indexdate = DATE_FORMAT_ABBREVIATED_DMY
elif dateFormat == messages.DATE_FORMAT_ABBREVIATED_MDY:
indexdate = DATE_FORMAT_ABBREVIATED_MDY
elif dateFormat == messages.DATE_FORMAT_ABBREVIATED_YMD:
indexdate = DATE_FORMAT_ABBREVIATED_YMD
combobox2.set_active(indexdate)
combobox3 = self.get_widget("timeFormatCombo")
self.populateComboBox(combobox3,
[sdtime(messages.TIME_FORMAT_LOCALE, ltime()),
sdtime(messages.TIME_FORMAT_12_HM, ltime()),
sdtime(messages.TIME_FORMAT_12_HMS, ltime()),
sdtime(messages.TIME_FORMAT_24_HMS, ltime()),
sdtime(messages.TIME_FORMAT_24_HMS_WITH_WORDS, ltime()),
sdtime(messages.TIME_FORMAT_24_HM, ltime()),
sdtime(messages.TIME_FORMAT_24_HM_WITH_WORDS, ltime())])
indextime = TIME_FORMAT_LOCALE
timeFormat = self.prefsDict["presentTimeFormat"]
if timeFormat == messages.TIME_FORMAT_LOCALE:
indextime = TIME_FORMAT_LOCALE
elif timeFormat == messages.TIME_FORMAT_12_HM:
indextime = TIME_FORMAT_12_HM
elif timeFormat == messages.TIME_FORMAT_12_HMS:
indextime = TIME_FORMAT_12_HMS
elif timeFormat == messages.TIME_FORMAT_24_HMS:
indextime = TIME_FORMAT_24_HMS
elif timeFormat == messages.TIME_FORMAT_24_HMS_WITH_WORDS:
indextime = TIME_FORMAT_24_HMS_WITH_WORDS
elif timeFormat == messages.TIME_FORMAT_24_HM:
indextime = TIME_FORMAT_24_HM
elif timeFormat == messages.TIME_FORMAT_24_HM_WITH_WORDS:
indextime = TIME_FORMAT_24_HM_WITH_WORDS
combobox3.set_active(indextime)
self.get_widget("speakProgressBarUpdatesCheckButton").set_active(
prefs.get("speakProgressBarUpdates", settings.speakProgressBarUpdates))
self.get_widget("brailleProgressBarUpdatesCheckButton").set_active(
prefs.get("brailleProgressBarUpdates", settings.brailleProgressBarUpdates))
self.get_widget("beepProgressBarUpdatesCheckButton").set_active(
prefs.get("beepProgressBarUpdates", settings.beepProgressBarUpdates))
interval = prefs["progressBarUpdateInterval"]
self.get_widget("progressBarUpdateIntervalSpinButton").set_value(interval)
comboBox = self.get_widget("progressBarVerbosity")
levels = [guilabels.PROGRESS_BAR_ALL,
guilabels.PROGRESS_BAR_APPLICATION,
guilabels.PROGRESS_BAR_WINDOW]
self.populateComboBox(comboBox, levels)
comboBox.set_active(prefs["progressBarVerbosity"])
enable = prefs["enableMouseReview"]
self.get_widget("enableMouseReviewCheckButton").set_active(enable)
# Braille pane.
#
self.get_widget("enableBrailleCheckButton").set_active( \
prefs["enableBraille"])
state = prefs["brailleRolenameStyle"] == \
settings.BRAILLE_ROLENAME_STYLE_SHORT
self.get_widget("abbrevRolenames").set_active(state)
self.get_widget("disableBrailleEOLCheckButton").set_active(
prefs["disableBrailleEOL"])
if louis is None:
self.get_widget( \
"contractedBrailleCheckButton").set_sensitive(False)
else:
self.get_widget("contractedBrailleCheckButton").set_active( \
prefs["enableContractedBraille"])
# Set up contraction table combo box and set it to the
# currently used one.
#
tablesCombo = self.get_widget("contractionTableCombo")
tableDict = braille.listTables()
selectedTableIter = None
selectedTable = prefs["brailleContractionTable"] or \
braille.getDefaultTable()
if tableDict:
tablesModel = Gtk.ListStore(str, str)
names = sorted(tableDict.keys())
for name in names:
fname = tableDict[name]
it = tablesModel.append([name, fname])
if os.path.join(braille.tablesdir, fname) == \
selectedTable:
selectedTableIter = it
cell = self.planeCellRendererText
tablesCombo.clear()
tablesCombo.pack_start(cell, True)
tablesCombo.add_attribute(cell, 'text', 0)
tablesCombo.set_model(tablesModel)
if selectedTableIter:
tablesCombo.set_active_iter(selectedTableIter)
else:
tablesCombo.set_active(0)
else:
tablesCombo.set_sensitive(False)
if prefs["brailleVerbosityLevel"] == settings.VERBOSITY_LEVEL_BRIEF:
self.get_widget("brailleBriefButton").set_active(True)
else:
self.get_widget("brailleVerboseButton").set_active(True)
self.get_widget("enableBrailleWordWrapCheckButton").set_active(
prefs.get("enableBrailleWordWrap", settings.enableBrailleWordWrap))
selectionIndicator = prefs["brailleSelectorIndicator"]
if selectionIndicator == settings.BRAILLE_UNDERLINE_7:
self.get_widget("brailleSelection7Button").set_active(True)
elif selectionIndicator == settings.BRAILLE_UNDERLINE_8:
self.get_widget("brailleSelection8Button").set_active(True)
elif selectionIndicator == settings.BRAILLE_UNDERLINE_BOTH:
self.get_widget("brailleSelectionBothButton").set_active(True)
else:
self.get_widget("brailleSelectionNoneButton").set_active(True)
linkIndicator = prefs["brailleLinkIndicator"]
if linkIndicator == settings.BRAILLE_UNDERLINE_7:
self.get_widget("brailleLink7Button").set_active(True)
elif linkIndicator == settings.BRAILLE_UNDERLINE_8:
self.get_widget("brailleLink8Button").set_active(True)
elif linkIndicator == settings.BRAILLE_UNDERLINE_BOTH:
self.get_widget("brailleLinkBothButton").set_active(True)
else:
self.get_widget("brailleLinkNoneButton").set_active(True)
enable = prefs.get("enableFlashMessages", settings.enableFlashMessages)
self.get_widget("enableFlashMessagesCheckButton").set_active(enable)
enable = prefs.get("flashIsPersistent", settings.flashIsPersistent)
self.get_widget("flashIsPersistentCheckButton").set_active(enable)
enable = prefs.get("flashIsDetailed", settings.flashIsDetailed)
self.get_widget("flashIsDetailedCheckButton").set_active(enable)
duration = prefs["brailleFlashTime"]
self.get_widget("brailleFlashTimeSpinButton").set_value(duration / 1000)
# Key Echo pane.
#
self.get_widget("keyEchoCheckButton").set_active( \
prefs["enableKeyEcho"])
self.get_widget("enableAlphabeticKeysCheckButton").set_active(
prefs.get("enableAlphabeticKeys", settings.enableAlphabeticKeys))
self.get_widget("enableNumericKeysCheckButton").set_active(
prefs.get("enableNumericKeys", settings.enableNumericKeys))
self.get_widget("enablePunctuationKeysCheckButton").set_active(
prefs.get("enablePunctuationKeys", settings.enablePunctuationKeys))
self.get_widget("enableSpaceCheckButton").set_active(
prefs.get("enableSpace", settings.enableSpace))
self.get_widget("enableModifierKeysCheckButton").set_active( \
prefs["enableModifierKeys"])
self.get_widget("enableFunctionKeysCheckButton").set_active( \
prefs["enableFunctionKeys"])
self.get_widget("enableActionKeysCheckButton").set_active( \
prefs["enableActionKeys"])
self.get_widget("enableNavigationKeysCheckButton").set_active( \
prefs["enableNavigationKeys"])
self.get_widget("enableDiacriticalKeysCheckButton").set_active( \
prefs["enableDiacriticalKeys"])
self.get_widget("enableEchoByCharacterCheckButton").set_active( \
prefs["enableEchoByCharacter"])
self.get_widget("enableEchoByWordCheckButton").set_active( \
prefs["enableEchoByWord"])
self.get_widget("enableEchoBySentenceCheckButton").set_active( \
prefs["enableEchoBySentence"])
# Text attributes pane.
#
self._createTextAttributesTreeView()
brailleIndicator = prefs["textAttributesBrailleIndicator"]
if brailleIndicator == settings.BRAILLE_UNDERLINE_7:
self.get_widget("textBraille7Button").set_active(True)
elif brailleIndicator == settings.BRAILLE_UNDERLINE_8:
self.get_widget("textBraille8Button").set_active(True)
elif brailleIndicator == settings.BRAILLE_UNDERLINE_BOTH:
self.get_widget("textBrailleBothButton").set_active(True)
else:
self.get_widget("textBrailleNoneButton").set_active(True)
# Pronunciation dictionary pane.
#
self._createPronunciationTreeView()
# General pane.
#
self.get_widget("presentToolTipsCheckButton").set_active(
prefs["presentToolTips"])
if prefs["keyboardLayout"] == settings.GENERAL_KEYBOARD_LAYOUT_DESKTOP:
self.get_widget("generalDesktopButton").set_active(True)
else:
self.get_widget("generalLaptopButton").set_active(True)
combobox = self.get_widget("sayAllStyle")
self.populateComboBox(combobox, [guilabels.SAY_ALL_STYLE_LINE,
guilabels.SAY_ALL_STYLE_SENTENCE])
combobox.set_active(prefs["sayAllStyle"])
self.get_widget("rewindAndFastForwardInSayAllCheckButton").set_active(
prefs.get("rewindAndFastForwardInSayAll", settings.rewindAndFastForwardInSayAll))
self.get_widget("structNavInSayAllCheckButton").set_active(
prefs.get("structNavInSayAll", settings.structNavInSayAll))
self.get_widget("sayAllContextBlockquoteCheckButton").set_active(
prefs.get("sayAllContextBlockquote", settings.sayAllContextBlockquote))
self.get_widget("sayAllContextLandmarkCheckButton").set_active(
prefs.get("sayAllContextLandmark", settings.sayAllContextLandmark))
self.get_widget("sayAllContextNonLandmarkFormCheckButton").set_active(
prefs.get("sayAllContextNonLandmarkForm", settings.sayAllContextNonLandmarkForm))
self.get_widget("sayAllContextListCheckButton").set_active(
prefs.get("sayAllContextList", settings.sayAllContextList))
self.get_widget("sayAllContextPanelCheckButton").set_active(
prefs.get("sayAllContextPanel", settings.sayAllContextPanel))
self.get_widget("sayAllContextTableCheckButton").set_active(
prefs.get("sayAllContextTable", settings.sayAllContextTable))
# Orca User Profiles
#
self.profilesCombo = self.get_widget('availableProfilesComboBox1')
self.startingProfileCombo = self.get_widget('availableProfilesComboBox2')
self.profilesComboModel = self.get_widget('model9')
self.__initProfileCombo()
if self.script.app:
self.get_widget('profilesFrame').set_sensitive(False)
def __initProfileCombo(self):
"""Adding available profiles and setting active as the active one"""
availableProfiles = self.__getAvailableProfiles()
self.profilesComboModel.clear()
if not availableProfiles:
self.profilesComboModel.append(self._defaultProfile)
else:
for profile in availableProfiles:
self.profilesComboModel.append(profile)
activeProfile = self.prefsDict.get('activeProfile') or self._defaultProfile
startingProfile = self.prefsDict.get('startingProfile') or self._defaultProfile
activeProfileIter = self.getComboBoxIndex(self.profilesCombo,
activeProfile[0])
startingProfileIter = self.getComboBoxIndex(self.startingProfileCombo,
startingProfile[0])
self.profilesCombo.set_active(activeProfileIter)
self.startingProfileCombo.set_active(startingProfileIter)
def __getAvailableProfiles(self):
"""Get available user profiles."""
return _settingsManager.availableProfiles()
def _updateOrcaModifier(self):
combobox = self.get_widget("orcaModifierComboBox")
keystring = ", ".join(self.prefsDict["orcaModifierKeys"])
combobox.set_active(self.getComboBoxIndex(combobox, keystring))
def populateComboBox(self, combobox, items):
"""Populates the combobox with the items provided.
Arguments:
- combobox: the GtkComboBox to populate
- items: the list of strings with which to populate it
"""
model = Gtk.ListStore(str)
for item in items:
model.append([item])
combobox.set_model(model)
def getComboBoxIndex(self, combobox, searchStr, col=0):
""" For each of the entries in the given combo box, look for searchStr.
Return the index of the entry if searchStr is found.
Arguments:
- combobox: the GtkComboBox to search.
- searchStr: the string to search for.
Returns the index of the first entry in combobox with searchStr, or
0 if not found.
"""
model = combobox.get_model()
myiter = model.get_iter_first()
for i in range(0, len(model)):
name = model.get_value(myiter, col)
if name == searchStr:
return i
myiter = model.iter_next(myiter)
return 0
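# Hypothetical usage sketch: with model rows ["Insert", "KP_Insert",
# "Insert, KP_Insert"], getComboBoxIndex(combobox, "KP_Insert")
# returns 1; a string not present falls back to 0.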
def getComboBoxList(self, combobox):
"""Get the list of values from the active combox
"""
active = combobox.get_active()
model = combobox.get_model()
activeIter = model.get_iter(active)
activeLabel = model.get_value(activeIter, 0)
activeName = model.get_value(activeIter, 1)
return [activeLabel, activeName]
def getKeyBindingsModelDict(self, model, modifiedOnly=True):
modelDict = {}
node = model.get_iter_first()
while node:
child = model.iter_children(node)
while child:
key, modified = model.get(child, HANDLER, MODIF)
if modified or not modifiedOnly:
value = []
value.append(list(model.get(
child, KEY1, MOD_MASK1, MOD_USED1, CLICK_COUNT1)))
modelDict[key] = value
child = model.iter_next(child)
node = model.iter_next(node)
return modelDict
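# Sketch of the returned mapping (entry is hypothetical):
#   {'sayAllHandler': [['KP_Add', '<mod mask>', '<mods used>', '1']]}
# i.e. handler name -> a list holding one [key, modifier mask,
# modifiers used, click count] row, restricted to modified rows
# unless modifiedOnly is False.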
def getModelDict(self, model):
"""Get the list of values from a list[str,str] model
"""
pronunciation_dict.pronunciation_dict = {}
currentIter = model.get_iter_first()
while currentIter is not None:
key, value = model.get(currentIter, ACTUAL, REPLACEMENT)
if key and value:
pronunciation_dict.setPronunciation(key, value)
currentIter = model.iter_next(currentIter)
modelDict = pronunciation_dict.pronunciation_dict
return modelDict
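# Note: as written, getModelDict() also rebuilds the module-level
# pronunciation_dict.pronunciation_dict as a side effect; the returned
# dictionary is that same object, e.g. {'ldap': ('LDAP', 'ell dap')}
# (entry hypothetical).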
def showGUI(self):
"""Show the Orca configuration GUI window. This assumes that
the GUI has already been created.
"""
orcaSetupWindow = self.get_widget("orcaSetupWindow")
accelGroup = Gtk.AccelGroup()
orcaSetupWindow.add_accel_group(accelGroup)
helpButton = self.get_widget("helpButton")
(keyVal, modifierMask) = Gtk.accelerator_parse("F1")
helpButton.add_accelerator("clicked",
accelGroup,
keyVal,
modifierMask,
0)
try:
ts = orca_state.lastInputEvent.timestamp
except:
ts = 0
if ts == 0:
ts = Gtk.get_current_event_time()
orcaSetupWindow.present_with_time(ts)
# We always want to re-order the text attributes page so that enabled
# items are consistently at the top.
#
self._setSpokenTextAttributes(
self.getTextAttributesView,
_settingsManager.getSetting('enabledSpokenTextAttributes'),
True, True)
if self.script.app:
title = guilabels.PREFERENCES_APPLICATION_TITLE % self.script.app.name
orcaSetupWindow.set_title(title)
orcaSetupWindow.show()
def _initComboBox(self, combobox):
"""Initialize the given combo box to take a list of int/str pairs.
Arguments:
- combobox: the GtkComboBox to initialize.
"""
cell = Gtk.CellRendererText()
combobox.pack_start(cell, True)
# We only want to display one column; not two.
#
try:
columnToDisplay = combobox.get_cells()[0]
combobox.add_attribute(columnToDisplay, 'text', 1)
except:
combobox.add_attribute(cell, 'text', 1)
model = Gtk.ListStore(int, str)
combobox.set_model(model)
# Force the display comboboxes to be left aligned.
#
if isinstance(combobox, Gtk.ComboBoxText):
size = combobox.size_request()
cell.set_fixed_size(size[0] - 29, -1)
return model
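# Minimal usage sketch (hypothetical items): the returned model takes
# (int, str) rows, e.g. model.append([0, "Default"]); only the string
# column (index 1) is rendered in the combo box.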
def _setKeyEchoItems(self):
"""[In]sensitize the checkboxes for the various types of key echo,
depending upon whether the value of the key echo check button is set.
"""
enable = self.get_widget("keyEchoCheckButton").get_active()
self.get_widget("enableAlphabeticKeysCheckButton").set_sensitive(enable)
self.get_widget("enableNumericKeysCheckButton").set_sensitive(enable)
self.get_widget("enablePunctuationKeysCheckButton").set_sensitive(enable)
self.get_widget("enableSpaceCheckButton").set_sensitive(enable)
self.get_widget("enableModifierKeysCheckButton").set_sensitive(enable)
self.get_widget("enableFunctionKeysCheckButton").set_sensitive(enable)
self.get_widget("enableActionKeysCheckButton").set_sensitive(enable)
self.get_widget("enableNavigationKeysCheckButton").set_sensitive(enable)
self.get_widget("enableDiacriticalKeysCheckButton").set_sensitive( \
enable)
def _presentMessage(self, text, interrupt=False):
"""If the text field is not None, presents the given text, optionally
interrupting anything currently being spoken.
Arguments:
- text: the text to present
- interrupt: if True, interrupt any speech currently being spoken
"""
self.script.speakMessage(text, interrupt=interrupt)
try:
self.script.displayBrailleMessage(text, flashTime=-1)
except:
pass
def _createNode(self, appName):
"""Create a new root node in the TreeStore model with the name of the
application.
Arguments:
- appName: the name of the TreeStore node (the same as the application's name)
"""
model = self.keyBindingsModel
myiter = model.append(None)
model.set_value(myiter, DESCRIP, appName)
model.set_value(myiter, MODIF, False)
return myiter
def _getIterOf(self, appName):
"""Returns the Gtk.TreeIter of the TreeStore model
that matches the application name passed as argument
Arguments:
- appName: a string with the name of the application of the node wanted
it's the same that the field DESCRIP of the model treeStore
"""
model = self.keyBindingsModel
for row in model:
if ((model.iter_depth(row.iter) == 0) \
and (row[DESCRIP] == appName)):
return row.iter
return None
def _clickCountToString(self, clickCount):
"""Given a numeric clickCount, returns a string for inclusion
in the list of keybindings.
Argument:
- clickCount: the number of clicks associated with the keybinding.
"""
clickCountString = ""
if clickCount == 2:
clickCountString = " (%s)" % guilabels.CLICK_COUNT_DOUBLE
elif clickCount == 3:
clickCountString = " (%s)" % guilabels.CLICK_COUNT_TRIPLE
return clickCountString
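# For example, a clickCount of 2 yields " (<double-click label>)", a
# clickCount of 3 the triple-click variant, and any other count the
# empty string.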
def _insertRow(self, handl, kb, parent=None, modif=False):
"""Appends a new row with the new keybinding data to the treeview
Arguments:
- handl: the name of the handler associated with the keyBinding
- kb: the new keybinding.
- parent: the parent node of the treeview, where to append the kb
- modif: whether to check the modified field or not.
Returns a Gtk.TreeIter pointing at the new row.
"""
model = self.keyBindingsModel
if parent is None:
parent = self._getIterOf(guilabels.KB_GROUP_DEFAULT)
if parent is not None:
myiter = model.append(parent)
if not kb.keysymstring:
text = None
else:
clickCount = self._clickCountToString(kb.click_count)
modifierNames = keybindings.getModifierNames(kb.modifiers)
keysymstring = kb.keysymstring
text = modifierNames + keysymstring + clickCount
model.set_value(myiter, HANDLER, handl)
model.set_value(myiter, DESCRIP, kb.handler.description)
model.set_value(myiter, MOD_MASK1, str(kb.modifier_mask))
model.set_value(myiter, MOD_USED1, str(kb.modifiers))
model.set_value(myiter, KEY1, kb.keysymstring)
model.set_value(myiter, CLICK_COUNT1, str(kb.click_count))
if text is not None:
model.set_value(myiter, OLDTEXT1, text)
model.set_value(myiter, TEXT1, text)
model.set_value(myiter, MODIF, modif)
model.set_value(myiter, EDITABLE, True)
return myiter
else:
return None
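# Sketch of the text composed above (binding is hypothetical): modifier
# names such as "Orca+" joined with keysymstring "t" and a double-click
# suffix would display as "Orca+t (<double click>)".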
def _insertRowBraille(self, handl, com, inputEvHand,
parent=None, modif=False):
"""Appends a new row with the new braille binding data to the treeview
Arguments:
- handl: the name of the handler associated with the brailleBinding
- com: the BrlTTY command
- inputEvHand: the inputEventHandler with the new brailleBinding
- parent: the parent node of the treeview, where to append the kb
- modif: whether to check the modified field or not.
Returns a Gtk.TreeIter pointing at the new row.
"""
model = self.keyBindingsModel
if parent is None:
parent = self._getIterOf(guilabels.KB_GROUP_BRAILLE)
if parent is not None:
myiter = model.append(parent)
model.set_value(myiter, HANDLER, handl)
model.set_value(myiter, DESCRIP, inputEvHand.description)
model.set_value(myiter, KEY1, str(com))
model.set_value(myiter, TEXT1, braille.command_name[com])
model.set_value(myiter, MODIF, modif)
model.set_value(myiter, EDITABLE, False)
return myiter
else:
return None
def _markModified(self):
""" Mark as modified the user custom key bindings:
"""
try:
self.script.setupInputEventHandlers()
keyBinds = keybindings.KeyBindings()
keyBinds = _settingsManager.overrideKeyBindings(self.script, keyBinds)
keyBind = keybindings.KeyBinding(None, None, None, None)
treeModel = self.keyBindingsModel
myiter = treeModel.get_iter_first()
while myiter is not None:
iterChild = treeModel.iter_children(myiter)
while iterChild is not None:
descrip = treeModel.get_value(iterChild, DESCRIP)
keyBind.handler = \
input_event.InputEventHandler(None, descrip)
if keyBinds.hasKeyBinding(keyBind,
typeOfSearch="description"):
treeModel.set_value(iterChild, MODIF, True)
iterChild = treeModel.iter_next(iterChild)
myiter = treeModel.iter_next(myiter)
except:
debug.printException(debug.LEVEL_SEVERE)
def _populateKeyBindings(self, clearModel=True):
"""Fills the TreeView with the list of Orca keybindings
Arguments:
- clearModel: if True, initially clear out the key bindings model.
"""
self.keyBindView.set_model(None)
self.keyBindView.set_headers_visible(False)
self.keyBindView.hide()
if clearModel:
self.keyBindingsModel.clear()
self.kbindings = None
try:
appName = self.script.app.name
except:
appName = ""
iterApp = self._createNode(appName)
iterOrca = self._createNode(guilabels.KB_GROUP_DEFAULT)
iterUnbound = self._createNode(guilabels.KB_GROUP_UNBOUND)
if not self.kbindings:
self.kbindings = keybindings.KeyBindings()
self.script.setupInputEventHandlers()
allKeyBindings = self.script.getKeyBindings()
defKeyBindings = self.script.getDefaultKeyBindings()
for kb in allKeyBindings.keyBindings:
if not self.kbindings.hasKeyBinding(kb, "strict"):
handl = self.script.getInputEventHandlerKey(kb.handler)
if not defKeyBindings.hasKeyBinding(kb, "description"):
self._insertRow(handl, kb, iterApp)
elif kb.keysymstring:
self._insertRow(handl, kb, iterOrca)
else:
self._insertRow(handl, kb, iterUnbound)
self.kbindings.add(kb)
if not self.keyBindingsModel.iter_has_child(iterApp):
self.keyBindingsModel.remove(iterApp)
if not self.keyBindingsModel.iter_has_child(iterUnbound):
self.keyBindingsModel.remove(iterUnbound)
self._updateOrcaModifier()
self._markModified()
iterBB = self._createNode(guilabels.KB_GROUP_BRAILLE)
self.bbindings = self.script.getBrailleBindings()
for com, inputEvHand in self.bbindings.items():
handl = self.script.getInputEventHandlerKey(inputEvHand)
self._insertRowBraille(handl, com, inputEvHand, iterBB)
self.keyBindView.set_model(self.keyBindingsModel)
self.keyBindView.set_headers_visible(True)
self.keyBindView.expand_all()
self.keyBindingsModel.set_sort_column_id(OLDTEXT1, Gtk.SortType.ASCENDING)
self.keyBindView.show()
# Keep track of new/unbound keybindings that have yet to be applied.
#
self.pendingKeyBindings = {}
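# The resulting tree holds up to four top-level groups -- the current
# application, the Orca defaults, unbound handlers, and braille
# bindings -- with empty application/unbound groups pruned above.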
def _cleanupSpeechServers(self):
"""Remove unwanted factories and drivers for the current active
factory, when the user dismisses the Orca Preferences dialog."""
for workingFactory in self.workingFactories:
if not (workingFactory == self.speechSystemsChoice):
workingFactory.SpeechServer.shutdownActiveServers()
else:
servers = workingFactory.SpeechServer.getSpeechServers()
for server in servers:
if not (server == self.speechServersChoice):
server.shutdown()
def speechSupportChecked(self, widget):
"""Signal handler for the "toggled" signal for the
speechSupportCheckButton GtkCheckButton widget. The user has
[un]checked the 'Enable Speech' checkbox. Set the 'enableSpeech'
preference to the new value. Set the rest of the speech pane items
[in]sensitive depending upon whether this checkbox is checked.
Arguments:
- widget: the component that generated the signal.
"""
enable = widget.get_active()
self.prefsDict["enableSpeech"] = enable
self.get_widget("speechOptionsGrid").set_sensitive(enable)
def onlySpeakDisplayedTextToggled(self, widget):
"""Signal handler for the "toggled" signal for the GtkCheckButton
onlySpeakDisplayedText. In addition to updating the preferences,
set the sensitivity of the contextOptionsGrid.
Arguments:
- widget: the component that generated the signal.
"""
enable = widget.get_active()
self.prefsDict["onlySpeakDisplayedText"] = enable
self.get_widget("contextOptionsGrid").set_sensitive(not enable)
def speechSystemsChanged(self, widget):
"""Signal handler for the "changed" signal for the speechSystems
GtkComboBox widget. The user has selected a different speech
system. Clear the existing list of speech servers, and setup
a new list of speech servers based on the new choice. Setup a
new list of voices for the first speech server in the list.
Arguments:
- widget: the component that generated the signal.
"""
if self.initializingSpeech:
return
selectedIndex = widget.get_active()
self.speechSystemsChoice = self.speechSystemsChoices[selectedIndex]
self._setupSpeechServers()
def speechServersChanged(self, widget):
"""Signal handler for the "changed" signal for the speechServers
GtkComboBox widget. The user has selected a different speech
server. Clear the existing list of voices, and setup a new
list of voices based on the new choice.
Arguments:
- widget: the component that generated the signal.
"""
if self.initializingSpeech:
return
selectedIndex = widget.get_active()
self.speechServersChoice = self.speechServersChoices[selectedIndex]
# Whenever the speech servers change, we need to make sure we
# clear whatever family was in use by the current voice types.
# Otherwise, we can end up with family names from one server
# bleeding over (e.g., "Paul" from Fonix ends up getting in
# the "Default" voice type after we switch to eSpeak).
#
try:
del self.defaultVoice[acss.ACSS.FAMILY]
del self.uppercaseVoice[acss.ACSS.FAMILY]
del self.hyperlinkVoice[acss.ACSS.FAMILY]
del self.systemVoice[acss.ACSS.FAMILY]
except:
pass
self._setupVoices()
def speechLanguagesChanged(self, widget):
"""Signal handler for the "value_changed" signal for the languages
GtkComboBox widget. The user has selected a different voice
language. Save the new voice language name based on the new choice.
Arguments:
- widget: the component that generated the signal.
"""
if self.initializingSpeech:
return
selectedIndex = widget.get_active()
try:
self.speechLanguagesChoice = self.speechLanguagesChoices[selectedIndex]
if (self.speechServersChoice, self.speechLanguagesChoice) in \
self.selectedFamilyChoices:
i = self.selectedFamilyChoices[self.speechServersChoice, \
self.speechLanguagesChoice]
family = self.speechFamiliesChoices[i]
name = family[speechserver.VoiceFamily.NAME]
language = family[speechserver.VoiceFamily.LANG]
dialect = family[speechserver.VoiceFamily.DIALECT]
variant = family[speechserver.VoiceFamily.VARIANT]
voiceType = self.get_widget("voiceTypesCombo").get_active()
self._setFamilyNameForVoiceType(voiceType, name, language, dialect, variant)
except:
debug.printException(debug.LEVEL_SEVERE)
# Remember the last family manually selected by the user for the
# current speech server.
#
if selectedIndex != -1:
self.selectedLanguageChoices[self.speechServersChoice] = selectedIndex
self._setupFamilies()
def speechFamiliesChanged(self, widget):
"""Signal handler for the "value_changed" signal for the families
GtkComboBox widget. The user has selected a different voice
family. Save the new voice family name based on the new choice.
Arguments:
- widget: the component that generated the signal.
"""
if self.initializingSpeech:
return
selectedIndex = widget.get_active()
try:
family = self.speechFamiliesChoices[selectedIndex]
name = family[speechserver.VoiceFamily.NAME]
language = family[speechserver.VoiceFamily.LANG]
dialect = family[speechserver.VoiceFamily.DIALECT]
variant = family[speechserver.VoiceFamily.VARIANT]
voiceType = self.get_widget("voiceTypesCombo").get_active()
self._setFamilyNameForVoiceType(voiceType, name, language, dialect, variant)
except:
debug.printException(debug.LEVEL_SEVERE)
# Remember the last family manually selected by the user for the
# current speech server.
#
if selectedIndex != -1:
self.selectedFamilyChoices[self.speechServersChoice, \
self.speechLanguagesChoice] = selectedIndex
def voiceTypesChanged(self, widget):
"""Signal handler for the "changed" signal for the voiceTypes
GtkComboBox widget. The user has selected a different voice
type. Setup the new family, rate, pitch and volume component
values based on the new choice.
Arguments:
- widget: the component that generated the signal.
"""
if self.initializingSpeech:
return
voiceType = widget.get_active()
self._setVoiceSettingsForVoiceType(voiceType)
def rateValueChanged(self, widget):
"""Signal handler for the "value_changed" signal for the rateScale
GtkScale widget. The user has changed the current rate value.
Save the new rate value based on the currently selected voice
type.
Arguments:
- widget: the component that generated the signal.
"""
rate = widget.get_value()
voiceType = self.get_widget("voiceTypesCombo").get_active()
self._setRateForVoiceType(voiceType, rate)
voices = _settingsManager.getSetting('voices')
voices[settings.DEFAULT_VOICE][acss.ACSS.RATE] = rate
_settingsManager.setSetting('voices', voices)
def pitchValueChanged(self, widget):
"""Signal handler for the "value_changed" signal for the pitchScale
GtkScale widget. The user has changed the current pitch value.
Save the new pitch value based on the currently selected voice
type.
Arguments:
- widget: the component that generated the signal.
"""
pitch = widget.get_value()
voiceType = self.get_widget("voiceTypesCombo").get_active()
self._setPitchForVoiceType(voiceType, pitch)
voices = _settingsManager.getSetting('voices')
voices[settings.DEFAULT_VOICE][acss.ACSS.AVERAGE_PITCH] = pitch
_settingsManager.setSetting('voices', voices)
def volumeValueChanged(self, widget):
"""Signal handler for the "value_changed" signal for the voiceScale
GtkScale widget. The user has changed the current volume value.
Save the new volume value based on the currently selected voice
type.
Arguments:
- widget: the component that generated the signal.
"""
volume = widget.get_value()
voiceType = self.get_widget("voiceTypesCombo").get_active()
self._setVolumeForVoiceType(voiceType, volume)
voices = _settingsManager.getSetting('voices')
voices[settings.DEFAULT_VOICE][acss.ACSS.GAIN] = volume
_settingsManager.setSetting('voices', voices)
def checkButtonToggled(self, widget):
"""Signal handler for "toggled" signal for basic GtkCheckButton
widgets. The user has altered the state of the checkbox.
Set the preference to the new value.
Arguments:
- widget: the component that generated the signal.
"""
# To use this default handler please make sure:
# The name of the setting that will be changed is: settingName
# The id of the widget in the ui should be: settingNameCheckButton
#
settingName = Gtk.Buildable.get_name(widget)
# strip "CheckButton" from the end.
settingName = settingName[:-11]
self.prefsDict[settingName] = widget.get_active()
def keyEchoChecked(self, widget):
"""Signal handler for the "toggled" signal for the
keyEchoCheckbutton GtkCheckButton widget. The user has
[un]checked the 'Enable Key Echo' checkbox. Set the
'enableKeyEcho' preference to the new value. [In]sensitize
the checkboxes for the various types of key echo, depending
upon whether this value is checked or unchecked.
Arguments:
- widget: the component that generated the signal.
"""
self.prefsDict["enableKeyEcho"] = widget.get_active()
self._setKeyEchoItems()
def brailleSelectionChanged(self, widget):
"""Signal handler for the "toggled" signal for the
brailleSelectionNoneButton, brailleSelection7Button,
brailleSelection8Button or brailleSelectionBothButton
GtkRadioButton widgets. The user has toggled the braille
selection indicator value. If this signal was generated
as the result of a radio button getting selected (as
opposed to a radio button losing the selection), set the
'brailleSelectorIndicator' preference to the new value.
Arguments:
- widget: the component that generated the signal.
"""
if widget.get_active():
if widget.get_label() == guilabels.BRAILLE_DOT_7:
self.prefsDict["brailleSelectorIndicator"] = \
settings.BRAILLE_UNDERLINE_7
elif widget.get_label() == guilabels.BRAILLE_DOT_8:
self.prefsDict["brailleSelectorIndicator"] = \
settings.BRAILLE_UNDERLINE_8
elif widget.get_label() == guilabels.BRAILLE_DOT_7_8:
self.prefsDict["brailleSelectorIndicator"] = \
settings.BRAILLE_UNDERLINE_BOTH
else:
self.prefsDict["brailleSelectorIndicator"] = \
settings.BRAILLE_UNDERLINE_NONE
def brailleLinkChanged(self, widget):
"""Signal handler for the "toggled" signal for the
brailleLinkNoneButton, brailleLink7Button,
brailleLink8Button or brailleLinkBothButton
GtkRadioButton widgets. The user has toggled the braille
link indicator value. If this signal was generated
as the result of a radio button getting selected (as
opposed to a radio button losing the selection), set the
'brailleLinkIndicator' preference to the new value.
Arguments:
- widget: the component that generated the signal.
"""
if widget.get_active():
if widget.get_label() == guilabels.BRAILLE_DOT_7:
self.prefsDict["brailleLinkIndicator"] = \
settings.BRAILLE_UNDERLINE_7
elif widget.get_label() == guilabels.BRAILLE_DOT_8:
self.prefsDict["brailleLinkIndicator"] = \
settings.BRAILLE_UNDERLINE_8
elif widget.get_label() == guilabels.BRAILLE_DOT_7_8:
self.prefsDict["brailleLinkIndicator"] = \
settings.BRAILLE_UNDERLINE_BOTH
else:
self.prefsDict["brailleLinkIndicator"] = \
settings.BRAILLE_UNDERLINE_NONE
def brailleIndicatorChanged(self, widget):
"""Signal handler for the "toggled" signal for the
textBrailleNoneButton, textBraille7Button, textBraille8Button
or textBrailleBothButton GtkRadioButton widgets. The user has
toggled the text attributes braille indicator value. If this signal
was generated as the result of a radio button getting selected
(as opposed to a radio button losing the selection), set the
'textAttributesBrailleIndicator' preference to the new value.
Arguments:
- widget: the component that generated the signal.
"""
if widget.get_active():
if widget.get_label() == guilabels.BRAILLE_DOT_7:
self.prefsDict["textAttributesBrailleIndicator"] = \
settings.BRAILLE_UNDERLINE_7
elif widget.get_label() == guilabels.BRAILLE_DOT_8:
self.prefsDict["textAttributesBrailleIndicator"] = \
settings.BRAILLE_UNDERLINE_8
elif widget.get_label() == guilabels.BRAILLE_DOT_7_8:
self.prefsDict["textAttributesBrailleIndicator"] = \
settings.BRAILLE_UNDERLINE_BOTH
else:
self.prefsDict["textAttributesBrailleIndicator"] = \
settings.BRAILLE_UNDERLINE_NONE
def punctuationLevelChanged(self, widget):
"""Signal handler for the "toggled" signal for the noneButton,
someButton or allButton GtkRadioButton widgets. The user has
toggled the speech punctuation level value. If this signal
was generated as the result of a radio button getting selected
(as opposed to a radio button losing the selection), set the
'verbalizePunctuationStyle' preference to the new value.
Arguments:
- widget: the component that generated the signal.
"""
if widget.get_active():
if widget.get_label() == guilabels.PUNCTUATION_STYLE_NONE:
self.prefsDict["verbalizePunctuationStyle"] = \
settings.PUNCTUATION_STYLE_NONE
elif widget.get_label() == guilabels.PUNCTUATION_STYLE_SOME:
self.prefsDict["verbalizePunctuationStyle"] = \
settings.PUNCTUATION_STYLE_SOME
elif widget.get_label() == guilabels.PUNCTUATION_STYLE_MOST:
self.prefsDict["verbalizePunctuationStyle"] = \
settings.PUNCTUATION_STYLE_MOST
else:
self.prefsDict["verbalizePunctuationStyle"] = \
settings.PUNCTUATION_STYLE_ALL
def orcaModifierChanged(self, widget):
"""Signal handler for the changed signal for the orcaModifierComboBox
Set the 'orcaModifierKeys' preference to the new value.
Arguments:
- widget: the component that generated the signal.
"""
model = widget.get_model()
myIter = widget.get_active_iter()
orcaModifier = model[myIter][0]
self.prefsDict["orcaModifierKeys"] = orcaModifier.split(', ')
def progressBarVerbosityChanged(self, widget):
"""Signal handler for the changed signal for the progressBarVerbosity
GtkComboBox widget. Set the 'progressBarVerbosity' preference to
the new value.
Arguments:
- widget: the component that generated the signal.
"""
model = widget.get_model()
myIter = widget.get_active_iter()
progressBarVerbosity = model[myIter][0]
if progressBarVerbosity == guilabels.PROGRESS_BAR_ALL:
self.prefsDict["progressBarVerbosity"] = \
settings.PROGRESS_BAR_ALL
elif progressBarVerbosity == guilabels.PROGRESS_BAR_WINDOW:
self.prefsDict["progressBarVerbosity"] = \
settings.PROGRESS_BAR_WINDOW
else:
self.prefsDict["progressBarVerbosity"] = \
settings.PROGRESS_BAR_APPLICATION
def capitalizationStyleChanged(self, widget):
model = widget.get_model()
myIter = widget.get_active_iter()
capitalizationStyle = model[myIter][0]
if capitalizationStyle == guilabels.CAPITALIZATION_STYLE_ICON:
self.prefsDict["capitalizationStyle"] = settings.CAPITALIZATION_STYLE_ICON
elif capitalizationStyle == guilabels.CAPITALIZATION_STYLE_SPELL:
self.prefsDict["capitalizationStyle"] = settings.CAPITALIZATION_STYLE_SPELL
else:
self.prefsDict["capitalizationStyle"] = settings.CAPITALIZATION_STYLE_NONE
speech.updateCapitalizationStyle()
def sayAllStyleChanged(self, widget):
"""Signal handler for the "changed" signal for the sayAllStyle
GtkComboBox widget. Set the 'sayAllStyle' preference to the
new value.
Arguments:
- widget: the component that generated the signal.
"""
model = widget.get_model()
myIter = widget.get_active_iter()
sayAllStyle = model[myIter][0]
if sayAllStyle == guilabels.SAY_ALL_STYLE_LINE:
self.prefsDict["sayAllStyle"] = settings.SAYALL_STYLE_LINE
elif sayAllStyle == guilabels.SAY_ALL_STYLE_SENTENCE:
self.prefsDict["sayAllStyle"] = settings.SAYALL_STYLE_SENTENCE
def dateFormatChanged(self, widget):
"""Signal handler for the "changed" signal for the dateFormat
GtkComboBox widget. Set the 'dateFormat' preference to the
new value.
Arguments:
- widget: the component that generated the signal.
"""
dateFormatCombo = widget.get_active()
if dateFormatCombo == DATE_FORMAT_LOCALE:
newFormat = messages.DATE_FORMAT_LOCALE
elif dateFormatCombo == DATE_FORMAT_NUMBERS_DM:
newFormat = messages.DATE_FORMAT_NUMBERS_DM
elif dateFormatCombo == DATE_FORMAT_NUMBERS_MD:
newFormat = messages.DATE_FORMAT_NUMBERS_MD
elif dateFormatCombo == DATE_FORMAT_NUMBERS_DMY:
newFormat = messages.DATE_FORMAT_NUMBERS_DMY
elif dateFormatCombo == DATE_FORMAT_NUMBERS_MDY:
newFormat = messages.DATE_FORMAT_NUMBERS_MDY
elif dateFormatCombo == DATE_FORMAT_NUMBERS_YMD:
newFormat = messages.DATE_FORMAT_NUMBERS_YMD
elif dateFormatCombo == DATE_FORMAT_FULL_DM:
newFormat = messages.DATE_FORMAT_FULL_DM
elif dateFormatCombo == DATE_FORMAT_FULL_MD:
newFormat = messages.DATE_FORMAT_FULL_MD
elif dateFormatCombo == DATE_FORMAT_FULL_DMY:
newFormat = messages.DATE_FORMAT_FULL_DMY
elif dateFormatCombo == DATE_FORMAT_FULL_MDY:
newFormat = messages.DATE_FORMAT_FULL_MDY
elif dateFormatCombo == DATE_FORMAT_FULL_YMD:
newFormat = messages.DATE_FORMAT_FULL_YMD
elif dateFormatCombo == DATE_FORMAT_ABBREVIATED_DM:
newFormat = messages.DATE_FORMAT_ABBREVIATED_DM
elif dateFormatCombo == DATE_FORMAT_ABBREVIATED_MD:
newFormat = messages.DATE_FORMAT_ABBREVIATED_MD
elif dateFormatCombo == DATE_FORMAT_ABBREVIATED_DMY:
newFormat = messages.DATE_FORMAT_ABBREVIATED_DMY
elif dateFormatCombo == DATE_FORMAT_ABBREVIATED_MDY:
newFormat = messages.DATE_FORMAT_ABBREVIATED_MDY
elif dateFormatCombo == DATE_FORMAT_ABBREVIATED_YMD:
newFormat = messages.DATE_FORMAT_ABBREVIATED_YMD
self.prefsDict["presentDateFormat"] = newFormat
def timeFormatChanged(self, widget):
"""Signal handler for the "changed" signal for the timeFormat
GtkComboBox widget. Set the 'timeFormat' preference to the
new value.
Arguments:
- widget: the component that generated the signal.
"""
timeFormatCombo = widget.get_active()
if timeFormatCombo == TIME_FORMAT_LOCALE:
newFormat = messages.TIME_FORMAT_LOCALE
elif timeFormatCombo == TIME_FORMAT_12_HM:
newFormat = messages.TIME_FORMAT_12_HM
elif timeFormatCombo == TIME_FORMAT_12_HMS:
newFormat = messages.TIME_FORMAT_12_HMS
elif timeFormatCombo == TIME_FORMAT_24_HMS:
newFormat = messages.TIME_FORMAT_24_HMS
elif timeFormatCombo == TIME_FORMAT_24_HMS_WITH_WORDS:
newFormat = messages.TIME_FORMAT_24_HMS_WITH_WORDS
elif timeFormatCombo == TIME_FORMAT_24_HM:
newFormat = messages.TIME_FORMAT_24_HM
elif timeFormatCombo == TIME_FORMAT_24_HM_WITH_WORDS:
newFormat = messages.TIME_FORMAT_24_HM_WITH_WORDS
self.prefsDict["presentTimeFormat"] = newFormat
def speechVerbosityChanged(self, widget):
"""Signal handler for the "toggled" signal for the speechBriefButton,
or speechVerboseButton GtkRadioButton widgets. The user has
toggled the speech verbosity level value. If this signal was
generated as the result of a radio button getting selected
(as opposed to a radio button losing the selection), set the
'speechVerbosityLevel' preference to the new value.
Arguments:
- widget: the component that generated the signal.
"""
if widget.get_active():
if widget.get_label() == guilabels.VERBOSITY_LEVEL_BRIEF:
self.prefsDict["speechVerbosityLevel"] = \
settings.VERBOSITY_LEVEL_BRIEF
else:
self.prefsDict["speechVerbosityLevel"] = \
settings.VERBOSITY_LEVEL_VERBOSE
def progressBarUpdateIntervalValueChanged(self, widget):
"""Signal handler for the "value_changed" signal for the
progressBarUpdateIntervalSpinButton GtkSpinButton widget.
Arguments:
- widget: the component that generated the signal.
"""
self.prefsDict["progressBarUpdateInterval"] = widget.get_value_as_int()
def brailleFlashTimeValueChanged(self, widget):
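        """Signal handler for the "value_changed" signal for the
        brailleFlashTime GtkSpinButton widget. The widget value is in
        seconds; the 'brailleFlashTime' preference is stored in
        milliseconds.
        Arguments:
        - widget: the component that generated the signal.
        """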
self.prefsDict["brailleFlashTime"] = widget.get_value_as_int() * 1000
def abbrevRolenamesChecked(self, widget):
"""Signal handler for the "toggled" signal for the abbrevRolenames
GtkCheckButton widget. The user has [un]checked the 'Abbreviated
Rolenames' checkbox. Set the 'brailleRolenameStyle' preference
to the new value.
Arguments:
- widget: the component that generated the signal.
"""
if widget.get_active():
self.prefsDict["brailleRolenameStyle"] = \
settings.BRAILLE_ROLENAME_STYLE_SHORT
else:
self.prefsDict["brailleRolenameStyle"] = \
settings.BRAILLE_ROLENAME_STYLE_LONG
def brailleVerbosityChanged(self, widget):
"""Signal handler for the "toggled" signal for the brailleBriefButton,
or brailleVerboseButton GtkRadioButton widgets. The user has
toggled the braille verbosity level value. If this signal was
generated as the result of a radio button getting selected
(as opposed to a radio button losing the selection), set the
'brailleVerbosityLevel' preference to the new value.
Arguments:
- widget: the component that generated the signal.
"""
if widget.get_active():
if widget.get_label() == guilabels.VERBOSITY_LEVEL_BRIEF:
self.prefsDict["brailleVerbosityLevel"] = \
settings.VERBOSITY_LEVEL_BRIEF
else:
self.prefsDict["brailleVerbosityLevel"] = \
settings.VERBOSITY_LEVEL_VERBOSE
def keyModifiedToggle(self, cell, path, model, col):
"""When the user changes a checkbox field (boolean field)"""
model[path][col] = not model[path][col]
return
def editingKey(self, cell, editable, path, treeModel):
"""Starts user input of a Key for a selected key binding"""
self._presentMessage(messages.KB_ENTER_NEW_KEY)
orca_state.capturingKeys = True
editable.connect('key-press-event', self.kbKeyPressed)
return
def editingCanceledKey(self, editable):
"""Stops user input of a Key for a selected key binding"""
orca_state.capturingKeys = False
self._capturedKey = []
return
def _processKeyCaptured(self, keyPressedEvent):
"""Called when a new key event arrives and we are capturing keys.
(used for key bindings redefinition)
"""
# We want the keyname rather than the printable character.
# If it's not on the keypad, get the name of the unshifted
        # character (e.g. "1" instead of "!").
#
keycode = keyPressedEvent.hardware_keycode
keymap = Gdk.Keymap.get_default()
entries_for_keycode = keymap.get_entries_for_keycode(keycode)
entries = entries_for_keycode[-1]
eventString = Gdk.keyval_name(entries[0])
eventState = keyPressedEvent.state
orcaMods = settings.orcaModifierKeys
if eventString in orcaMods:
self._capturedKey = ['', keybindings.ORCA_MODIFIER_MASK, 0]
return False
modifierKeys = ['Alt_L', 'Alt_R', 'Control_L', 'Control_R',
'Shift_L', 'Shift_R', 'Meta_L', 'Meta_R',
'Num_Lock', 'Caps_Lock', 'Shift_Lock']
if eventString in modifierKeys:
return False
eventState = eventState & Gtk.accelerator_get_default_mod_mask()
if not self._capturedKey \
or eventString in ['Return', 'Escape']:
self._capturedKey = [eventString, eventState, 1]
return True
string, modifiers, clickCount = self._capturedKey
isOrcaModifier = modifiers & keybindings.ORCA_MODIFIER_MASK
if isOrcaModifier:
eventState |= keybindings.ORCA_MODIFIER_MASK
self._capturedKey = [eventString, eventState, clickCount + 1]
return True
def kbKeyPressed(self, editable, event):
"""Special handler for the key_pressed events when editing the
keybindings. This lets us control what gets inserted into the
entry.
"""
keyProcessed = self._processKeyCaptured(event)
if not keyProcessed:
return True
if not self._capturedKey:
return False
keyName, modifiers, clickCount = self._capturedKey
if not keyName or keyName in ["Return", "Escape"]:
return False
isOrcaModifier = modifiers & keybindings.ORCA_MODIFIER_MASK
if keyName in ["Delete", "BackSpace"] and not isOrcaModifier:
editable.set_text("")
self._presentMessage(messages.KB_DELETED)
self._capturedKey = []
self.newBinding = None
return True
self.newBinding = keybindings.KeyBinding(keyName,
keybindings.defaultModifierMask,
modifiers,
None,
clickCount)
modifierNames = keybindings.getModifierNames(modifiers)
clickCountString = self._clickCountToString(clickCount)
newString = modifierNames + keyName + clickCountString
description = self.pendingKeyBindings.get(newString)
if description is None:
match = lambda x: x.keysymstring == keyName \
and x.modifiers == modifiers \
and x.click_count == clickCount \
and x.handler
matches = list(filter(match, self.kbindings.keyBindings))
if matches:
description = matches[0].handler.description
if description:
msg = messages.KB_ALREADY_BOUND % description
delay = int(1000 * settings.doubleClickTimeout)
GLib.timeout_add(delay, self._presentMessage, msg)
else:
msg = messages.KB_CAPTURED % newString
editable.set_text(newString)
self._presentMessage(msg)
return True
def editedKey(self, cell, path, new_text, treeModel,
modMask, modUsed, key, click_count, text):
"""The user changed the key for a Keybinding: update the model of
the treeview.
"""
orca_state.capturingKeys = False
self._capturedKey = []
myiter = treeModel.get_iter_from_string(path)
try:
originalBinding = treeModel.get_value(myiter, text)
        except Exception:
originalBinding = ''
modified = (originalBinding != new_text)
try:
string = self.newBinding.keysymstring
mods = self.newBinding.modifiers
clickCount = self.newBinding.click_count
        except Exception:
string = ''
mods = 0
clickCount = 1
mods = mods & Gdk.ModifierType.MODIFIER_MASK
if mods & (1 << pyatspi.MODIFIER_SHIFTLOCK) \
and mods & keybindings.ORCA_MODIFIER_MASK:
mods ^= (1 << pyatspi.MODIFIER_SHIFTLOCK)
treeModel.set(myiter,
modMask, str(keybindings.defaultModifierMask),
modUsed, str(int(mods)),
key, string,
text, new_text,
click_count, str(clickCount),
MODIF, modified)
speech.stop()
if new_text:
message = messages.KB_CAPTURED_CONFIRMATION % new_text
description = treeModel.get_value(myiter, DESCRIP)
self.pendingKeyBindings[new_text] = description
else:
message = messages.KB_DELETED_CONFIRMATION
if modified:
self._presentMessage(message)
self.pendingKeyBindings[originalBinding] = ""
return
def presentToolTipsChecked(self, widget):
"""Signal handler for the "toggled" signal for the
presentToolTipsCheckButton GtkCheckButton widget.
The user has [un]checked the 'Present ToolTips'
checkbox. Set the 'presentToolTips'
preference to the new value if the user can present tooltips.
Arguments:
- widget: the component that generated the signal.
"""
self.prefsDict["presentToolTips"] = widget.get_active()
def keyboardLayoutChanged(self, widget):
"""Signal handler for the "toggled" signal for the generalDesktopButton,
or generalLaptopButton GtkRadioButton widgets. The user has
toggled the keyboard layout value. If this signal was
generated as the result of a radio button getting selected
(as opposed to a radio button losing the selection), set the
'keyboardLayout' preference to the new value. Also set the
matching list of Orca modifier keys
Arguments:
- widget: the component that generated the signal.
"""
if widget.get_active():
if widget.get_label() == guilabels.KEYBOARD_LAYOUT_DESKTOP:
self.prefsDict["keyboardLayout"] = \
settings.GENERAL_KEYBOARD_LAYOUT_DESKTOP
self.prefsDict["orcaModifierKeys"] = \
settings.DESKTOP_MODIFIER_KEYS
else:
self.prefsDict["keyboardLayout"] = \
settings.GENERAL_KEYBOARD_LAYOUT_LAPTOP
self.prefsDict["orcaModifierKeys"] = \
settings.LAPTOP_MODIFIER_KEYS
def pronunciationAddButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the
pronunciationAddButton GtkButton widget. The user has clicked
the Add button on the Pronunciation pane. A new row will be
added to the end of the pronunciation dictionary list. Both the
actual and replacement strings will initially be set to an empty
string. Focus will be moved to that row.
Arguments:
- widget: the component that generated the signal.
"""
model = self.pronunciationView.get_model()
thisIter = model.append()
model.set(thisIter, ACTUAL, "", REPLACEMENT, "")
path = model.get_path(thisIter)
col = self.pronunciationView.get_column(0)
self.pronunciationView.grab_focus()
self.pronunciationView.set_cursor(path, col, True)
def pronunciationDeleteButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the
pronunciationDeleteButton GtkButton widget. The user has clicked
the Delete button on the Pronunciation pane. The row in the
pronunciation dictionary list with focus will be deleted.
Arguments:
- widget: the component that generated the signal.
"""
model, oldIter = self.pronunciationView.get_selection().get_selected()
model.remove(oldIter)
def textSelectAllButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the
textSelectAllButton GtkButton widget. The user has clicked
the Speak all button. Check all the text attributes and
then update the "enabledSpokenTextAttributes" and
"enabledBrailledTextAttributes" preference strings.
Arguments:
- widget: the component that generated the signal.
"""
attributes = _settingsManager.getSetting('allTextAttributes')
self._setSpokenTextAttributes(
self.getTextAttributesView, attributes, True)
self._setBrailledTextAttributes(
self.getTextAttributesView, attributes, True)
self._updateTextDictEntry()
def textUnselectAllButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the
textUnselectAllButton GtkButton widget. The user has clicked
the Speak none button. Uncheck all the text attributes and
then update the "enabledSpokenTextAttributes" and
"enabledBrailledTextAttributes" preference strings.
Arguments:
- widget: the component that generated the signal.
"""
attributes = _settingsManager.getSetting('allTextAttributes')
self._setSpokenTextAttributes(
self.getTextAttributesView, attributes, False)
self._setBrailledTextAttributes(
self.getTextAttributesView, attributes, False)
self._updateTextDictEntry()
def textResetButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the
textResetButton GtkButton widget. The user has clicked
the Reset button. Reset all the text attributes to their
initial state and then update the "enabledSpokenTextAttributes"
and "enabledBrailledTextAttributes" preference strings.
Arguments:
- widget: the component that generated the signal.
"""
attributes = _settingsManager.getSetting('allTextAttributes')
self._setSpokenTextAttributes(
self.getTextAttributesView, attributes, False)
self._setBrailledTextAttributes(
self.getTextAttributesView, attributes, False)
attributes = _settingsManager.getSetting('enabledSpokenTextAttributes')
self._setSpokenTextAttributes(
self.getTextAttributesView, attributes, True)
attributes = \
_settingsManager.getSetting('enabledBrailledTextAttributes')
self._setBrailledTextAttributes(
self.getTextAttributesView, attributes, True)
self._updateTextDictEntry()
def textMoveToTopButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the
textMoveToTopButton GtkButton widget. The user has clicked
the Move to top button. Move the selected rows in the text
attribute view to the very top of the list and then update
the "enabledSpokenTextAttributes" and "enabledBrailledTextAttributes"
preference strings.
Arguments:
- widget: the component that generated the signal.
"""
textSelection = self.getTextAttributesView.get_selection()
[model, paths] = textSelection.get_selected_rows()
for path in paths:
thisIter = model.get_iter(path)
model.move_after(thisIter, None)
self._updateTextDictEntry()
def textMoveUpOneButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the
textMoveUpOneButton GtkButton widget. The user has clicked
the Move up one button. Move the selected rows in the text
attribute view up one row in the list and then update the
"enabledSpokenTextAttributes" and "enabledBrailledTextAttributes"
preference strings.
Arguments:
- widget: the component that generated the signal.
"""
textSelection = self.getTextAttributesView.get_selection()
[model, paths] = textSelection.get_selected_rows()
for path in paths:
thisIter = model.get_iter(path)
indices = path.get_indices()
if indices[0]:
otherIter = model.iter_nth_child(None, indices[0]-1)
model.swap(thisIter, otherIter)
self._updateTextDictEntry()
def textMoveDownOneButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the
textMoveDownOneButton GtkButton widget. The user has clicked
the Move down one button. Move the selected rows in the text
attribute view down one row in the list and then update the
"enabledSpokenTextAttributes" and "enabledBrailledTextAttributes"
preference strings.
Arguments:
- widget: the component that generated the signal.
"""
textSelection = self.getTextAttributesView.get_selection()
[model, paths] = textSelection.get_selected_rows()
noRows = model.iter_n_children(None)
for path in paths:
thisIter = model.get_iter(path)
indices = path.get_indices()
if indices[0] < noRows-1:
otherIter = model.iter_next(thisIter)
model.swap(thisIter, otherIter)
self._updateTextDictEntry()
def textMoveToBottomButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the
textMoveToBottomButton GtkButton widget. The user has clicked
the Move to bottom button. Move the selected rows in the text
attribute view to the bottom of the list and then update the
"enabledSpokenTextAttributes" and "enabledBrailledTextAttributes"
preference strings.
Arguments:
- widget: the component that generated the signal.
"""
textSelection = self.getTextAttributesView.get_selection()
[model, paths] = textSelection.get_selected_rows()
for path in paths:
thisIter = model.get_iter(path)
model.move_before(thisIter, None)
self._updateTextDictEntry()
def helpButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the helpButton
GtkButton widget. The user has clicked the Help button.
Arguments:
- widget: the component that generated the signal.
"""
orca.helpForOrca(page="preferences")
def restoreSettings(self):
"""Restore the settings we saved away when opening the preferences
dialog."""
# Restore the default rate/pitch/gain,
# in case the user played with the sliders.
#
voices = _settingsManager.getSetting('voices')
defaultVoice = voices[settings.DEFAULT_VOICE]
defaultVoice[acss.ACSS.GAIN] = self.savedGain
defaultVoice[acss.ACSS.AVERAGE_PITCH] = self.savedPitch
defaultVoice[acss.ACSS.RATE] = self.savedRate
def saveBasicSettings(self):
if not self._isInitialSetup:
self.restoreSettings()
enable = self.get_widget("speechSupportCheckButton").get_active()
self.prefsDict["enableSpeech"] = enable
if self.speechSystemsChoice:
self.prefsDict["speechServerFactory"] = \
self.speechSystemsChoice.__name__
if self.speechServersChoice:
self.prefsDict["speechServerInfo"] = \
self.speechServersChoice.getInfo()
if self.defaultVoice is not None:
self.prefsDict["voices"] = {
settings.DEFAULT_VOICE: acss.ACSS(self.defaultVoice),
settings.UPPERCASE_VOICE: acss.ACSS(self.uppercaseVoice),
settings.HYPERLINK_VOICE: acss.ACSS(self.hyperlinkVoice),
settings.SYSTEM_VOICE: acss.ACSS(self.systemVoice),
}
def applyButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the applyButton
GtkButton widget. The user has clicked the Apply button.
        Write out the user's preferences. If GNOME accessibility hadn't
        previously been enabled, warn the user that they will need to
        log out. Shut down any active speech servers that were started.
        Reload the user's preferences so the new speech, braille and
        key echo values take effect. Do not dismiss the configuration
window.
Arguments:
- widget: the component that generated the signal.
"""
self.saveBasicSettings()
activeProfile = self.getComboBoxList(self.profilesCombo)
startingProfile = self.getComboBoxList(self.startingProfileCombo)
self.prefsDict['profile'] = activeProfile
self.prefsDict['activeProfile'] = activeProfile
self.prefsDict['startingProfile'] = startingProfile
_settingsManager.setStartingProfile(startingProfile)
self.writeUserPreferences()
orca.loadUserSettings(self.script)
braille.checkBrailleSetting()
self._initSpeechState()
self._populateKeyBindings()
self.__initProfileCombo()
def cancelButtonClicked(self, widget):
"""Signal handler for the "clicked" signal for the cancelButton
GtkButton widget. The user has clicked the Cancel button.
Don't write out the preferences. Destroy the configuration window.
Arguments:
- widget: the component that generated the signal.
"""
self.windowClosed(widget)
self.get_widget("orcaSetupWindow").destroy()
def okButtonClicked(self, widget=None):
"""Signal handler for the "clicked" signal for the okButton
GtkButton widget. The user has clicked the OK button.
        Write out the user's preferences. If GNOME accessibility hadn't
        previously been enabled, warn the user that they will need to
        log out. Shut down any active speech servers that were started.
        Reload the user's preferences so the new speech, braille and
        key echo values take effect. Hide the configuration window.
Arguments:
- widget: the component that generated the signal.
"""
self.applyButtonClicked(widget)
self._cleanupSpeechServers()
self.get_widget("orcaSetupWindow").destroy()
def windowClosed(self, widget):
"""Signal handler for the "closed" signal for the orcaSetupWindow
GtkWindow widget. This is effectively the same as pressing the
cancel button, except the window is destroyed for us.
Arguments:
- widget: the component that generated the signal.
"""
factory = _settingsManager.getSetting('speechServerFactory')
if factory:
self._setSpeechSystemsChoice(factory)
server = _settingsManager.getSetting('speechServerInfo')
if server:
self._setSpeechServersChoice(server)
self._cleanupSpeechServers()
self.restoreSettings()
def windowDestroyed(self, widget):
"""Signal handler for the "destroyed" signal for the orcaSetupWindow
GtkWindow widget. Reset orca_state.orcaOS to None, so that the
GUI can be rebuilt from the GtkBuilder file the next time the user
wants to display the configuration GUI.
Arguments:
- widget: the component that generated the signal.
"""
self.keyBindView.set_model(None)
self.getTextAttributesView.set_model(None)
self.pronunciationView.set_model(None)
self.keyBindView.set_headers_visible(False)
self.getTextAttributesView.set_headers_visible(False)
self.pronunciationView.set_headers_visible(False)
self.keyBindView.hide()
self.getTextAttributesView.hide()
self.pronunciationView.hide()
orca_state.orcaOS = None
def showProfileGUI(self, widget):
"""Show profile Dialog to add a new one"""
orca_gui_profile.showProfileUI(self)
def saveProfile(self, profileToSaveLabel):
"""Creates a new profile based on the name profileToSaveLabel and
updates the Preferences dialog combo boxes accordingly."""
if not profileToSaveLabel:
return
profileToSave = profileToSaveLabel.replace(' ', '_').lower()
profile = [profileToSaveLabel, profileToSave]
        def saveActiveProfile(newProfile=True):
if newProfile:
activeProfileIter = self.profilesComboModel.append(profile)
self.profilesCombo.set_active_iter(activeProfileIter)
self.prefsDict['profile'] = profile
self.prefsDict['activeProfile'] = profile
self.saveBasicSettings()
self.writeUserPreferences()
availableProfiles = [p[1] for p in self.__getAvailableProfiles()]
if isinstance(profileToSave, str) \
and profileToSave != '' \
                and profileToSave not in availableProfiles \
and profileToSave != self._defaultProfile[1]:
saveActiveProfile()
else:
if profileToSave is not None:
message = guilabels.PROFILE_CONFLICT_MESSAGE % \
("<b>%s</b>" % GLib.markup_escape_text(profileToSaveLabel))
dialog = Gtk.MessageDialog(None,
Gtk.DialogFlags.MODAL,
type=Gtk.MessageType.INFO,
buttons=Gtk.ButtonsType.YES_NO)
dialog.set_markup("<b>%s</b>" % guilabels.PROFILE_CONFLICT_LABEL)
dialog.format_secondary_markup(message)
dialog.set_title(guilabels.PROFILE_CONFLICT_TITLE)
response = dialog.run()
if response == Gtk.ResponseType.YES:
dialog.destroy()
saveActiveProfile(False)
else:
dialog.destroy()
def removeProfileButtonClicked(self, widget):
"""Remove profile button clicked handler
If we removed the last profile, a default one will automatically get
added back by the settings manager.
"""
oldProfile = self.getComboBoxList(self.profilesCombo)
message = guilabels.PROFILE_REMOVE_MESSAGE % \
("<b>%s</b>" % GLib.markup_escape_text(oldProfile[0]))
dialog = Gtk.MessageDialog(self.window, Gtk.DialogFlags.MODAL,
type=Gtk.MessageType.INFO,
buttons=Gtk.ButtonsType.YES_NO)
dialog.set_markup("<b>%s</b>" % guilabels.PROFILE_REMOVE_LABEL)
dialog.format_secondary_markup(message)
if dialog.run() == Gtk.ResponseType.YES:
# If we remove the currently used starting profile, fallback on
# the first listed profile, or the default one if there's
# nothing better
newStartingProfile = self.prefsDict.get('startingProfile')
if not newStartingProfile or newStartingProfile == oldProfile:
newStartingProfile = self._defaultProfile
for row in self.profilesComboModel:
rowProfile = row[:]
if rowProfile != oldProfile:
newStartingProfile = rowProfile
break
# Update the current profile to the active profile unless we're
# removing that one, in which case we use the new starting
# profile
newProfile = self.prefsDict.get('activeProfile')
if not newProfile or newProfile == oldProfile:
newProfile = newStartingProfile
_settingsManager.removeProfile(oldProfile[1])
self.loadProfile(newProfile)
# Make sure nothing is referencing the removed profile anymore
startingProfile = self.prefsDict.get('startingProfile')
if not startingProfile or startingProfile == oldProfile:
self.prefsDict['startingProfile'] = newStartingProfile
_settingsManager.setStartingProfile(newStartingProfile)
self.writeUserPreferences()
dialog.destroy()
def loadProfileButtonClicked(self, widget):
"""Load profile button clicked handler"""
if self._isInitialSetup:
return
dialog = Gtk.MessageDialog(None,
Gtk.DialogFlags.MODAL,
type=Gtk.MessageType.INFO,
buttons=Gtk.ButtonsType.YES_NO)
dialog.set_markup("<b>%s</b>" % guilabels.PROFILE_LOAD_LABEL)
dialog.format_secondary_markup(guilabels.PROFILE_LOAD_MESSAGE)
response = dialog.run()
if response == Gtk.ResponseType.YES:
dialog.destroy()
self.loadSelectedProfile()
else:
dialog.destroy()
def loadSelectedProfile(self):
"""Load selected profile"""
activeProfile = self.getComboBoxList(self.profilesCombo)
self.loadProfile(activeProfile)
def loadProfile(self, profile):
"""Load profile"""
self.saveBasicSettings()
self.prefsDict['activeProfile'] = profile
_settingsManager.setProfile(profile[1])
self.prefsDict = _settingsManager.getGeneralSettings(profile[1])
orca.loadUserSettings(skipReloadMessage=True)
self._initGUIState()
braille.checkBrailleSetting()
self._initSpeechState()
self._populateKeyBindings()
self.__initProfileCombo()
| GNOME/orca | src/orca/orca_gui_prefs.py | Python | lgpl-2.1 | 142,434 |
# -*- coding: utf-8 -*-
# Copyright (C) 2010, 2011, 2012, 2013 Sebastian Wiesner <lunaryorn@gmail.com>
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# pylint: disable=anomalous-backslash-in-string
"""
pyudev.pyqt4
============
PyQt4 integration.
:class:`MonitorObserver` integrates device monitoring into the PyQt4\_
mainloop by turning device events into Qt signals.
:mod:`PyQt4.QtCore` from PyQt4\_ must be available when importing this
module.
.. _PyQt4: http://riverbankcomputing.co.uk/software/pyqt/intro
.. moduleauthor:: Sebastian Wiesner <lunaryorn@gmail.com>
"""
from __future__ import (print_function, division, unicode_literals,
absolute_import)
from PyQt4.QtCore import QSocketNotifier, QObject, pyqtSignal
from pyudev._util import text_type
from pyudev.core import Device
from pyudev._qt_base import QUDevMonitorObserverMixin, MonitorObserverMixin
class MonitorObserver(QObject, MonitorObserverMixin):
"""An observer for device events integrating into the :mod:`PyQt4` mainloop.
This class inherits :class:`~PyQt4.QtCore.QObject` to turn device events
into Qt signals:
>>> from pyudev import Context, Monitor
>>> from pyudev.pyqt4 import MonitorObserver
>>> context = Context()
>>> monitor = Monitor.from_netlink(context)
>>> monitor.filter_by(subsystem='input')
>>> observer = MonitorObserver(monitor)
>>> def device_event(device):
... print('event {0} on device {1}'.format(device.action, device))
>>> observer.deviceEvent.connect(device_event)
>>> monitor.start()
This class is a child of :class:`~PyQt4.QtCore.QObject`.
"""
#: emitted upon arbitrary device events
deviceEvent = pyqtSignal(Device)
def __init__(self, monitor, parent=None):
"""
Observe the given ``monitor`` (a :class:`~pyudev.Monitor`):
``parent`` is the parent :class:`~PyQt4.QtCore.QObject` of this
object. It is passed unchanged to the inherited constructor of
:class:`~PyQt4.QtCore.QObject`.
"""
QObject.__init__(self, parent)
self._setup_notifier(monitor, QSocketNotifier)
class QUDevMonitorObserver(QObject, QUDevMonitorObserverMixin):
"""An observer for device events integrating into the :mod:`PyQt4` mainloop.
.. deprecated:: 0.17
Will be removed in 1.0. Use :class:`MonitorObserver` instead.
"""
#: emitted upon arbitrary device events
deviceEvent = pyqtSignal(text_type, Device)
#: emitted, if a device was added
deviceAdded = pyqtSignal(Device)
#: emitted, if a device was removed
deviceRemoved = pyqtSignal(Device)
#: emitted, if a device was changed
deviceChanged = pyqtSignal(Device)
#: emitted, if a device was moved
deviceMoved = pyqtSignal(Device)
def __init__(self, monitor, parent=None):
"""
Observe the given ``monitor`` (a :class:`~pyudev.Monitor`):
``parent`` is the parent :class:`~PyQt4.QtCore.QObject` of this
object. It is passed unchanged to the inherited constructor of
:class:`~PyQt4.QtCore.QObject`.
"""
QObject.__init__(self, parent)
self._setup_notifier(monitor, QSocketNotifier)
| mulkieran/pyudev | pyudev/pyqt4.py | Python | lgpl-2.1 | 3,930 |
#!/usr/bin/python
"""Test of tree output using Firefox."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
sequence.append(PauseAction(3000))
sequence.append(KeyComboAction("<Alt>b"))
sequence.append(KeyComboAction("Return"))
sequence.append(KeyComboAction("Tab"))
sequence.append(KeyComboAction("Tab"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("Tab"))
sequence.append(PauseAction(3000))
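# Each assertion below follows the same pattern: start recording, send a
# keystroke, then assert the braille and speech output Orca should present.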
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Shift>Tab"))
sequence.append(utils.AssertPresentationAction(
"1. Shift Tab for tree",
["BRAILLE LINE: 'Firefox application Library frame All Bookmarks expanded TREE LEVEL 1'",
" VISIBLE: 'All Bookmarks expanded TREE LEVE', cursor=1",
"SPEECH OUTPUT: 'All Bookmarks.'",
"SPEECH OUTPUT: 'expanded.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"2. Down Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Toolbar TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Toolbar TREE LEVEL 2', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Toolbar.'",
"SPEECH OUTPUT: 'tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"3. Down Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu collapsed TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu collapsed TREE LE', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu.'",
"SPEECH OUTPUT: 'collapsed.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"4. Basic Where Am I",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu collapsed TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu collapsed TREE LE', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu tree item.'",
"SPEECH OUTPUT: '2 of 3.'",
"SPEECH OUTPUT: 'collapsed tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"5. Right Arrow to expand folder",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu expanded TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu expanded TREE LEV', cursor=1",
"SPEECH OUTPUT: 'expanded'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"6. Basic Where Am I",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu expanded TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu expanded TREE LEV', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu tree item.'",
"SPEECH OUTPUT: '2 of 3.'",
"SPEECH OUTPUT: 'expanded tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"7. Down Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame GNOME TREE LEVEL 3'",
" VISIBLE: 'GNOME TREE LEVEL 3', cursor=1",
"SPEECH OUTPUT: 'GNOME.'",
"SPEECH OUTPUT: 'tree level 3'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"8. Basic Where Am I",
["BRAILLE LINE: 'Firefox application Library frame GNOME TREE LEVEL 3'",
" VISIBLE: 'GNOME TREE LEVEL 3', cursor=1",
"SPEECH OUTPUT: 'GNOME tree item.'",
"SPEECH OUTPUT: '1 of 2.'",
"SPEECH OUTPUT: 'tree level 3'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"9. Up Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu expanded TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu expanded TREE LEV', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu.'",
"SPEECH OUTPUT: 'expanded.'",
"SPEECH OUTPUT: 'tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Left"))
sequence.append(utils.AssertPresentationAction(
"10. Left Arrow to collapse folder",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu collapsed TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu collapsed TREE LE', cursor=1",
"SPEECH OUTPUT: 'collapsed'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"11. Up Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Toolbar TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Toolbar TREE LEVEL 2', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Toolbar.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"12. Up Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame All Bookmarks expanded TREE LEVEL 1'",
" VISIBLE: 'All Bookmarks expanded TREE LEVE', cursor=1",
"SPEECH OUTPUT: 'All Bookmarks.'",
"SPEECH OUTPUT: 'expanded.'",
"SPEECH OUTPUT: 'tree level 1'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.AssertPresentationAction(
"13. Tab back to tree table",
["BRAILLE LINE: 'Firefox application Library frame tree table Bookmarks Toolbar table row TREE LEVEL 1'",
" VISIBLE: 'Bookmarks Toolbar table row TR', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Toolbar '"]))
sequence.append(KeyComboAction("<Alt>F4"))
sequence.append(utils.AssertionSummaryAction())
sequence.start()
| GNOME/orca | test/keystrokes/firefox/ui_role_tree.py | Python | lgpl-2.1 | 5,957 |
# This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
        if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
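# Illustrative usage sketch (not part of the module; "LDAP" is assumed to be
# the name of a backend class registered on the "gosa.object.backend"
# entry point):
#
#   registry = ObjectBackendRegistry.getInstance()
#   uuid = registry.dn2uuid("LDAP", "cn=admin,dc=example,dc=net",
#                           from_db_only=True)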
| gonicus/gosa | backend/src/gosa/backend/objects/backend/registry.py | Python | lgpl-2.1 | 2,337 |
# -*- coding: utf-8 -*-
"""Additional helper functions for the optlang solvers.
All functions integrate well with the context manager, meaning that
all operations defined here are automatically reverted when used in a
`with model:` block.
The functions defined here together with the existing model functions should
allow you to implement custom flux analysis methods with ease.
"""
from __future__ import absolute_import
import re
from functools import partial
from collections import namedtuple
from types import ModuleType
from warnings import warn
import optlang
from optlang.symbolics import Basic, Zero
from cobra.exceptions import OptimizationError, OPTLANG_TO_EXCEPTIONS_DICT
from cobra.util.context import get_context
class SolverNotFound(Exception):
"""A simple Exception when a solver can not be found."""
pass
# Define all the solvers that are found in optlang.
solvers = {match.split("_")[0]: getattr(optlang, match)
for match in dir(optlang) if "_interface" in match}
# Defines all the QP solvers implemented in optlang.
qp_solvers = ["cplex"] # QP in gurobi not implemented yet
def linear_reaction_coefficients(model, reactions=None):
"""Coefficient for the reactions in a linear objective.
Parameters
----------
model : cobra model
the model object that defined the objective
reactions : list
an optional list for the reactions to get the coefficients for. All
reactions if left missing.
Returns
-------
dict
A dictionary where the key is the reaction object and the value is
the corresponding coefficient. Empty dictionary if there are no
linear terms in the objective.
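    Examples
    --------
    A hedged sketch (assumes the bundled "textbook" test model, whose
    objective is the biomass reaction):
    >>> from cobra.test import create_test_model
    >>> model = create_test_model("textbook")  # doctest: +SKIP
    >>> linear_reaction_coefficients(model)  # doctest: +SKIP
    {<Reaction Biomass_Ecoli_core at 0x...>: 1.0}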
"""
linear_coefficients = {}
reactions = model.reactions if not reactions else reactions
try:
objective_expression = model.solver.objective.expression
coefficients = objective_expression.as_coefficients_dict()
except AttributeError:
return linear_coefficients
for rxn in reactions:
forward_coefficient = coefficients.get(rxn.forward_variable, 0)
reverse_coefficient = coefficients.get(rxn.reverse_variable, 0)
if forward_coefficient != 0:
if forward_coefficient == -reverse_coefficient:
linear_coefficients[rxn] = float(forward_coefficient)
return linear_coefficients
def _valid_atoms(model, expression):
"""Check whether a sympy expression references the correct variables.
Parameters
----------
model : cobra.Model
The model in which to check for variables.
expression : sympy.Basic
A sympy expression.
Returns
-------
boolean
True if all referenced variables are contained in model, False
otherwise.
"""
atoms = expression.atoms(optlang.interface.Variable)
return all(a.problem is model.solver for a in atoms)
def set_objective(model, value, additive=False):
"""Set the model objective.
Parameters
----------
model : cobra model
The model to set the objective for
value : model.problem.Objective,
e.g. optlang.glpk_interface.Objective, sympy.Basic or dict
If the model objective is linear, the value can be a new Objective
object or a dictionary with linear coefficients where each key is a
reaction and the element the new coefficient (float).
If the objective is not linear and `additive` is true, only values
of class Objective.
additive : bool
If true, add the terms to the current objective, otherwise start with
an empty objective.
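    Examples
    --------
    Illustrative only (assumes ``model`` is an existing cobra model
    containing a reaction with id ``ACALD``):
    >>> rxn = model.reactions.get_by_id("ACALD")  # doctest: +SKIP
    >>> set_objective(model, {rxn: 1.0})  # doctest: +SKIP
    >>> set_objective(model, {rxn: 0.5}, additive=True)  # doctest: +SKIP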
"""
interface = model.problem
reverse_value = model.solver.objective.expression
reverse_value = interface.Objective(
reverse_value, direction=model.solver.objective.direction,
sloppy=True)
if isinstance(value, dict):
if not model.objective.is_Linear:
raise ValueError('can only update non-linear objectives '
'additively using object of class '
'model.problem.Objective, not %s' %
type(value))
if not additive:
model.solver.objective = interface.Objective(
Zero, direction=model.solver.objective.direction)
for reaction, coef in value.items():
model.solver.objective.set_linear_coefficients(
{reaction.forward_variable: coef,
reaction.reverse_variable: -coef})
elif isinstance(value, (Basic, optlang.interface.Objective)):
if isinstance(value, Basic):
value = interface.Objective(
value, direction=model.solver.objective.direction,
sloppy=False)
# Check whether expression only uses variables from current model
# clone the objective if not, faster than cloning without checking
if not _valid_atoms(model, value.expression):
value = interface.Objective.clone(value, model=model.solver)
if not additive:
model.solver.objective = value
else:
model.solver.objective += value.expression
else:
raise TypeError(
'%r is not a valid objective for %r.' % (value, model.solver))
context = get_context(model)
if context:
def reset():
model.solver.objective = reverse_value
model.solver.objective.direction = reverse_value.direction
context(reset)
def interface_to_str(interface):
"""Give a string representation for an optlang interface.
Parameters
----------
interface : string, ModuleType
Full name of the interface in optlang or cobra representation.
For instance 'optlang.glpk_interface' or 'optlang-glpk'.
Returns
-------
string
The name of the interface as a string
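    Examples
    --------
    >>> interface_to_str("optlang.glpk_interface")
    'glpk'
    >>> interface_to_str("optlang-cplex")
    'cplex'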
"""
if isinstance(interface, ModuleType):
interface = interface.__name__
return re.sub(r"optlang.|.interface", "", interface)
def get_solver_name(mip=False, qp=False):
"""Select a solver for a given optimization problem.
Parameters
----------
mip : bool
Does the solver require mixed integer linear programming capabilities?
qp : bool
Does the solver require quadratic programming capabilities?
Returns
-------
string
The name of feasible solver.
Raises
------
SolverNotFound
If no suitable solver could be found.
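    Examples
    --------
    Output depends on which optlang solvers are installed:
    >>> get_solver_name()  # doctest: +SKIP
    'glpk'
    >>> get_solver_name(qp=True)  # doctest: +SKIP
    'cplex'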
"""
if len(solvers) == 0:
raise SolverNotFound("no solvers installed")
# Those lists need to be updated as optlang implements more solvers
mip_order = ["gurobi", "cplex", "glpk"]
lp_order = ["glpk", "cplex", "gurobi"]
qp_order = ["cplex"]
if mip is False and qp is False:
for solver_name in lp_order:
if solver_name in solvers:
return solver_name
# none of them are in the list order - so return the first one
return list(solvers)[0]
elif qp: # mip does not yet matter for this determination
for solver_name in qp_order:
if solver_name in solvers:
return solver_name
raise SolverNotFound("no qp-capable solver found")
else:
for solver_name in mip_order:
if solver_name in solvers:
return solver_name
raise SolverNotFound("no mip-capable solver found")
def choose_solver(model, solver=None, qp=False):
"""Choose a solver given a solver name and model.
This will choose a solver compatible with the model and required
capabilities. Also respects model.solver where it can.
Parameters
----------
model : a cobra model
The model for which to choose the solver.
solver : str, optional
The name of the solver to be used. Optlang solvers should be prefixed
by "optlang-", for instance "optlang-glpk".
qp : boolean, optional
Whether the solver needs Quadratic Programming capabilities.
Returns
-------
legacy : boolean
Whether the returned solver is a legacy (old cobra solvers) version or
an optlang solver (legacy = False).
solver : a cobra or optlang solver interface
Returns a valid solver for the problem. May be a cobra solver or an
optlang interface.
Raises
------
SolverNotFound
If no suitable solver could be found.
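    Examples
    --------
    A sketch (assumes ``model`` is an existing cobra model):
    >>> legacy, solver = choose_solver(model, solver="optlang-glpk")  # doctest: +SKIP
    >>> legacy  # doctest: +SKIP
    False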
"""
legacy = False
if solver is None:
solver = model.problem
elif "optlang-" in solver:
solver = interface_to_str(solver)
solver = solvers[solver]
else:
legacy = True
solver = legacy_solvers.solver_dict[solver]
# Check for QP, raise error if no QP solver found
# optlang only since old interface interprets None differently
if qp and interface_to_str(solver) not in qp_solvers:
solver = solvers[get_solver_name(qp=True)]
return legacy, solver
def add_cons_vars_to_problem(model, what, **kwargs):
"""Add variables and constraints to a Model's solver object.
Useful for variables and constraints that can not be expressed with
reactions and lower/upper bounds. Will integrate with the Model's context
manager in order to revert changes upon leaving the context.
Parameters
----------
model : a cobra model
The model to which to add the variables and constraints.
what : list or tuple of optlang variables or constraints.
The variables or constraints to add to the model. Must be of class
`model.problem.Variable` or
`model.problem.Constraint`.
**kwargs : keyword arguments
passed to solver.add()
"""
context = get_context(model)
model.solver.add(what, **kwargs)
if context:
context(partial(model.solver.remove, what))
def remove_cons_vars_from_problem(model, what):
"""Remove variables and constraints from a Model's solver object.
Useful to temporarily remove variables and constraints from a Models's
solver object.
Parameters
----------
model : a cobra model
The model from which to remove the variables and constraints.
what : list or tuple of optlang variables or constraints.
The variables or constraints to remove from the model. Must be of
class `model.problem.Variable` or
`model.problem.Constraint`.
"""
context = get_context(model)
model.solver.remove(what)
if context:
context(partial(model.solver.add, what))
def add_absolute_expression(model, expression, name="abs_var", ub=None,
difference=0, add=True):
"""Add the absolute value of an expression to the model.
Also defines a variable for the absolute value that can be used in other
objectives or constraints.
Parameters
----------
model : a cobra model
The model to which to add the absolute expression.
expression : A sympy expression
Must be a valid expression within the Model's solver object. The
absolute value is applied automatically on the expression.
name : string
The name of the newly created variable.
ub : positive float
The upper bound for the variable.
difference : positive float
The difference between the expression and the variable.
add : bool
Whether to add the variable to the model at once.
Returns
-------
namedtuple
A named tuple with variable and two constraints (upper_constraint,
lower_constraint) describing the new variable and the constraints
that assign the absolute value of the expression to it.
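    Notes
    -----
    The absolute value is linearized with two constraints,
    ``expression - variable <= difference`` and
    ``expression + variable >= difference``; with the default
    ``difference=0`` they jointly enforce ``variable >= |expression|``.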
"""
Components = namedtuple('Components', ['variable', 'upper_constraint',
'lower_constraint'])
variable = model.problem.Variable(name, lb=0, ub=ub)
# The following constraints enforce variable > expression and
# variable > -expression
    upper_constraint = model.problem.Constraint(expression - variable,
                                                ub=difference,
                                                name="abs_pos_" + name)
lower_constraint = model.problem.Constraint(expression + variable,
lb=difference,
name="abs_neg_" + name)
to_add = Components(variable, upper_constraint, lower_constraint)
if add:
add_cons_vars_to_problem(model, to_add)
return to_add
def fix_objective_as_constraint(model, fraction=1, bound=None,
name='fixed_objective_{}'):
"""Fix current objective as an additional constraint.
When adding constraints to a model, such as done in pFBA which
minimizes total flux, these constraints can become too powerful,
resulting in solutions that satisfy optimality but sacrifices too
much for the original objective function. To avoid that, we can fix
the current objective value as a constraint to ignore solutions that
give a lower (or higher depending on the optimization direction)
objective value than the original model.
When done with the model as a context, the modification to the
objective will be reverted when exiting that context.
Parameters
----------
model : cobra.Model
The model to operate on
fraction : float
The fraction of the optimum the objective is allowed to reach.
bound : float, None
The bound to use instead of fraction of maximum optimal value. If
not None, fraction is ignored.
name : str
Name of the objective. May contain one `{}` placeholder which is filled
with the name of the old objective.
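    Examples
    --------
    A sketch (assumes ``model`` is an optimizable cobra model); inside the
    context, solutions must achieve at least 90% of the original optimum:
    >>> with model:  # doctest: +SKIP
    ...     fix_objective_as_constraint(model, fraction=0.9)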
"""
fix_objective_name = name.format(model.objective.name)
if fix_objective_name in model.constraints:
model.solver.remove(fix_objective_name)
if bound is None:
bound = model.slim_optimize(error_value=None) * fraction
if model.objective.direction == 'max':
ub, lb = None, bound
else:
ub, lb = bound, None
constraint = model.problem.Constraint(
model.objective.expression,
name=fix_objective_name, ub=ub, lb=lb)
add_cons_vars_to_problem(model, constraint, sloppy=True)
def check_solver_status(status, raise_error=False):
"""Perform standard checks on a solver's status."""
if status == optlang.interface.OPTIMAL:
return
elif status == optlang.interface.INFEASIBLE and not raise_error:
warn("solver status is '{}'".format(status), UserWarning)
elif status is None:
raise RuntimeError(
"model was not optimized yet or solver context switched")
else:
raise OptimizationError("solver status is '{}'".format(status))
def assert_optimal(model, message='optimization failed'):
"""Assert model solver status is optimal.
Do nothing if model solver status is optimal, otherwise throw
appropriate exception depending on the status.
Parameters
----------
model : cobra.Model
The model to check the solver status for.
message : str (optional)
Message to for the exception if solver status was not optimal.
"""
if model.solver.status != optlang.interface.OPTIMAL:
raise OPTLANG_TO_EXCEPTIONS_DICT[model.solver.status](message)
import cobra.solvers as legacy_solvers # noqa
| zakandrewking/cobrapy | cobra/util/solver.py | Python | lgpl-2.1 | 15,435 |
from __future__ import absolute_import
import json
class JSONRenderer:
"""
Renders a mystery as JSON
"""
def render(self, mystery):
return json.dumps(mystery.encode(), indent=4)
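# Usage sketch (illustrative; assumes ``mystery`` exposes the ``encode()``
# method this renderer relies on):
#
#   renderer = JSONRenderer()
#   print(renderer.render(mystery))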
| chjacobsen/mystery-murder-generator | mmgen/renderers/json.py | Python | lgpl-3.0 | 204 |
"""General-use classes to interact with the ApplicationAutoScaling service through CloudFormation.
See Also:
`AWS developer guide for ApplicationAutoScaling
<https://docs.aws.amazon.com/autoscaling/application/APIReference/Welcome.html>`_
"""
# noinspection PyUnresolvedReferences
from .._raw import applicationautoscaling as _raw
# noinspection PyUnresolvedReferences
from .._raw.applicationautoscaling import *
| garyd203/flying-circus | src/flyingcircus/service/applicationautoscaling.py | Python | lgpl-3.0 | 424 |
#!/usr/bin/env python
import sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', type=str, action='store', dest='input', default=None, help="Input file")
args = parser.parse_args()
stats = dict()
    if args.input is None:
        print "Error: No input file"
        sys.exit(1)
    with open(args.input) as in_file:
for line in in_file.readlines():
time = int(line.split()[0])
tx_bytes = int(line.split()[1])
stats[time] = tx_bytes
stats = sorted(stats.items())
start_time = stats[0][0]
prev_tx = stats[0][1]
no_traffic_flag = True
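    # Traffic is considered "started" once tx_bytes jumps by more than
    # 100 KB between samples; from then on, print time and byte counts
    # relative to the sample where traffic began.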
for time, tx_bytes in stats:
if no_traffic_flag:
if tx_bytes > (prev_tx+100000):
no_traffic_flag = False
start_time, prev_tx = time, tx_bytes
else:
print (time-start_time), (tx_bytes-prev_tx)
prev_tx = tx_bytes
if __name__ == "__main__":
main()
| merlin-lang/kulfi | experiments/testbed/results/plot/sort.py | Python | lgpl-3.0 | 989 |
# -*- coding: utf-8 -*-
"""digitalocean API to manage droplets"""
__version__ = "1.16.0"
__author__ = "Lorenzo Setale ( http://who.is.lorenzo.setale.me/? )"
__author_email__ = "lorenzo@setale.me"
__license__ = "LGPL v3"
__copyright__ = "Copyright (c) 2012-2020 Lorenzo Setale"
from .Manager import Manager
from .Droplet import Droplet, DropletError, BadKernelObject, BadSSHKeyFormat
from .Region import Region
from .Size import Size
from .Image import Image
from .Action import Action
from .Account import Account
from .Balance import Balance
from .Domain import Domain
from .Record import Record
from .SSHKey import SSHKey
from .Kernel import Kernel
from .FloatingIP import FloatingIP
from .Volume import Volume
from .baseapi import Error, EndPointError, TokenError, DataReadError, NotFoundError
from .Tag import Tag
from .LoadBalancer import LoadBalancer
from .LoadBalancer import StickySessions, ForwardingRule, HealthCheck
from .Certificate import Certificate
from .Snapshot import Snapshot
from .Project import Project
from .Firewall import Firewall, InboundRule, OutboundRule, Destinations, Sources
from .VPC import VPC
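# Quick usage sketch (illustrative; the token is a placeholder, not a real
# credential):
#
#   import digitalocean
#   manager = digitalocean.Manager(token="<your-api-token>")
#   for droplet in manager.get_all_droplets():
#       print(droplet.name, droplet.status)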
| koalalorenzo/python-digitalocean | digitalocean/__init__.py | Python | lgpl-3.0 | 1,128 |
# Copyright (c) 2010 by Yaco Sistemas <pmartin@yaco.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this programe. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('autoreports.views',
url(r'^ajax/fields/tree/$', 'reports_ajax_fields', name='reports_ajax_fields'),
url(r'^ajax/fields/options/$', 'reports_ajax_fields_options', name='reports_ajax_fields_options'),
url(r'^(category/(?P<category_key>[\w-]+)/)?$', 'reports_list', name='reports_list'),
url(r'^(?P<registry_key>[\w-]+)/$', 'reports_api', name='reports_api'),
url(r'^(?P<registry_key>[\w-]+)/(?P<report_id>\d+)/$', 'reports_api', name='reports_api'),
url(r'^(?P<registry_key>[\w-]+)/reports/$', 'reports_api_list', name='reports_api_list'),
url(r'^(?P<registry_key>[\w-]+)/wizard/$', 'reports_api_wizard', name='reports_api_wizard'),
url(r'^(?P<registry_key>[\w-]+)/wizard/(?P<report_id>\d+)/$', 'reports_api_wizard', name='reports_api_wizard'),
url(r'^(?P<app_name>[\w-]+)/(?P<model_name>[\w-]+)/$', 'reports_view', name='reports_view'),
)
| Yaco-Sistemas/django-autoreports | autoreports/urls.py | Python | lgpl-3.0 | 1,686 |
# encoding: utf-8
from __future__ import absolute_import, unicode_literals
from apiview.model import AbstractUserMixin, BaseModel
from django.contrib.auth.base_user import AbstractBaseUser
from django.db import models
class User(AbstractUserMixin, BaseModel, AbstractBaseUser):
is_staff = False
def get_short_name(self):
return self.name
def get_full_name(self):
return self.nickname
USERNAME_FIELD = 'username'
    username = models.CharField('username', unique=True, max_length=64, editable=False, null=False, blank=False)
    password = models.CharField('password', max_length=128, unique=True, editable=False, null=False, blank=True)
    nickname = models.CharField('nickname', unique=True, max_length=64, editable=False, null=False, blank=False)
class Meta:
db_table = 'example_user'
app_label = 'example_app'
        verbose_name = verbose_name_plural = "user"
| 007gzs/django_restframework_apiview | example/example_app/models.py | Python | lgpl-3.0 | 925 |
# -*- coding: utf-8 -*-
# Copyright(C) 2014 smurail
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import re
from weboob.exceptions import BrowserIncorrectPassword
from weboob.browser.pages import HTMLPage, JsonPage, pagination, LoggedPage
from weboob.browser.elements import ListElement, ItemElement, TableElement, method
from weboob.browser.filters.standard import CleanText, CleanDecimal, DateGuesser, Env, Field, Filter, Regexp, Currency, Date
from weboob.browser.filters.html import Link, Attr, TableCell
from weboob.capabilities.bank import Account, Investment
from weboob.capabilities.base import NotAvailable
from weboob.tools.capabilities.bank.transactions import FrenchTransaction
from weboob.tools.compat import urljoin
from weboob.tools.capabilities.bank.investments import is_isin_valid
__all__ = ['LoginPage']
class UselessPage(HTMLPage):
pass
class PasswordCreationPage(HTMLPage):
def get_message(self):
xpath = '//div[@class="bienvenueMdp"]/following-sibling::div'
return '%s%s' % (CleanText(xpath + '/strong')(self.doc), CleanText(xpath, children=False)(self.doc))
class ErrorPage(HTMLPage):
pass
class SubscriptionPage(LoggedPage, JsonPage):
pass
class LoginPage(HTMLPage):
pass
class CMSOPage(HTMLPage):
@property
def logged(self):
if len(self.doc.xpath('//b[text()="Session interrompue"]')) > 0:
return False
return True
class AccountsPage(CMSOPage):
TYPES = {'COMPTE CHEQUES': Account.TYPE_CHECKING,
'COMPTE TITRES': Account.TYPE_MARKET,
"ACTIV'EPARGNE": Account.TYPE_SAVINGS,
"TRESO'VIV": Account.TYPE_SAVINGS,
}
@method
class iter_accounts(ListElement):
item_xpath = '//div[has-class("groupe-comptes")]//li'
class item(ItemElement):
klass = Account
class Type(Filter):
def filter(self, label):
for pattern, actype in AccountsPage.TYPES.items():
if label.startswith(pattern):
return actype
return Account.TYPE_UNKNOWN
obj__history_url = Link('.//a[1]')
obj_id = CleanText('.//span[has-class("numero-compte")]') & Regexp(pattern=r'(\d{3,}[\w]+)', default='')
obj_label = CleanText('.//span[has-class("libelle")][1]')
obj_currency = Currency('//span[has-class("montant")]')
obj_balance = CleanDecimal('.//span[has-class("montant")]', replace_dots=True)
obj_type = Type(Field('label'))
# Last numbers replaced with XX... or we have to send sms to get RIB.
obj_iban = NotAvailable
            # some accounts may appear in multiple areas; the area they come from is indicated
obj__owner = CleanText('(./preceding-sibling::tr[@class="LnMnTiers"])[last()]')
def validate(self, obj):
if obj.id is None:
obj.id = obj.label.replace(' ', '')
return True
def on_load(self):
if self.doc.xpath('//p[contains(text(), "incident technique")]'):
raise BrowserIncorrectPassword("Vous n'avez aucun compte sur cet espace. " \
"Veuillez choisir un autre type de compte.")
class InvestmentPage(CMSOPage):
def has_error(self):
return CleanText('//span[@id="id_error_msg"]')(self.doc)
@method
class iter_accounts(ListElement):
item_xpath = '//table[@class="Tb" and tr[1][@class="LnTit"]]/tr[@class="LnA" or @class="LnB"]'
class item(ItemElement):
klass = Account
def obj_id(self):
area_id = Regexp(CleanText('(./preceding-sibling::tr[@class="LnMnTiers"][1])//span[@class="CelMnTiersT1"]'),
r'\((\d+)\)', default='')(self)
acc_id = Regexp(CleanText('./td[1]'), r'(\d+)\s*(\d+)', r'\1\2')(self)
if area_id:
return '%s.%s' % (area_id, acc_id)
return acc_id
def obj__formdata(self):
js = Attr('./td/a[1]', 'onclick', default=None)(self)
if js is None:
return
args = re.search(r'\((.*)\)', js).group(1).split(',')
form = args[0].strip().split('.')[1]
idx = args[2].strip()
idroot = args[4].strip().replace("'", "")
return (form, idx, idroot)
obj_url = Link('./td/a[1]', default=None)
def go_account(self, form, idx, idroot):
form = self.get_form(name=form)
form['indiceCompte'] = idx
form['idRacine'] = idroot
form.submit()
class CmsoTableElement(TableElement):
head_xpath = '//table[has-class("Tb")]/tr[has-class("LnTit")]/td'
item_xpath = '//table[has-class("Tb")]/tr[has-class("LnA") or has-class("LnB")]'
class InvestmentAccountPage(CMSOPage):
@method
class iter_investments(CmsoTableElement):
col_label = 'Valeur'
col_code = 'Code'
col_quantity = 'Qté'
col_unitvalue = 'Cours'
col_valuation = 'Valorisation'
col_vdate = 'Date cours'
class item(ItemElement):
klass = Investment
obj_label = CleanText(TableCell('label'))
obj_quantity = CleanDecimal(TableCell('quantity'), replace_dots=True)
obj_unitvalue = CleanDecimal(TableCell('unitvalue'), replace_dots=True)
obj_valuation = CleanDecimal(TableCell('valuation'), replace_dots=True)
obj_vdate = Date(CleanText(TableCell('vdate')), dayfirst=True, default=NotAvailable)
def obj_code(self):
if Field('label')(self) == "LIQUIDITES":
return 'XX-liquidity'
code = CleanText(TableCell('code'))(self)
return code if is_isin_valid(code) else NotAvailable
def obj_code_type(self):
return Investment.CODE_TYPE_ISIN if is_isin_valid(Field('code')(self)) else NotAvailable
class Transaction(FrenchTransaction):
PATTERNS = [(re.compile(r'^RET DAB (?P<dd>\d{2})/?(?P<mm>\d{2})(/?(?P<yy>\d{2}))? (?P<text>.*)'),
FrenchTransaction.TYPE_WITHDRAWAL),
(re.compile(r'CARTE (?P<dd>\d{2})/(?P<mm>\d{2}) (?P<text>.*)'),
FrenchTransaction.TYPE_CARD),
(re.compile(r'^(?P<category>VIR(EMEN)?T? (SEPA)?(RECU|FAVEUR)?)( /FRM)?(?P<text>.*)'),
FrenchTransaction.TYPE_TRANSFER),
(re.compile(r'^PRLV (?P<text>.*)( \d+)?$'), FrenchTransaction.TYPE_ORDER),
(re.compile(r'^(CHQ|CHEQUE) .*$'), FrenchTransaction.TYPE_CHECK),
(re.compile(r'^(AGIOS /|FRAIS) (?P<text>.*)'), FrenchTransaction.TYPE_BANK),
(re.compile(r'^(CONVENTION \d+ |F )?COTIS(ATION)? (?P<text>.*)'),
FrenchTransaction.TYPE_BANK),
(re.compile(r'^REMISE (?P<text>.*)'), FrenchTransaction.TYPE_DEPOSIT),
(re.compile(r'^(?P<text>.*)( \d+)? QUITTANCE .*'),
FrenchTransaction.TYPE_ORDER),
(re.compile(r'^.* LE (?P<dd>\d{2})/(?P<mm>\d{2})/(?P<yy>\d{2})$'),
FrenchTransaction.TYPE_UNKNOWN),
(re.compile(r'^.* PAIEMENT (?P<dd>\d{2})/(?P<mm>\d{2}) (?P<text>.*)'),
FrenchTransaction.TYPE_UNKNOWN),
]
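    # Example (hypothetical label): "RET DAB 05/06 PARIS" matches the first
    # pattern above, so it is typed as TYPE_WITHDRAWAL with dd=05 and mm=06.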
class CmsoTransactionElement(ItemElement):
klass = Transaction
def condition(self):
return len(self.el) >= 5 and not self.el.get('id', '').startswith('libelleLong')
class HistoryPage(CMSOPage):
def get_date_range_list(self):
return [d for d in self.doc.xpath('//select[@name="date"]/option/@value') if d]
@pagination
@method
class iter_history(ListElement):
item_xpath = '//div[contains(@class, "master-table")]//ul/li'
def next_page(self):
pager = self.page.doc.xpath('//div[@class="pager"]')
                if pager:  # there is more than one page only if there are enough transactions
assert len(pager) == 1
next_links = pager[0].xpath('./span/following-sibling::a[@class="page"]')
if next_links:
url_next_page = Link('.')(next_links[0])
url_next_page = urljoin(self.page.url, url_next_page)
return self.page.browser.build_request(url_next_page)
class item(CmsoTransactionElement):
def date(selector):
return DateGuesser(Regexp(CleanText(selector), r'\w+ (\d{2}/\d{2})'), Env('date_guesser')) | Transaction.Date(selector)
            # CAUTION: this website writes a 'Date valeur' inside a div with class 'c-ope'
            # and a 'Date opération' inside a div with class 'c-val',
            # so 'c-val' is assumed to hold the real operation date and 'c-ope' the value date
obj_date = date('./div[contains(@class, "c-val")]')
obj_vdate = date('./div[contains(@class, "c-ope")]')
obj_raw = Transaction.Raw(Regexp(CleanText('./div[contains(@class, "c-libelle-long")]'), r'Libellé étendu (.+)'))
obj_amount = Transaction.Amount('./div[contains(@class, "c-credit")]', './div[contains(@class, "c-debit")]')
class UpdateTokenMixin(object):
def on_load(self):
if 'Authentication' in self.response.headers:
self.browser.token = self.response.headers['Authentication'].split(' ')[-1]
class SSODomiPage(JsonPage, UpdateTokenMixin):
def get_sso_url(self):
return self.doc['urlSSO']
class AuthCheckUser(HTMLPage):
pass
| laurentb/weboob | modules/cmso/pro/pages.py | Python | lgpl-3.0 | 10,804 |
# Copyright (C) 2014 Optiv, Inc. (brad.spengler@optiv.com)
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
from lib.cuckoo.common.abstracts import Signature
class InjectionRWX(Signature):
name = "injection_rwx"
description = "Creates RWX memory"
severity = 2
confidence = 50
categories = ["injection"]
authors = ["Optiv"]
minimum = "1.2"
evented = True
    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)
    # class-level filters; as plain locals inside __init__ they would never be
    # seen by the signature engine
    filter_apinames = set(["NtAllocateVirtualMemory", "NtProtectVirtualMemory", "VirtualProtectEx"])
    filter_analysistypes = set(["file"])
def on_call(self, call, process):
if call["api"] == "NtAllocateVirtualMemory" or call["api"] == "VirtualProtectEx":
protection = self.get_argument(call, "Protection")
# PAGE_EXECUTE_READWRITE
if protection == "0x00000040":
return True
elif call["api"] == "NtProtectVirtualMemory":
protection = self.get_argument(call, "NewAccessProtection")
# PAGE_EXECUTE_READWRITE
if protection == "0x00000040":
return True
| lixiangning888/whole_project | modules/signatures_orginal_20151110/injection_rwx.py | Python | lgpl-3.0 | 1,229 |
# Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8 GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
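        # tester() is a closure factory: it binds each `code` value at call
        # time, avoiding the classic late-binding pitfall of referencing the
        # loop variable from inside the generated methods.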
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
| Cangjians/pycangjie | tests/__init__.py | Python | lgpl-3.0 | 6,379 |
# -*- coding: utf-8 -*-
"""
This module collects my utility functions
"""
__author__ = "Jiang Yu-Kuan <yukuan.jiang@gmail.com>"
__date__ = "2016/02/08 (initial version) ~ 2019/04/17 (last revision)"
import re
import os
import sys
#------------------------------------------------------------------------------
# File
#------------------------------------------------------------------------------
def save_utf8_file(fn, lines):
"""Save string lines into an UTF8 text files.
"""
with open(fn, "w") as out_file:
out_file.write("\n".join(lines).encode("utf-8"))
def main_basename(path):
r"""Return a main name of a basename of a given file path.
Example
-------
>>> main_basename('c:\code\langconv\MsgID.h')
'MsgID.h'
"""
base = os.path.basename(path)
base_main, _base_ext = os.path.splitext(base)
return base_main
#------------------------------------------------------------------------------
# Math
#------------------------------------------------------------------------------
def is_numeric(text):
    """Check if a string evaluates to a number (uses eval; trusted input only)."""
    try:
        int(eval(text))
    except Exception:
        return False
    return True
#------------------------------------------------------------------------------
# String
#------------------------------------------------------------------------------
def replace_chars(text, replaced_pairs='', deleted_chars=''):
"""Return a char replaced text.
Arguments
---------
text -- the text
replaced_pairs -- the replaced chars
Example
-------
>>> replaced = [('a','b'), ('c','d')]
>>> removed = 'e'
>>> replace_chars('abcde', replaced, removed)
'bbdd'
"""
for old, new in replaced_pairs:
text = text.replace(old, new)
for ch in deleted_chars:
text = text.replace(ch, '')
return text
def camel_case(string):
"""Return camel case string from a space-separated string.
Example
-------
>>> camel_case('good job')
'GoodJob'
"""
return ''.join(w.capitalize() for w in string.split())
def replace_punctuations(text):
"""Replace punctuation characters with abbreviations for a string.
"""
punctuations = [
('?', 'Q'), # Q: question mark
('.', 'P'), # P: period; full stop
('!', 'E'), # E: exclamation mark
("'", 'SQ'), # SQ: single quotation mark; single quote
('"', 'DQ'), # DQ: double quotation mark; double quotes
('(', 'LP'), # LP: left parenthese
(')', 'RP'), # RP: right parenthese
(':', 'Cn'), # Cn: colon
(',', 'Ca'), # Ca: comma
(';', 'S'), # S: semicolon
]
deleted = '+-*/^=%$#@|\\<>{}[]'
return replace_chars(text, punctuations, deleted)
def remain_alnum(text):
"""Remain digits and English letters of a string.
"""
return ''.join(c for c in text if c.isalnum()
and ord(' ') <= ord(c) <= ord('z'))
#------------------------------------------------------------------------------
# For code generation
#------------------------------------------------------------------------------
def c_identifier(text):
"""Convert input text into an legal identifier in C.
Example
-------
>>> c_identifier("Hello World")
'HelloWorld'
>>> c_identifier("Anti-Shake")
'Antishake'
"""
if ' ' in text:
text = camel_case(text)
text = re.sub(r'\+\d+', lambda x: x.group().replace('+', 'P'), text)
text = re.sub(r'\-\d+', lambda x: x.group().replace('-', 'N'), text)
text = replace_punctuations(text)
return remain_alnum(text)
def wrap_header_guard(lines, h_fn):
"""Wrap a C header guard for a given line list.
"""
def underscore(txt):
"""Return an under_scores text from a CamelCase text.
This function will leave a CamelCase text unchanged.
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', txt)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
h_fn_sig = '%s_H_' % underscore(main_basename(h_fn)).upper()
begin = ['#ifndef %s' % h_fn_sig]
begin += ['#define %s' % h_fn_sig, '', '']
end = ['', '', '#endif // %s' % h_fn_sig, '']
return begin + lines + end
def prefix_info(lines, software, version, author, comment_mark='//'):
"""Prefix information to the given lines with given comment-mark.
"""
prefix = ['%s Generated by the %s v%s' % (comment_mark,
software, version)]
prefix += ['%s !author: %s' % (comment_mark, author)]
prefix += ['%s !trail: %s %s' % (comment_mark,
os.path.basename(sys.argv[0]), ' '.join(sys.argv[1:]))]
return prefix + lines
| YorkJong/pyResourceLink | reslnk/myutil.py | Python | lgpl-3.0 | 4,850 |
" Settings for tests. "
from settings.project import *
# Databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
'USER': '',
'PASSWORD': '',
'TEST_CHARSET': 'utf8',
}}
# Caches
CACHES['default']['BACKEND'] = 'django.core.cache.backends.locmem.LocMemCache'
CACHES['default']['KEY_PREFIX'] = '_'.join((PROJECT_NAME, 'TST'))
# pymode:lint_ignore=W404
| klen/makesite | makesite/modules/django/settings/test.py | Python | lgpl-3.0 | 440 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"Visual Property Editor (using wx PropertyGrid) of gui2py's components"
__author__ = "Mariano Reingart (reingart@gmail.com)"
__copyright__ = "Copyright (C) 2013- Mariano Reingart"
__license__ = "LGPL 3.0"
# some parts where inspired or borrowed from wxFormBuilders & wxPython examples
import sys, time, math, os, os.path
import wx
_ = wx.GetTranslation
import wx.propgrid as wxpg
from gui.component import InitSpec, StyleSpec, Spec, EventSpec, DimensionSpec
from gui.font import Font
DEBUG = False
class PropertyEditorPanel(wx.Panel):
def __init__( self, parent, log ):
wx.Panel.__init__(self, parent, wx.ID_ANY)
self.log = log
self.callback = None
self.panel = panel = wx.Panel(self, wx.ID_ANY)
topsizer = wx.BoxSizer(wx.VERTICAL)
# Difference between using PropertyGridManager vs PropertyGrid is that
# the manager supports multiple pages and a description box.
self.pg = pg = wxpg.PropertyGrid(panel,
style=wxpg.PG_SPLITTER_AUTO_CENTER |
wxpg.PG_AUTO_SORT |
wxpg.PG_TOOLBAR)
# Show help as tooltips
pg.SetExtraStyle(wxpg.PG_EX_HELP_AS_TOOLTIPS)
pg.Bind( wxpg.EVT_PG_CHANGED, self.OnPropGridChange )
pg.Bind( wxpg.EVT_PG_PAGE_CHANGED, self.OnPropGridPageChange )
pg.Bind( wxpg.EVT_PG_SELECTED, self.OnPropGridSelect )
pg.Bind( wxpg.EVT_PG_RIGHT_CLICK, self.OnPropGridRightClick )
##pg.AddPage( "Page 1 - Testing All" )
# store the property grid for future reference
self.pg = pg
# load empty object (just draws categories)
self.load_object(None)
# sizing stuff:
topsizer.Add(pg, 1, wx.EXPAND)
panel.SetSizer(topsizer)
topsizer.SetSizeHints(panel)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(panel, 1, wx.EXPAND)
self.SetSizer(sizer)
self.SetAutoLayout(True)
def load_object(self, obj, callback=None):
pg = self.pg # get the property grid reference
self.callback = callback # store the update method
# delete all properties
pg.Clear()
# clean references and aux structures
appended = set()
self.obj = obj
self.groups = {}
# loop on specs and append each property (categorized):
for i, cat, class_ in ((1, 'Init Specs', InitSpec),
(2, 'Dimension Specs', DimensionSpec),
(3, 'Style Specs', StyleSpec),
(5, 'Events', EventSpec),
(4, 'Basic Specs', Spec),
):
pg.Append(wxpg.PropertyCategory("%s - %s" % (i, cat)))
if obj is None:
continue
specs = sorted(obj._meta.specs.items(), key=lambda it: it[0])
for name, spec in specs:
if DEBUG: print "setting prop", spec, class_, spec.type
if isinstance(spec, class_):
prop = {'string': wxpg.StringProperty,
'integer': wxpg.IntProperty,
'float': wxpg.FloatProperty,
'boolean': wxpg.BoolProperty,
'text': wxpg.LongStringProperty,
'code': wxpg.LongStringProperty,
'enum': wxpg.EnumProperty,
'edit_enum': wxpg.EditEnumProperty,
'expr': wxpg.StringProperty,
'array': wxpg.ArrayStringProperty,
'font': wxpg.FontProperty,
'image_file': wxpg.ImageFileProperty,
'colour': wxpg.ColourProperty}.get(spec.type)
if prop and name not in appended:
value = getattr(obj, name)
if DEBUG: print "name", name, value
if spec.type == "code" and value is None:
value = ""
if spec.type == "boolean" and value is None:
value = False
if spec.type == "integer" and value is None:
value = -1
if spec.type in ("string", "text") and value is None:
value = ""
if spec.type == "expr":
value = repr(value)
if spec.type == "font":
if value is None:
value = wx.NullFont
else:
value = value.get_wx_font()
if callable(value):
# event binded at runtime cannot be modified:
value = str(value)
readonly = True
else:
readonly = False
if spec.type == "enum":
prop = prop(name, name,
spec.mapping.keys(),
spec.mapping.values(),
value=spec.mapping.get(value, 0))
elif spec.type == "edit_enum":
prop = prop(name, name,
spec.mapping.keys(),
range(len(spec.mapping.values())),
value=spec.mapping[value])
else:
try:
prop = prop(name, value=value)
except Exception, e:
print "CANNOT LOAD PROPERTY", name, value, e
prop.SetPyClientData(spec)
appended.add(name)
if spec.group is None:
pg.Append(prop)
if readonly:
pg.SetPropertyReadOnly(prop)
else:
                    # create a group hierarchy (wxpg uses dot notation)
group = ""
prop_parent = None
for grp in spec.group.split("."):
prev_group = group # ancestor
group += ("." if group else "") + grp # path
if group in self.groups:
prop_parent = self.groups[group]
else:
prop_group = wxpg.StringProperty(grp,
value="<composed>")
if not prop_parent:
pg.Append(prop_group)
else:
pg.AppendIn(prev_group, prop_group)
prop_parent = prop_group
self.groups[group] = prop_parent
pg.SetPropertyReadOnly(group)
pg.AppendIn(spec.group, prop)
pg.Collapse(spec.group)
name = spec.group + "." + name
if spec.type == "boolean":
pg.SetPropertyAttribute(name, "UseCheckbox", True)
doc = spec.__doc__
if doc:
pg.SetPropertyHelpString(name, doc)
def edit(self, name=""):
"Programatically select a (default) property to start editing it"
# for more info see DoSelectAndEdit in propgrid.cpp
for name in (name, "label", "value", "text", "title", "filename",
"name"):
prop = self.pg.GetPropertyByName(name)
if prop is not None:
break
self.Parent.SetFocus()
self.Parent.Raise()
self.pg.SetFocus()
# give time to the ui to show the prop grid and set focus:
wx.CallLater(250, self.select, prop.GetName())
def select(self, name, flags=0):
"Select a property (and start the editor)"
# do not call this directly from another window, use edit() instead
# // wxPropertyGrid::DoSelectProperty flags (selFlags) -see propgrid.h-
wxPG_SEL_FOCUS=0x0001 # Focuses to created editor
wxPG_SEL_FORCE=0x0002 # Forces deletion and recreation of editor
flags |= wxPG_SEL_FOCUS # | wxPG_SEL_FORCE
prop = self.pg.GetPropertyByName(name)
self.pg.SelectProperty(prop, flags)
if DEBUG: print "selected!", prop
def OnPropGridChange(self, event):
p = event.GetProperty()
if DEBUG: print "change!", p
if p:
name = p.GetName()
spec = p.GetPyClientData()
if spec and 'enum' in spec.type:
value = p.GetValueAsString()
else:
value = p.GetValue()
#self.log.write(u'%s changed to "%s"\n' % (p,p.GetValueAsString()))
            # if it is a property child (parent.child), extract its name
if "." in name:
name = name[name.rindex(".") + 1:]
if spec and not name in self.groups:
if name == 'font': # TODO: detect property type
# create a gui font from the wx.Font
font = Font()
font.set_wx_font(value)
value = font
# expressions must be evaluated to store the python object
if spec.type == "expr":
value = eval(value)
# re-create the wx_object with the new property value
# (this is required at least to apply new styles and init specs)
if DEBUG: print "changed", self.obj.name
kwargs = {str(name): value}
wx.CallAfter(self.obj.rebuild, **kwargs)
if name == 'name':
wx.CallAfter(self.callback, **dict(name=self.obj.name))
def OnPropGridSelect(self, event):
p = event.GetProperty()
if p:
self.log.write(u'%s selected\n' % (event.GetProperty().GetName()))
else:
self.log.write(u'Nothing selected\n')
def OnDeleteProperty(self, event):
p = self.pg.GetSelectedProperty()
if p:
self.pg.DeleteProperty(p)
else:
wx.MessageBox("First select a property to delete")
def OnReserved(self, event):
pass
def OnPropGridRightClick(self, event):
p = event.GetProperty()
if p:
self.log.write(u'%s right clicked\n' % (event.GetProperty().GetName()))
else:
self.log.write(u'Nothing right clicked\n')
#self.obj.get_parent().Refresh()
def OnPropGridPageChange(self, event):
index = self.pg.GetSelectedPage()
self.log.write('Page Changed to \'%s\'\n' % (self.pg.GetPageName(index)))
if __name__ == '__main__':
import sys,os
app = wx.App()
f = wx.Frame(None)
from gui.controls import Button, Label, TextBox, CheckBox, ListBox, ComboBox
frame = wx.Frame(None)
#o = Button(frame, name="btnTest", label="click me!", default=True)
#o = Label(frame, name="lblTest", alignment="right", size=(-1, 500), text="hello!")
o = TextBox(frame, name="txtTest", border=False, text="hello world!")
#o = CheckBox(frame, name="chkTest", border='none', label="Check me!")
#o = ListBox(frame, name="lstTest", border='none',
# items={'datum1': 'a', 'datum2':'b', 'datum3':'c'},
# multiselect="--multiselect" in sys.argv)
#o = ComboBox(frame, name="cboTest",
# items={'datum1': 'a', 'datum2':'b', 'datum3':'c'},
# readonly='--readonly' in sys.argv,
# )
frame.Show()
log = sys.stdout
w = PropertyEditorPanel(f, log)
w.load_object(o)
f.Show()
app.MainLoop()
| reingart/gui2py | gui/tools/propeditor.py | Python | lgpl-3.0 | 12,658 |
#!/usr/bin/python3
import sys
from pathlib import Path
list_scope_path = Path("./list_scope_tokens.txt")
keyword_bit = 13
list_scope_bit = 14
def main():
if len(sys.argv) < 2:
print("Error: Must specify an argument of either 'tokens' or 'emitters'!", file=sys.stderr)
return 1
list_scopes = set()
with list_scope_path.open('r') as f:
for line in f:
line = line.strip()
if line.startswith('#') or len(line) == 0:
continue
list_scopes.add(line)
max_kw_len = max( len(kw) for kw in list_scopes )
if sys.argv[1] == 'tokens':
t_id = (1 << (keyword_bit - 1)) | (1 << (list_scope_bit-1))
for t in sorted(list_scopes):
print(' {:<{width}} = 0x{:4X};'.format(t.upper(), t_id, width=max_kw_len))
t_id += 1
elif sys.argv[1] == 'emitters':
for t in sorted(list_scopes):
print(' {:<{width}} => T_{}(Lexeme);'.format('"' + t + '"', t.upper(), width = max_kw_len + 2))
else:
print("Error: Must specify an argument of either 'tokens' or 'emitters'!", file=sys.stderr)
return 1
return 0
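# For a hypothetical scope token "traits", 'tokens' mode emits a line like:
#     TRAITS = 0x3000;
# (keyword bit 13 and list-scope bit 14 set in the base ID, then sequential
# values), while 'emitters' mode emits:
#     "traits" => T_TRAITS(Lexeme);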
if __name__ == '__main__':
sys.exit(main())
| zijistark/zckTools | src/zck/token_codegen.py | Python | lgpl-3.0 | 1,078 |
"""
Copyright (C) 2013 Matthew Woodruff
This script is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This script is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this script. If not, see <http://www.gnu.org/licenses/>.
===========================================================
Coming in: one of 36 algo/problem combinations. 50 seeds in
one file. Also the _Sobol file specifying the
parameterization for each row, as well as the parameters
file itself.
Going out: stats: mean, quantile, variance
grouped by parameterization
grouped by some or all 2d combinations of
parameters
"""
import argparse
import pandas
import numpy
import re
import os
import copy
def is_quantile(stat):
return re.match("q[0-9][0-9]?$", stat)
def is_stat(stat):
if stat in ["mean", "variance", "min", "max", "q100"]:
return stat
elif is_quantile(stat):
return stat
else:
raise argparse.ArgumentTypeError(
"Invalid statistic {0}".format(stat))
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument("data",
type=argparse.FileType("r"),
help="data file to be summarized."
"Should have columns seed, "\
"set, and metrics columns.")
parser.add_argument("parameterizations",
type=argparse.FileType("r"),
help="file containing parameter"\
"izations. Number of param"\
"eterizations should be the "\
"same as number of rows per "\
"seed in the data file."
)
parser.add_argument("parameters",
type=argparse.FileType("r"),
help="file describing parameters. "\
"Should have as many rows as "\
"parameterizations file has "\
"columns."
)
stats = ["mean", "variance", "q10", "q50", "q90"]
parser.add_argument("-s", "--stats", nargs="+",
default = stats, type = is_stat,
help="statistics to compute")
parser.add_argument("-g", "--group", nargs="+",
help="parameters by which to "\
"group. Names should be "\
"found in the parameters "\
"file. "
)
parser.add_argument("-d", "--deltas",
help="If group is specified, "\
"deltas may be used to impose "\
"grid boxes on the summary "\
"rather than using point "\
"values.",
nargs="+", type = float
)
parser.add_argument("-o", "--output-directory",
default="/gpfs/scratch/mjw5407/"
"task1/stats/"
)
return parser.parse_args()
def compute(data, stat):
if stat == "mean":
return data.mean()
if stat == "variance":
return data.var()
if is_quantile(stat):
quantile = float(stat[1:]) / 100.0
if quantile == 0.0:
return data.min()
return data.quantile(quantile)
if stat == "max" or stat == "q100":
return data.max()
if stat == "min":
return data.min()
def analyze(data, stats, group=None, deltas=None):
results = []
if group is None:
group = ["Set"]
togroupby = copy.copy(group)
ii = 0
if deltas is None:
togroupby = group
else:
while ii < len(group) and ii < len(deltas):
colname = "grid_{0}".format(group[ii])
gridnumbers = numpy.floor(data[group[ii]].apply(
lambda val: val / deltas[ii]))
data[colname] = gridnumbers.apply(
lambda val: val * deltas[ii])
togroupby[ii] = colname
ii += 1
print "analyzing grouped by {0}".format(group)
gb = data.groupby(togroupby)
for stat in stats:
print "computing {0}".format(stat)
tag = "{0}_{1}".format("_".join(group), stat)
results.append((tag, compute(gb, stat)))
return results
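# Example (hypothetical parameter): with group=["popsize"] and deltas=[100.0],
# popsize values in [0, 100) land in grid box 0.0 and values in [100, 200) in
# 100.0, so each statistic is computed per grid box rather than per exact
# parameter value.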
def write_result(infn, result, outputdir):
fn = "_".join([result[0], os.path.basename(infn)])
fn = re.sub("\.hv$", "", fn)
fn = os.path.join(outputdir, fn)
print "writing {0}".format(fn)
result[1].to_csv(fn, sep=" ", index=True)
def cli():
args = get_args()
data = pandas.read_table(args.data, sep=" ")
parameters = pandas.read_table(
args.parameters, sep=" ",
names=["name","low","high"],
header=None)
param_names = parameters["name"].values
parameterizations = pandas.read_table(
args.parameterizations,
sep=" ",
names = param_names,
header = None)
data = data.join(parameterizations, on=["Set"],
how="outer")
if args.deltas is not None:
deltas = args.deltas
else:
deltas = []
results = analyze(data, args.stats, args.group, deltas)
for result in results:
write_result(args.data.name, result,
args.output_directory)
if __name__ == "__main__":
cli()
# vim:ts=4:sw=4:expandtab:ai:colorcolumn=60:number:fdm=indent
| matthewjwoodruff/moeasensitivity | statistics/statistics.py | Python | lgpl-3.0 | 6,225 |
# (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\
fc_rules_cf_fc.build_auxiliary_coordinate`.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import numpy as np
import mock
from iris.coords import AuxCoord
from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \
build_auxiliary_coordinate
class TestBoundsVertexDim(tests.IrisTest):
def setUp(self):
# Create coordinate cf variables and pyke engine.
points = np.arange(6).reshape(2, 3)
self.cf_coord_var = mock.Mock(
dimensions=('foo', 'bar'),
cf_name='wibble',
standard_name=None,
long_name='wibble',
units='m',
shape=points.shape,
dtype=points.dtype,
__getitem__=lambda self, key: points[key])
self.engine = mock.Mock(
cube=mock.Mock(),
cf_var=mock.Mock(dimensions=('foo', 'bar')),
filename='DUMMY',
provides=dict(coordinates=[]))
# Create patch for deferred loading that prevents attempted
# file access. This assumes that self.cf_bounds_var is
# defined in the test case.
def patched__getitem__(proxy_self, keys):
for var in (self.cf_coord_var, self.cf_bounds_var):
if proxy_self.variable_name == var.cf_name:
return var[keys]
raise RuntimeError()
self.deferred_load_patch = mock.patch(
'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__',
new=patched__getitem__)
def test_slowest_varying_vertex_dim(self):
# Create the bounds cf variable.
bounds = np.arange(24).reshape(4, 2, 3)
self.cf_bounds_var = mock.Mock(
dimensions=('nv', 'foo', 'bar'),
cf_name='wibble_bnds',
shape=bounds.shape,
dtype=bounds.dtype,
__getitem__=lambda self, key: bounds[key])
# Expected bounds on the resulting coordinate should be rolled so that
# the vertex dimension is at the end.
expected_bounds = np.rollaxis(bounds, 0, bounds.ndim)
expected_coord = AuxCoord(
self.cf_coord_var[:],
long_name=self.cf_coord_var.long_name,
var_name=self.cf_coord_var.cf_name,
units=self.cf_coord_var.units,
bounds=expected_bounds)
# Patch the helper function that retrieves the bounds cf variable.
# This avoids the need for setting up further mocking of cf objects.
get_cf_bounds_var_patch = mock.patch(
'iris.fileformats._pyke_rules.compiled_krb.'
'fc_rules_cf_fc.get_cf_bounds_var',
return_value=self.cf_bounds_var)
# Asserts must lie within context manager because of deferred loading.
with self.deferred_load_patch, get_cf_bounds_var_patch:
build_auxiliary_coordinate(self.engine, self.cf_coord_var)
# Test that expected coord is built and added to cube.
self.engine.cube.add_aux_coord.assert_called_with(
expected_coord, [0, 1])
# Test that engine.provides container is correctly populated.
expected_list = [(expected_coord, self.cf_coord_var.cf_name)]
self.assertEqual(self.engine.provides['coordinates'],
expected_list)
def test_fastest_varying_vertex_dim(self):
bounds = np.arange(24).reshape(2, 3, 4)
self.cf_bounds_var = mock.Mock(
dimensions=('foo', 'bar', 'nv'),
cf_name='wibble_bnds',
shape=bounds.shape,
dtype=bounds.dtype,
__getitem__=lambda self, key: bounds[key])
expected_coord = AuxCoord(
self.cf_coord_var[:],
long_name=self.cf_coord_var.long_name,
var_name=self.cf_coord_var.cf_name,
units=self.cf_coord_var.units,
bounds=bounds)
get_cf_bounds_var_patch = mock.patch(
'iris.fileformats._pyke_rules.compiled_krb.'
'fc_rules_cf_fc.get_cf_bounds_var',
return_value=self.cf_bounds_var)
# Asserts must lie within context manager because of deferred loading.
with self.deferred_load_patch, get_cf_bounds_var_patch:
build_auxiliary_coordinate(self.engine, self.cf_coord_var)
# Test that expected coord is built and added to cube.
self.engine.cube.add_aux_coord.assert_called_with(
expected_coord, [0, 1])
# Test that engine.provides container is correctly populated.
expected_list = [(expected_coord, self.cf_coord_var.cf_name)]
self.assertEqual(self.engine.provides['coordinates'],
expected_list)
def test_fastest_with_different_dim_names(self):
        # Despite the dimension names ('x' and 'y') differing from the coord's
        # dimensions ('foo' and 'bar'), as permitted by the CF spec, this
        # should still work because the vertex dim is the fastest varying.
bounds = np.arange(24).reshape(2, 3, 4)
self.cf_bounds_var = mock.Mock(
dimensions=('x', 'y', 'nv'),
cf_name='wibble_bnds',
shape=bounds.shape,
dtype=bounds.dtype,
__getitem__=lambda self, key: bounds[key])
expected_coord = AuxCoord(
self.cf_coord_var[:],
long_name=self.cf_coord_var.long_name,
var_name=self.cf_coord_var.cf_name,
units=self.cf_coord_var.units,
bounds=bounds)
get_cf_bounds_var_patch = mock.patch(
'iris.fileformats._pyke_rules.compiled_krb.'
'fc_rules_cf_fc.get_cf_bounds_var',
return_value=self.cf_bounds_var)
# Asserts must lie within context manager because of deferred loading.
with self.deferred_load_patch, get_cf_bounds_var_patch:
build_auxiliary_coordinate(self.engine, self.cf_coord_var)
# Test that expected coord is built and added to cube.
self.engine.cube.add_aux_coord.assert_called_with(
expected_coord, [0, 1])
# Test that engine.provides container is correctly populated.
expected_list = [(expected_coord, self.cf_coord_var.cf_name)]
self.assertEqual(self.engine.provides['coordinates'],
expected_list)
if __name__ == '__main__':
tests.main()
| jkettleb/iris | lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py | Python | lgpl-3.0 | 7,441 |
import os
import platform
from setuptools import setup, Extension
from distutils.util import convert_path
from Cython.Build import cythonize
system = platform.system()
## paths settings
# Linux
if 'Linux' in system:
CLFFT_DIR = r'/home/gregor/devel/clFFT'
CLFFT_LIB_DIRS = [r'/usr/local/lib64']
CLFFT_INCL_DIRS = [os.path.join(CLFFT_DIR, 'src', 'include'), ]
CL_INCL_DIRS = ['/opt/AMDAPPSDK-3.0/include']
EXTRA_COMPILE_ARGS = []
EXTRA_LINK_ARGS = []
#Windows
elif 'Windows' in system:
CLFFT_DIR = r'C:\Users\admin\Devel\clFFT-Full-2.12.2-Windows-x64'
CLFFT_LIB_DIRS = [os.path.join(CLFFT_DIR, 'lib64\import')]
CLFFT_INCL_DIRS = [os.path.join(CLFFT_DIR, 'include'), ]
CL_DIR = os.getenv('AMDAPPSDKROOT')
CL_INCL_DIRS = [os.path.join(CL_DIR, 'include')]
EXTRA_COMPILE_ARGS = []
EXTRA_LINK_ARGS = []
# macOS
elif 'Darwin' in system:
CLFFT_DIR = r'/Users/gregor/Devel/clFFT'
CLFFT_LIB_DIRS = [r'/Users/gregor/Devel/clFFT/src/library']
CLFFT_INCL_DIRS = [os.path.join(CLFFT_DIR, 'src', 'include'), ]
CL_INCL_DIRS = []
EXTRA_COMPILE_ARGS = ['-stdlib=libc++']
EXTRA_LINK_ARGS = ['-stdlib=libc++']
import Cython.Compiler.Options
Cython.Compiler.Options.generate_cleanup_code = 2
extensions = [
Extension("gpyfft.gpyfftlib",
[os.path.join('gpyfft', 'gpyfftlib.pyx')],
include_dirs= CLFFT_INCL_DIRS + CL_INCL_DIRS,
extra_compile_args=EXTRA_COMPILE_ARGS,
extra_link_args=EXTRA_LINK_ARGS,
libraries=['clFFT'],
library_dirs = CLFFT_LIB_DIRS,
language='c++',
)
]
def copy_clfftdll_to_package():
import shutil
shutil.copy(
os.path.join(CLFFT_DIR, 'bin', 'clFFT.dll'),
'gpyfft')
shutil.copy(
os.path.join(CLFFT_DIR, 'bin', 'StatTimer.dll'),
'gpyfft')
print("copied clFFT.dll, StatTimer.dll")
package_data = {}
if 'Windows' in platform.system():
copy_clfftdll_to_package()
package_data.update({'gpyfft': ['clFFT.dll', 'StatTimer.dll']},)
def get_version():
main_ns = {}
version_path = convert_path('gpyfft/version.py')
with open(version_path) as version_file:
exec(version_file.read(), main_ns)
version = main_ns['__version__']
return version
def get_readme():
dirname = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(dirname, "README.md"), "r") as fp:
long_description = fp.read()
return long_description
install_requires = ["numpy", "pyopencl"]
setup_requires = ["numpy", "cython"]
setup(
name='gpyfft',
version=get_version(),
description='A Python wrapper for the OpenCL FFT library clFFT',
long_description=get_readme(),
url=r"https://github.com/geggo/gpyfft",
maintainer='Gregor Thalhammer',
maintainer_email='gregor.thalhammer@gmail.com',
license='LGPL',
packages=['gpyfft', "gpyfft.test"],
ext_modules=cythonize(extensions),
package_data=package_data,
install_requires=install_requires,
setup_requires=setup_requires,
)
| geggo/gpyfft | setup.py | Python | lgpl-3.0 | 3,106 |
# BlenderBIM Add-on - OpenBIM Blender Add-on
# Copyright (C) 2021 Dion Moult <dion@thinkmoult.com>
#
# This file is part of BlenderBIM Add-on.
#
# BlenderBIM Add-on is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BlenderBIM Add-on is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BlenderBIM Add-on. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import bpy
import pytest
import webbrowser
import blenderbim
import ifcopenshell
import ifcopenshell.util.representation
from blenderbim.bim.ifc import IfcStore
from mathutils import Vector
# Monkey-patch webbrowser opening since we want to test headlessly
webbrowser.open = lambda x: True
variables = {"cwd": os.getcwd(), "ifc": "IfcStore.get_file()"}
class NewFile:
@pytest.fixture(autouse=True)
def setup(self):
IfcStore.purge()
bpy.ops.wm.read_homefile(app_template="")
if bpy.data.objects:
bpy.data.batch_remove(bpy.data.objects)
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
blenderbim.bim.handler.setDefaultProperties(None)
class NewIfc:
@pytest.fixture(autouse=True)
def setup(self):
IfcStore.purge()
bpy.ops.wm.read_homefile(app_template="")
bpy.data.batch_remove(bpy.data.objects)
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
blenderbim.bim.handler.setDefaultProperties(None)
bpy.ops.bim.create_project()
def scenario(function):
def subfunction(self):
run(function(self))
return subfunction
def scenario_debug(function):
def subfunction(self):
run_debug(function(self))
return subfunction
def an_empty_ifc_project():
bpy.ops.bim.create_project()
def i_add_a_cube():
bpy.ops.mesh.primitive_cube_add()
def i_add_a_cube_of_size_size_at_location(size, location):
bpy.ops.mesh.primitive_cube_add(size=float(size), location=[float(co) for co in location.split(",")])
def the_object_name_is_selected(name):
i_deselect_all_objects()
additionally_the_object_name_is_selected(name)
def additionally_the_object_name_is_selected(name):
obj = bpy.context.scene.objects.get(name)
if not obj:
        assert False, f'The object "{name}" could not be selected'
bpy.context.view_layer.objects.active = obj
obj.select_set(True)
def i_deselect_all_objects():
bpy.context.view_layer.objects.active = None
bpy.ops.object.select_all(action="DESELECT")
def i_am_on_frame_number(number):
bpy.context.scene.frame_set(int(number))
def i_set_prop_to_value(prop, value):
try:
eval(f"bpy.context.{prop}")
except:
assert False, "Property does not exist"
try:
exec(f'bpy.context.{prop} = "{value}"')
except:
exec(f"bpy.context.{prop} = {value}")
def prop_is_value(prop, value):
is_value = False
try:
exec(f'assert bpy.context.{prop} == "{value}"')
is_value = True
except:
try:
exec(f"assert bpy.context.{prop} == {value}")
is_value = True
except:
try:
exec(f"assert list(bpy.context.{prop}) == {value}")
is_value = True
except:
pass
if not is_value:
actual_value = eval(f"bpy.context.{prop}")
assert False, f"Value is {actual_value}"
def i_enable_prop(prop):
exec(f"bpy.context.{prop} = True")
def i_press_operator(operator):
if "(" in operator:
exec(f"bpy.ops.{operator}")
else:
exec(f"bpy.ops.{operator}()")
def i_rename_the_object_name1_to_name2(name1, name2):
the_object_name_exists(name1).name = name2
def the_object_name_exists(name):
obj = bpy.data.objects.get(name)
if not obj:
assert False, f'The object "{name}" does not exist'
return obj
def an_ifc_file_exists():
ifc = IfcStore.get_file()
if not ifc:
assert False, "No IFC file is available"
return ifc
def an_ifc_file_does_not_exist():
ifc = IfcStore.get_file()
if ifc:
assert False, "An IFC is available"
def the_object_name_does_not_exist(name):
assert bpy.data.objects.get(name) is None, "Object exists"
def the_object_name_is_an_ifc_class(name, ifc_class):
ifc = an_ifc_file_exists()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
assert element.is_a(ifc_class), f'Object "{name}" is an {element.is_a()}'
def the_object_name_is_not_an_ifc_element(name):
id = the_object_name_exists(name).BIMObjectProperties.ifc_definition_id
assert id == 0, f"The ID is {id}"
def the_object_name_is_in_the_collection_collection(name, collection):
assert collection in [c.name for c in the_object_name_exists(name).users_collection]
def the_object_name_is_not_in_the_collection_collection(name, collection):
assert collection not in [c.name for c in the_object_name_exists(name).users_collection]
def the_object_name_has_a_body_of_value(name, value):
assert the_object_name_exists(name).data.body == value
def the_collection_name1_is_in_the_collection_name2(name1, name2):
assert bpy.data.collections.get(name2).children.get(name1)
def the_collection_name1_is_not_in_the_collection_name2(name1, name2):
assert not bpy.data.collections.get(name2).children.get(name1)
def the_object_name_is_placed_in_the_collection_collection(name, collection):
obj = the_object_name_exists(name)
[c.objects.unlink(obj) for c in obj.users_collection]
bpy.data.collections.get(collection).objects.link(obj)
def the_object_name_has_a_type_representation_of_context(name, type, context):
ifc = an_ifc_file_exists()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
context, subcontext, target_view = context.split("/")
assert ifcopenshell.util.representation.get_representation(
element, context, subcontext or None, target_view or None
)
def the_object_name_is_contained_in_container_name(name, container_name):
ifc = an_ifc_file_exists()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
container = ifcopenshell.util.element.get_container(element)
if not container:
assert False, f'Object "{name}" is not in any container'
assert container.Name == container_name, f'Object "{name}" is in {container}'
def i_duplicate_the_selected_objects():
bpy.ops.object.duplicate_move()
blenderbim.bim.handler.active_object_callback()
def i_delete_the_selected_objects():
bpy.ops.object.delete()
blenderbim.bim.handler.active_object_callback()
def the_object_name1_and_name2_are_different_elements(name1, name2):
ifc = an_ifc_file_exists()
element1 = ifc.by_id(the_object_name_exists(name1).BIMObjectProperties.ifc_definition_id)
element2 = ifc.by_id(the_object_name_exists(name2).BIMObjectProperties.ifc_definition_id)
assert element1 != element2, f"Objects {name1} and {name2} have same elements {element1} and {element2}"
def the_file_name_should_contain_value(name, value):
with open(name, "r") as f:
assert value in f.read()
def the_object_name1_has_a_boolean_difference_by_name2(name1, name2):
obj = the_object_name_exists(name1)
for modifier in obj.modifiers:
if modifier.type == "BOOLEAN" and modifier.object and modifier.object.name == name2:
return True
assert False, "No boolean found"
def the_object_name1_has_no_boolean_difference_by_name2(name1, name2):
obj = the_object_name_exists(name1)
for modifier in obj.modifiers:
if modifier.type == "BOOLEAN" and modifier.object and modifier.object.name == name2:
assert False, "A boolean was found"
def the_object_name_is_voided_by_void(name, void):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
for rel in element.HasOpenings:
if rel.RelatedOpeningElement.Name == void:
return True
assert False, "No void found"
def the_object_name_is_not_voided_by_void(name, void):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
for rel in element.HasOpenings:
if rel.RelatedOpeningElement.Name == void:
assert False, "A void was found"
def the_object_name_is_not_voided(name):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
if any(element.HasOpenings):
assert False, "An opening was found"
def the_object_name_is_not_a_void(name):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
if any(element.VoidsElements):
assert False, "A void was found"
def the_void_name_is_filled_by_filling(name, filling):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
if any(rel.RelatedBuildingElement.Name == filling for rel in element.HasFillings):
return True
assert False, "No filling found"
def the_void_name_is_not_filled_by_filling(name, filling):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
if any(rel.RelatedBuildingElement.Name == filling for rel in element.HasFillings):
assert False, "A filling was found"
def the_object_name_is_not_a_filling(name):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
if any(element.FillsVoids):
assert False, "A filling was found"
def the_object_name_should_display_as_mode(name, mode):
assert the_object_name_exists(name).display_type == mode
def the_object_name_has_number_vertices(name, number):
total = len(the_object_name_exists(name).data.vertices)
assert total == int(number), f"We found {total} vertices"
def the_object_name_is_at_location(name, location):
obj_location = the_object_name_exists(name).location
assert (
obj_location - Vector([float(co) for co in location.split(",")])
).length < 0.1, f"Object is at {obj_location}"
def the_variable_key_is_value(key, value):
variables[key] = eval(value)
definitions = {
'the variable "(.*)" is "(.*)"': the_variable_key_is_value,
"an empty IFC project": an_empty_ifc_project,
"I add a cube": i_add_a_cube,
'I add a cube of size "([0-9]+)" at "(.*)"': i_add_a_cube_of_size_size_at_location,
'the object "(.*)" is selected': the_object_name_is_selected,
'additionally the object "(.*)" is selected': additionally_the_object_name_is_selected,
"I deselect all objects": i_deselect_all_objects,
'I am on frame "([0-9]+)"': i_am_on_frame_number,
'I set "(.*)" to "(.*)"': i_set_prop_to_value,
'"(.*)" is "(.*)"': prop_is_value,
'I enable "(.*)"': i_enable_prop,
'I press "(.*)"': i_press_operator,
'I rename the object "(.*)" to "(.*)"': i_rename_the_object_name1_to_name2,
'the object "(.*)" exists': the_object_name_exists,
'the object "(.*)" does not exist': the_object_name_does_not_exist,
'the object "(.*)" is an "(.*)"': the_object_name_is_an_ifc_class,
'the object "(.*)" is not an IFC element': the_object_name_is_not_an_ifc_element,
'the object "(.*)" is in the collection "(.*)"': the_object_name_is_in_the_collection_collection,
'the object "(.*)" is not in the collection "(.*)"': the_object_name_is_not_in_the_collection_collection,
'the object "(.*)" has a body of "(.*)"': the_object_name_has_a_body_of_value,
'the collection "(.*)" is in the collection "(.*)"': the_collection_name1_is_in_the_collection_name2,
'the collection "(.*)" is not in the collection "(.*)"': the_collection_name1_is_not_in_the_collection_name2,
"an IFC file exists": an_ifc_file_exists,
"an IFC file does not exist": an_ifc_file_does_not_exist,
'the object "(.*)" has a "(.*)" representation of "(.*)"': the_object_name_has_a_type_representation_of_context,
'the object "(.*)" is placed in the collection "(.*)"': the_object_name_is_placed_in_the_collection_collection,
'the object "(.*)" is contained in "(.*)"': the_object_name_is_contained_in_container_name,
"I duplicate the selected objects": i_duplicate_the_selected_objects,
"I delete the selected objects": i_delete_the_selected_objects,
'the object "(.*)" and "(.*)" are different elements': the_object_name1_and_name2_are_different_elements,
'the file "(.*)" should contain "(.*)"': the_file_name_should_contain_value,
'the object "(.*)" has a boolean difference by "(.*)"': the_object_name1_has_a_boolean_difference_by_name2,
'the object "(.*)" has no boolean difference by "(.*)"': the_object_name1_has_no_boolean_difference_by_name2,
'the object "(.*)" is voided by "(.*)"': the_object_name_is_voided_by_void,
'the object "(.*)" is not voided by "(.*)"': the_object_name_is_not_voided_by_void,
'the object "(.*)" is not a void': the_object_name_is_not_a_void,
'the object "(.*)" is not voided': the_object_name_is_not_voided,
'the object "(.*)" should display as "(.*)"': the_object_name_should_display_as_mode,
'the object "(.*)" has "([0-9]+)" vertices': the_object_name_has_number_vertices,
'the object "(.*)" is at "(.*)"': the_object_name_is_at_location,
"nothing interesting happens": lambda: None,
'the void "(.*)" is filled by "(.*)"': the_void_name_is_filled_by_filling,
'the void "(.*)" is not filled by "(.*)"': the_void_name_is_not_filled_by_filling,
'the object "(.*)" is not a filling': the_object_name_is_not_a_filling,
}
# Super lightweight Gherkin implementation
def run(scenario):
keywords = ["Given", "When", "Then", "And", "But"]
for line in scenario.split("\n"):
for key, value in variables.items():
line = line.replace("{" + key + "}", str(value))
for keyword in keywords:
line = line.replace(keyword, "")
line = line.strip()
if not line:
continue
match = None
for definition, callback in definitions.items():
match = re.search("^" + definition + "$", line)
if match:
try:
callback(*match.groups())
except AssertionError as e:
assert False, f"Failed: {line}, with error: {e}"
break
if not match:
assert False, f"Definition not implemented: {line}"
return True
def run_debug(scenario, blend_filepath=None):
try:
result = run(scenario)
except Exception as e:
if blend_filepath:
bpy.ops.wm.save_as_mainfile(filepath=blend_filepath)
assert False, e
if blend_filepath:
bpy.ops.wm.save_as_mainfile(filepath=blend_filepath)
return result
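# A minimal usage sketch (hypothetical scenario text; steps are matched with
# re.search against the definitions above, so quoting must match exactly, and
# "Cube" is Blender's default name for a newly added cube):
#
# scenario = '''
# Given an empty IFC project
# When I add a cube
# Then the object "Cube" exists
# '''
# run(scenario)  # raises AssertionError naming the failing step on error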
| IfcOpenShell/IfcOpenShell | src/blenderbim/test/bim/bootstrap.py | Python | lgpl-3.0 | 15,500 |
import os
import sys
import string
import random
import math
#################################################
# State
balance = 0
def deposit(amount):
global balance
balance += amount
return balance
def withdraw(amount):
global balance
balance -= amount
return balance
#################################################
# Dict like
def make_account():
return {'balance': 0}
def deposit(account, amount):
account['balance'] += amount
return account['balance']
def withdraw(account, amount):
account['balance'] -= amount
return account['balance']
# >>> a = make_account()
# >>> b = make_account()
# >>> deposit(a, 100)
# 100
# >>> deposit(b, 50)
# 50
# >>> withdraw(b, 10)
# 40
# >>> withdraw(a, 10)
# 90
#################################################
# Class
class BankAccount:
def __init__(self, balance=0):
self.balance = balance
def withdraw(self, amount):
self.balance -= amount
return self.balance
def deposit(self, amount):
self.balance += amount
return self.balance
# >>> a = BankAccount()
# >>> b = BankAccount()
# >>> a.deposit(100)
# 100
# >>> b.deposit(50)
# 50
# >>> b.withdraw(10)
# 40
# >>> a.withdraw(10)
# 90
#################################################
# Inheritance
class MinimumBalanceAccount(BankAccount):
def __init__(self, minimum_balance):
BankAccount.__init__(self)
self.minimum_balance = minimum_balance
def withdraw(self, amount):
if self.balance - amount < self.minimum_balance:
print('Sorry, minimum balance must be maintained.')
else:
BankAccount.withdraw(self, amount)
# >>> a = MinimumBalanceAccount(0)
# >>> a.deposit(100)
# 100
# >>> a.withdraw(101)
# Sorry, minimum balance must be maintained.
########################################
# Mangling, Exceptions
def generate_id(n=16):
alphabet = string.ascii_letters + string.digits
return ''.join(random.choice(alphabet) for _ in range(n))
class WithdrawError(Exception):
"""Not enough money"""
def __init__(self, amount):
super().__init__()
self.amount = amount
class AdvancedBankAccount:
MAX_BALANCE = 2 ** 64
def __init__(self):
self._balance = 0
self.__id = generate_id()
def withdraw(self, amount):
if not isinstance(amount, int):
raise ValueError
if self._balance < amount:
raise WithdrawError(amount)
self._balance -= amount
return self._balance
def deposit(self, amount):
self._balance += amount
return self._balance
    def get_max_balance():  # no self: works when called on the class itself
        return AdvancedBankAccount.MAX_BALANCE
if __name__ == '__main__':
a = AdvancedBankAccount()
b = a
c = AdvancedBankAccount()
a.deposit(10)
# AdvancedBankAccount.deposit(a, 10) # the same
print('UNACCEPTABLE! b balance:', b._balance)
# print(b.__id) # error, name mangling
a.get_id = lambda self: self.__id
# print(a.get_id()) # TypeError
# print(a.get_id(a)) # AttributeError
################################################
# UNACCEPTABLE!
print("UNACCEPTABLE! b id:", b._AdvancedBankAccount__id) # name unmangling
# static
AdvancedBankAccount.MAX_BALANCE = 2 ** 32
print('max balance:', AdvancedBankAccount.get_max_balance())
a.MAX_BALANCE = 2 ** 64
print('a max: {}, c max: {}'.format(a.MAX_BALANCE,
c.MAX_BALANCE))
################################################
# Exceptions
# in module import
try:
a.withdraw("100")
except:
pass
    # UNACCEPTABLE!
try:
a.withdraw(100)
except WithdrawError as e:
pass
try:
a.withdraw(100)
except (ValueError, WithdrawError) as e:
print('exception raised')
else:
print('no exception')
finally:
print('Finally')
def tricky():
try:
print('Tricky called')
return 1
finally:
print('Tricky finally called')
return 42
return 0
print(tricky())
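    # Prints 42: the `return 42` inside `finally` overrides `return 1` from
    # the `try` block, and the trailing `return 0` is unreachable.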
# how about with statement?
# module is object -> import
class Shape:
def area(self):
raise NotImplementedError
class Circle(Shape):
def __init__(self, radius):
self.radius = radius
def area(self):
return math.pi * self.radius ** 2
class Square(Shape):
def __init__(self, side):
self.side = side
def area(self):
return self.side ** 2
if __name__ == "__main__":
a = [Square(10), Circle(2)]
s = sum(s.area() for s in a)
print(s)
| SPbAU-ProgrammingParadigms/materials | python_2/common_objects.py | Python | unlicense | 4,690 |
class PermissionRequired(Exception):
"""
Exception to be thrown by views which check permissions internally.
Takes a single C{perm} argument which defines the permission that caused
the exception.
"""
def __init__(self, perm):
self.perm = perm
def require_permissions(user, *permissions):
for perm in permissions:
if not user.has_perm(perm):
raise PermissionRequired(perm)
class checks_permissions(object):
"""
Decorator for views which handle C{PermissionRequired} errors and renders
the given error view if necessary.
The original request and arguments are passed to the error with the
additional C{_perm} and C{_view} keyword arguments.
"""
def __init__(self, view_or_error=None):
self.wrapped = callable(view_or_error)
error_view = None
if self.wrapped:
self.view = view_or_error
else:
error_view = view_or_error
if not error_view:
from django.conf import settings
error_view = settings.PERMISSIONS_VIEW
from django.core.urlresolvers import get_callable
self.error_view = get_callable(error_view)
def __call__(self, view_or_request, *args, **kwargs):
if not self.wrapped:
self.view = view_or_request
def dec(*args, **kwargs):
try:
return self.view(*args, **kwargs)
except PermissionRequired as e:
kwargs['_perm'] = e.perm
kwargs['_view'] = self.view
return self.error_view(*args, **kwargs)
return dec(view_or_request, *args, **kwargs) if self.wrapped else dec
class permission_required(object):
"""
Decorator which builds upon the C{checks_permission} decorator to offer
the same functionality as the built-in
C{django.contrib.auth.decorators.permission_required} decorator but which
    renders an error view instead of redirecting to the login page.
"""
def __init__(self, perm, error_view=None):
self.perm = perm
self.error_view = error_view
def __call__(self, view_func):
def decorator(request, *args, **kwargs):
if not request.user.has_perm(self.perm):
raise PermissionRequired(self.perm)
return view_func(request, *args, **kwargs)
return checks_permissions(self.error_view)(decorator)
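# Usage sketch (hypothetical views and permission names; assumes
# settings.PERMISSIONS_VIEW names a valid error view):
#
# @permission_required('polls.can_vote')
# def vote(request, poll_id):
#     ...
#
# @checks_permissions
# def dashboard(request):
#     require_permissions(request.user, 'polls.view_results')
#     ...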
| AnimeDB/adb-browser-frontend | adb/frontend/auth/decorators.py | Python | unlicense | 2,476 |
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 15 15:55:28 2013
@author: dyanna
"""
import numpy as np
from sklearn.svm import SVC
def getSample(pointA, pointB, numberOfPoints):
pointList = list(zip(np.random.uniform(-1,1.00,numberOfPoints),np.random.uniform(-1,1.00,numberOfPoints)))
sample = np.array([(i[0], i[1], isLeft(pointA, pointB, i)) for i in pointList])
y = sample[:,2]
breakpoint = False
while not breakpoint:
if(len(y[y==-1]) == 0 or len(y[y==1]) == 0):
pointList = list(zip(np.random.uniform(-1,1.00,numberOfPoints),np.random.uniform(-1,1.00,numberOfPoints)))
sample = np.array([(i[0], i[1], isLeft(pointA, pointB, i)) for i in pointList])
y = sample[:,2]
else:
breakpoint = True
return sample
def getRandomLine():
return list(zip(np.random.uniform(-1,1.00,2),np.random.uniform(-1,1.00,2)))
def getPoints(numberOfPoints):
pointList = list(zip(np.random.uniform(-1,1.00,numberOfPoints),np.random.uniform(-1,1.00,numberOfPoints)))
return pointList
def isLeft(a, b, c):
    # Sign of the cross product (b - a) x (c - a): +1 if point c lies to the
    # left of the directed line a -> b, -1 otherwise.
    return 1 if ((b[0] - a[0])*(c[1] - a[1]) - (b[1] - a[1])*(c[0] - a[0])) > 0 else -1
def sign(x):
return 1 if x > 0 else -1
def getMisMatchesQP(data, clf):
#print(data)
data_x = np.c_[data[:,0], data[:,1]]
results = clf.predict(data_x)
#print(np.sign(results))
print("mismatch ", float(len(data) - np.sum(np.sign(results) == np.sign(data[:,2])))/len(data))
print("score ", clf.score(data_x, data[:,2]))
return float(len(data) - np.sum(np.sign(results) == np.sign(data[:,2])))/len(data)
def doMonteCarloQP(pointa, pointb, clf, nopoint):
#print "weights ", weight
points = [(np.random.uniform(-1,1), np.random.uniform(-1,1)) for i in range(nopoint)]
#print points
dataset_Monte = np.array([(i[0],i[1], isLeft(pointa,pointb,i)) for i in points])
#print dataset_Monte
return getMisMatchesQP(dataset_Monte, clf)
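# Note on sign conventions (as written): the PLA below collects points whose
# predicted-label row *is* present in the sample (the `any(...)` test) and
# pushes w toward the opposite label, so it halts at the negated separator;
# getMisMatches() compensates by negating the predictions (results = -1 * results).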
def doPLA(sample):
w = np.array([0,0,0])
iteration = 0
it = 0
while True:#(it < 10):
iteration = iteration + 1
it = it + 1
mismatch = list()
for i in sample:
#print("point in question ", i , " weight ", w)
yy = w[0] + w[1] * i[0] + w[2] * i[1]
#print("this is after applying weight to a point ",yy)
point = [i[0], i[1], sign(yy)]
if any(np.equal(sample, point).all(1)):
#print "point not in sample"
if(point[2] == -1):
mismatch.append((1, (i[0]), (i[1])))
else:
mismatch.append((-1, -(i[0]), -(i[1])))
#print " length ", len(mismatch), " mismatch list ",mismatch
if(len(mismatch) > 0):
#find a random point and update w
choiceIndex = np.random.randint(0, len(mismatch))
choice = mismatch[choiceIndex]
#print("choice ", choice)
w = w + choice
#print "new weight ", w
else:
break
#print("this is the iteration ", iteration)
#print("this is the weight ", w)
#montelist = [monetcarlo((x1,y1),(x2,y2),w,10000) for i in range(5)]
#print("Montelist " , montelist)
#monteavg = sum([i for i in montelist])/10
return w, iteration
def getMisMatches(data, weights):
#print data
list1 = np.empty(len(data))
list1.fill(weights[0])
results = list1+ weights[1]*data[:,0]+weights[2]*data[:,1]
results = -1 * results
return float(len(data) - np.sum(np.sign(results) == np.sign(data[:,2])))/len(data)
def doMonteCarloNP(pointa, pointb, weights, nopoint):
#print "weights ", weight
points = [(np.random.uniform(-1,1), np.random.uniform(-1,1)) for i in range(nopoint)]
#print points
dataset_Monte = np.array([(i[0],i[1], isLeft(pointa,pointb,i)) for i in points])
#print dataset_Monte
return getMisMatches(dataset_Monte, weights)
if __name__ == "__main__":
'''X = np.array([[-1,-1],[-2,-1], [1,1], [2,1]])
y = np.array([1,1,2,2])
clf = SVC()
clf.fit(X,y)
print(clf.predict([[-0.8,-1]]))'''
#clf = SVC()
clf = SVC(C = 1000, kernel = 'linear')
monteavgavgQP = list()
monteavgavgPLA = list()
approxavgQP = list()
vectornumberavg = list()
predictavg = list()
for j in range(1):
#clf = SVC(C = 1000, kernel = 'linear')
monteavgQP = list()
monteavgPLA = list()
approxQP = list()
vectoravg = list()
for k in range(1000):
nopoints = 100
line = getRandomLine()
sample = getSample(line[0], line[1], nopoints)
#print(sample)
X = np.c_[sample[:,0], sample[:,1]]
y = sample[:,2]
#print(y)
clf.fit(X,y)
#print(clf.score(X,y))
w, it = doPLA(sample)
#print(len(clf.support_vectors_))
#print(clf.support_vectors_)
#print(clf.support_)
vectoravg.append(len(clf.support_vectors_))
#print(clf.predict(clf.support_vectors_)==1)
#print(clf.predict(clf.support_vectors_))
#print(clf.coef_)
montelistQP = [doMonteCarloQP(line[0], line[1], clf, 500) for i in range(1)]
qpMonte = sum(montelistQP)/len(montelistQP)
monteavgQP.append(sum(montelistQP)/len(montelistQP))
montelist = [ doMonteCarloNP(line[0], line[1], w, 500) for i in range(1)]
plaMonte = sum(montelist)/len(montelist)
monteavgPLA.append(plaMonte)
            if qpMonte < plaMonte:
approxQP.append(1)
else:
approxQP.append(0)
#print(sum(monteavgQP)/len(monteavgQP))
#print(sum(monteavgPLA)/len(monteavgPLA))
#print(sum(approxQP)/len(approxQP))
monteavgavgQP.append(sum(monteavgQP)/len(monteavgQP))
monteavgavgPLA.append(sum(monteavgPLA)/len(monteavgPLA))
approxavgQP.append(sum(approxQP)/len(approxQP))
vectornumberavg.append(sum(vectoravg)/len(vectoravg))
print(sum(monteavgavgQP)/len(monteavgavgQP))
print(sum(monteavgavgPLA)/len(monteavgavgPLA))
print("how good is it? ", sum(approxavgQP)/len(approxavgQP))
print("how good is it? ", sum(vectornumberavg)/len(vectornumberavg))
| pramodh-bn/learn-data-edx | Week 7/qp.py | Python | unlicense | 6,393 |
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Table
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
# demo many to many relationship
# http://docs.sqlalchemy.org/en/rel_0_9/orm/basic_relationships.html#many-to-many
engine = create_engine('sqlite:///manymany.db')
Base = declarative_base()
# Association table linking the two tables
# Also see: http://docs.sqlalchemy.org/en/rel_0_9/orm/basic_relationships.html#association-object
member_club_mapping = Table('member_club_mapping', Base.metadata,
Column('member_id', Integer, ForeignKey('member.id')),
Column('club_id', Integer, ForeignKey('club.id')))
class Member(Base):
__tablename__ = 'member'
id = Column(Integer, primary_key=True)
first_name = Column(String)
last_name = Column(String)
clubs = relationship('Club', back_populates='members',
secondary=member_club_mapping)
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
class Club(Base):
__tablename__ = 'club'
id = Column(Integer, primary_key=True)
name = Column(String)
members = relationship('Member', back_populates='clubs',
secondary=member_club_mapping)
def __init__(self, name):
self.name = name
# create tables
Base.metadata.create_all(engine)
# create a Session
Session = sessionmaker(bind=engine)
session = Session()
# Populate
member1 = Member('John', 'Doe')
club1 = Club('Club dub')
club1.members.append(member1)
session.add(club1)
club2 = Club('Club dub dub')
club2.members.append(member1)
session.add(club2)
club3 = Club('Club dub step')
session.add(club3)
member2 = Member('Jane', 'Allen')
member2.clubs.extend([club1, club2])
session.add(member2)
session.commit()
# query and print Member
res = session.query(Member).all()
for member in res:
    print(member.first_name, member.last_name, [club.name for club in member.clubs])
# query and print Club
res = session.query(Club).all()
for club in res:
    print(club.name, [(member.first_name, member.last_name) for member in club.members])
print('After removing members with first name: Jane')
# Remove a record
record = session.query(Member).filter(Member.first_name == 'Jane').all()
for r in record:
session.delete(r)
session.commit()
# query and print Member
res = session.query(Member).all()
for member in res:
    print(member.first_name, member.last_name, [club.name for club in member.clubs])
# query and print
res = session.query(Club).all()
for club in res:
    print(club.name, [(member.first_name, member.last_name) for member in club.members])
print('After removing the club, Club dub')
# Remove a record
record = session.query(Club).filter(Club.name == 'Club dub').all()
for r in record:
session.delete(r)
session.commit()
# query and print Member
res = session.query(Member).all()
for member in res:
    print(member.first_name, member.last_name, [club.name for club in member.clubs])
# query and print
res = session.query(Club).all()
for club in res:
    print(club.name, [(member.first_name, member.last_name) for member in club.members])
| amitsaha/learning | python/sqla_learning/many_many_relation.py | Python | unlicense | 3,275 |
import os
# Application constants
APP_NAME = 'job_offers'
INSTALL_DIR = os.path.dirname(os.path.abspath(__file__))
LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
LOG_NAME = os.path.join(INSTALL_DIR, 'job_offers.log')
# Testing fixtures
JOB_OFFER_FIXTURES = os.path.join(INSTALL_DIR, "fixtures/job_offers.json")
| jvazquez/organization | organization/job_offers/constants.py | Python | unlicense | 334 |
#!/usr/bin/python
# uart-eg01.py
#
# to run on the other end of the UART
# screen /dev/ttyUSB1 115200
import serial
def readlineCR(uart):
line = b''
while True:
byte = uart.read()
line += byte
if byte == b'\r':
return line
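# Note: with timeout=1, uart.read() returns b'' when no byte arrives, so this
# helper simply keeps polling until a carriage return shows up.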
uart = serial.Serial('/dev/ttyUSB0', baudrate=115200, timeout=1)
while True:
uart.write(b'\r\nSay something: ')
line = readlineCR(uart)
if line != b'exit\r':
lineStr = '\r\nYou sent : {}'.format(line.decode('utf-8'))
uart.write(lineStr.encode('utf-8'))
else:
uart.write(b'\r\nexiting\r\n')
uart.close()
exit(0)
| CurtisLeeBolin/Examples_Python | UART01.py | Python | unlicense | 567 |
from rec import CourseRecord
from score import RoomScore
from evaluation import ScheduleEvaluation
FULL_HOURS = 8 # 8:00AM - 4:00PM utilization
PARTIAL_HOURS = FULL_HOURS * 0.75 #75%
HALF_HOURS = FULL_HOURS * 0.50 #50%
SPARSE_HOURS = FULL_HOURS * 0.25 #25%
class LocationScore:
def __init__(self, evals=None):
self.evals = evals
self.courses = None
self.location = None
self.daily_weights = {"M": {}, "T": {}, "W": {}, "R": {}, "F": {} ,"S": {}}
self.daily_totals = {"M": {}, "T": {}, "W": {}, "R": {}, "F": {} ,"S": {}}
self.final_weighted = 0
        self.weight_rank = 0  # 0 = worst, 10 = best; set as a formatted string by normalize_final_weighted_score()
        if evals is not None:
self.courses = self.evals.get_records()
self.location = self.find_location()
self.final_weighted = self.calculate_final_weighted_score()
def reset_daily_weights(self):
for day in ["M", "T", "W", "R", "F", "S"]:
self.daily_weights[day] = 0
self.daily_totals[day] = 0
def get_daily_weight(self,day_of_week):
return self.daily_weights[day_of_week]
def normalize_final_weighted_score(self,minimum,maximum):
value = self.final_weighted
value -= minimum
if maximum - minimum > 0:
value /= ( maximum - minimum )
else:
value = 0
self.weight_rank = "{0:.2f}".format(value * 10)
def calculate_final_weighted_score(self):
score_sum = 0.00
score_total = 0.00
#reset daily stuff
self.reset_daily_weights()
for course, score in self.courses:
days = course.rec["DAYS_OF_WEEK"]
#score_sum += score.get_weighted_score(course)
score_total += 1.00
for day in ["M", "T", "W", "R", "F", "S"]:
if day in days:
self.daily_weights[day] += score.get_weighted_score(course)
self.daily_totals[day] += 1
for day in ["M", "T", "W", "R", "F", "S"]:
if self.daily_totals[day] > 0:
self.daily_weights[day] /= self.daily_totals[day]
self.daily_weights[day] = self.adjust_utilization(self.daily_weights[day],self.daily_totals[day])
score_sum += self.daily_weights[day]
else:
self.daily_weights[day] = 0
return score_sum / score_total
def adjust_utilization(self,weights,totals):
max_score = 1.00
if totals >= FULL_HOURS: # 8 Hours or more, give slight boost to score
weights *= 1.15 # 15% Boost
elif totals >= PARTIAL_HOURS: # Small Penalty
weights *= (PARTIAL_HOURS/FULL_HOURS)
elif totals >= HALF_HOURS: # Medium Penalty
weights *= (HALF_HOURS/FULL_HOURS)
elif totals > SPARSE_HOURS: # Large Penalty
weights *= (SPARSE_HOURS/FULL_HOURS)
else: # Very Large Penalty
weights *= (1.00/FULL_HOURS)
return weights
def get_location(self):
return self.location
def find_location(self):
for course, score in self.courses:
location = str( course.rec["BUILDING"] )+ " " + str( course.rec["ROOM"] )
# just need to find the first one, so break after this happens
break
return location
def get_final_weighted_score(self):
return self.final_weighted
def get_score_rank(self):
return self.weight_rank
def get_evals(self):
return self.evals
| jbrackins/scheduling-research | src/location.py | Python | unlicense | 3,581 |
"""redblue_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Import the include() function: from django.conf.urls import url, include
3. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^red/', include('apps.red_app.urls', namespace='red_namespace')),
url(r'^blue/', include('apps.blue_app.urls', namespace='blue_namespace')),
url(r'^admin/', admin.site.urls),
]
| thinkAmi-sandbox/Django_iis_global_static_sample | redblue_project/urls.py | Python | unlicense | 991 |
from djangosanetesting.cases import HttpTestCase
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core import mail
from accounts.tests import testdata
class TestResetPassword(HttpTestCase):
def __init__(self, *args, **kwargs):
        super(TestResetPassword, self).__init__(*args, **kwargs)
self.host = 'localhost'
self.port = 8000
def setUp(self):
testdata.run()
def test_reset_password(self):
res = self.client.post(reverse('password_reset'),
{'register_number' : settings.TEST_USERNAME,
},
follow=True)
assert reverse('password_reset_done') in res.request['PATH_INFO']
assert len(mail.outbox) == 1
reset_url = [word for word in mail.outbox[0].body.split() if word.startswith('http')][0]
res = self.client.get(reset_url, follow=True)
assert res.status_code == 200
assert 'unsuccessful' not in res.content.lower()
assert 'change my password' in res.content.lower()
# I've to stop here, because next step is to change password at Google Apps.
# Can't mess up production database.
| sramana/pysis | apps/passwords/tests/test_reset_password.py | Python | unlicense | 1,236 |
energies = dict()
energies[81] = -3.17 # Ammoniadimer.xyz
energies[82] = -5.02 # Waterdimer.xyz
energies[83] = -1.50 # BenzeneMethanecomplex.xyz
energies[84] = -18.61 # Formicaciddimer.xyz
energies[85] = -15.96 # Formamidedimer.xyz
energies[86] = -20.65 # Uracildimerhbonded.xyz
energies[87] = -16.71 # 2pyridoxine2aminopyridinecomplex.xyz
energies[88] = -16.37 # AdeninethymineWatsonCrickcomplex.xyz
energies[89] = -0.53 # Methanedimer.xyz
energies[90] = -1.51 # Ethenedimer.xyz
energies[91] = -2.73 # Benzenedimerparalleldisplaced.xyz
energies[92] = -4.42 # Pyrazinedimer.xyz
energies[93] = -10.12 # Uracildimerstack.xyz
energies[94] = -5.22 # Indolebenzenecomplexstack.xyz
energies[95] = -12.23 # Adeninethyminecomplexstack.xyz
energies[96] = -1.53 # Etheneethynecomplex.xyz
energies[97] = -3.28 # Benzenewatercomplex.xyz
energies[98] = -2.35 # Benzeneammoniacomplex.xyz
energies[99] = -4.46 # BenzeneHCNcomplex.xyz
energies[100] = -2.74 # BenzenedimerTshaped.xyz
energies[101] = -5.73 # IndolebenzeneTshapecomplex.xyz
energies[102] = -7.05 # Phenoldimer.xyz
names = dict()
names[81] = "Ammoniadimer.xyz"
names[82] = "Waterdimer.xyz"
names[83] = "BenzeneMethanecomplex.xyz"
names[84] = "Formicaciddimer.xyz"
names[85] = "Formamidedimer.xyz"
names[86] = "Uracildimerhbonded.xyz"
names[87] = "2pyridoxine2aminopyridinecomplex.xyz"
names[88] = "AdeninethymineWatsonCrickcomplex.xyz"
names[89] = "Methanedimer.xyz"
names[90] = "Ethenedimer.xyz"
names[91] = "Benzenedimerparalleldisplaced.xyz"
names[92] = "Pyrazinedimer.xyz"
names[93] = "Uracildimerstack.xyz"
names[94] = "Indolebenzenecomplexstack.xyz"
names[95] = "Adeninethyminecomplexstack.xyz"
names[96] = "Etheneethynecomplex.xyz"
names[97] = "Benzenewatercomplex.xyz"
names[98] = "Benzeneammoniacomplex.xyz"
names[99] = "BenzeneHCNcomplex.xyz"
names[100] = "BenzenedimerTshaped.xyz"
names[101] = "IndolebenzeneTshapecomplex.xyz"
names[102] = "Phenoldimer.xyz"
| andersx/s22-charmm | structures/ref.py | Python | unlicense | 2,038 |
from bitmovin.utils import Serializable
class AutoRestartConfiguration(Serializable):
def __init__(self, segments_written_timeout: float = None, bytes_written_timeout: float = None,
frames_written_timeout: float = None, hls_manifests_update_timeout: float = None,
dash_manifests_update_timeout: float = None, schedule_expression: str = None):
super().__init__()
self.segmentsWrittenTimeout = segments_written_timeout
self.bytesWrittenTimeout = bytes_written_timeout
self.framesWrittenTimeout = frames_written_timeout
self.hlsManifestsUpdateTimeout = hls_manifests_update_timeout
self.dashManifestsUpdateTimeout = dash_manifests_update_timeout
self.scheduleExpression = schedule_expression
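# Usage sketch (values are illustrative only):
#
# restart_config = AutoRestartConfiguration(segments_written_timeout=60.0,
#                                           frames_written_timeout=30.0)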
| bitmovin/bitmovin-python | bitmovin/resources/models/encodings/live/auto_restart_configuration.py | Python | unlicense | 785 |
from google.appengine.ext import db
class Stuff (db.Model):
owner = db.UserProperty(required=True, auto_current_user=True)
pulp = db.BlobProperty()
class Greeting(db.Model):
author = db.UserProperty()
content = db.StringProperty(multiline=True)
avatar = db.BlobProperty()
date = db.DateTimeProperty(auto_now_add=True)
class Placebo(db.Model):
developer = db.StringProperty()
OID = db.StringProperty()
concept = db.StringProperty()
category = db.StringProperty()
taxonomy = db.StringProperty()
taxonomy_version = db.StringProperty()
code = db.StringProperty()
descriptor = db.StringProperty()
| 0--key/lib | portfolio/2009_GoogleAppEngine/apps/0--key/models.py | Python | apache-2.0 | 651 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops.matmul."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
import tensorflow as tf
from tensorflow.python.kernel_tests import gradient_checker as gc
class MatMulTest(tf.test.TestCase):
def _testCpuMatmul(self, x, y, transpose_x=False, transpose_y=False):
x_mat = np.matrix(x).T if transpose_x else np.matrix(x)
y_mat = np.matrix(y).T if transpose_y else np.matrix(y)
np_ans = x_mat * y_mat
with self.test_session(use_gpu=False):
tf_ans = tf.matmul(x, y, transpose_x, transpose_y).eval()
self.assertAllClose(np_ans, tf_ans)
self.assertAllEqual(np_ans.shape, tf_ans.shape)
def _testGpuMatmul(self, x, y, transpose_x=False, transpose_y=False):
x_mat = np.matrix(x).T if transpose_x else np.matrix(x)
y_mat = np.matrix(y).T if transpose_y else np.matrix(y)
np_ans = x_mat * y_mat
with self.test_session(use_gpu=True):
tf_ans = tf.matmul(x, y, transpose_x, transpose_y).eval()
self.assertAllClose(np_ans, tf_ans)
self.assertAllEqual(np_ans.shape, tf_ans.shape)
def _randMatrix(self, rows, cols, dtype):
if dtype is np.complex64:
real = self._randMatrix(rows, cols, np.float32)
imag = self._randMatrix(rows, cols, np.float32)
      return real + 1j * imag
else:
return np.random.uniform(low=1.0, high=100.0, size=rows * cols).reshape(
[rows, cols]).astype(dtype)
# Basic test:
# [ [1],
# [2],
# [3], * [1, 2]
# [4] ]
def testFloatBasic(self):
x = np.arange(1., 5.).reshape([4, 1]).astype(np.float32)
y = np.arange(1., 3.).reshape([1, 2]).astype(np.float32)
self._testCpuMatmul(x, y)
self._testGpuMatmul(x, y)
def testDoubleBasic(self):
x = np.arange(1., 5.).reshape([4, 1]).astype(np.float64)
y = np.arange(1., 3.).reshape([1, 2]).astype(np.float64)
self._testCpuMatmul(x, y)
def testInt32Basic(self):
x = np.arange(1., 5.).reshape([4, 1]).astype(np.int32)
y = np.arange(1., 3.).reshape([1, 2]).astype(np.int32)
self._testCpuMatmul(x, y)
def testSComplexBasic(self):
x = np.arange(1., 5.).reshape([4, 1]).astype(np.complex64)
y = np.arange(1., 3.).reshape([1, 2]).astype(np.complex64)
self._testCpuMatmul(x, y)
# Tests testing random sized matrices.
def testFloatRandom(self):
for _ in range(10):
n, k, m = np.random.randint(1, 100, size=3)
x = self._randMatrix(n, k, np.float32)
y = self._randMatrix(k, m, np.float32)
self._testCpuMatmul(x, y)
self._testGpuMatmul(x, y)
def testDoubleRandom(self):
for _ in range(10):
n, k, m = np.random.randint(1, 100, size=3)
x = self._randMatrix(n, k, np.float64)
y = self._randMatrix(k, m, np.float64)
self._testCpuMatmul(x, y)
def testInt32Random(self):
for _ in range(10):
n, k, m = np.random.randint(1, 100, size=3)
x = self._randMatrix(n, k, np.int32)
y = self._randMatrix(k, m, np.int32)
self._testCpuMatmul(x, y)
def testSComplexRandom(self):
for _ in range(10):
n, k, m = np.random.randint(1, 100, size=3)
x = self._randMatrix(n, k, np.complex64)
y = self._randMatrix(k, m, np.complex64)
self._testCpuMatmul(x, y)
# Test the cases that transpose the matrices before multiplying.
# NOTE(keveman): The cases where only one of the inputs is
# transposed are covered by tf.matmul's gradient function.
def testFloatRandomTransposeBoth(self):
for _ in range(10):
n, k, m = np.random.randint(1, 100, size=3)
x = self._randMatrix(k, n, np.float32)
y = self._randMatrix(m, k, np.float32)
self._testCpuMatmul(x, y, True, True)
self._testGpuMatmul(x, y, True, True)
  def testDoubleRandomTransposeBoth(self):
for _ in range(10):
n, k, m = np.random.randint(1, 100, size=3)
x = self._randMatrix(k, n, np.float64)
y = self._randMatrix(m, k, np.float64)
self._testCpuMatmul(x, y, True, True)
def testMatMul_OutEmpty_A(self):
n, k, m = 0, 8, 3
x = self._randMatrix(n, k, np.float32)
y = self._randMatrix(k, m, np.float32)
self._testCpuMatmul(x, y)
self._testGpuMatmul(x, y)
def testMatMul_OutEmpty_B(self):
n, k, m = 3, 8, 0
x = self._randMatrix(n, k, np.float32)
y = self._randMatrix(k, m, np.float32)
self._testCpuMatmul(x, y)
self._testGpuMatmul(x, y)
def testMatMul_Inputs_Empty(self):
n, k, m = 3, 0, 4
x = self._randMatrix(n, k, np.float32)
y = self._randMatrix(k, m, np.float32)
self._testCpuMatmul(x, y)
self._testGpuMatmul(x, y)
# TODO(zhifengc): Figures out how to test matmul gradients on GPU.
class MatMulGradientTest(tf.test.TestCase):
def testGradientInput0(self):
with self.test_session(use_gpu=False):
x = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2],
dtype=tf.float64, name="x")
y = tf.constant([1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7],
shape=[2, 4], dtype=tf.float64, name="y")
m = tf.matmul(x, y, name="matmul")
err = gc.ComputeGradientError(x, [3, 2], m, [3, 4])
print("matmul input0 gradient err = ", err)
self.assertLess(err, 1e-10)
def testGradientInput1(self):
with self.test_session(use_gpu=False):
x = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2],
dtype=tf.float64, name="x")
y = tf.constant([1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7],
shape=[2, 4], dtype=tf.float64, name="y")
m = tf.matmul(x, y, name="matmul")
err = gc.ComputeGradientError(y, [2, 4], m, [3, 4])
print("matmul input1 gradient err = ", err)
self.assertLess(err, 1e-10)
def _VerifyInput0(self, transpose_a, transpose_b):
shape_x = [3, 2]
shape_y = [2, 4]
if transpose_a:
shape_x = list(reversed(shape_x))
if transpose_b:
shape_y = list(reversed(shape_y))
with self.test_session(use_gpu=False):
x = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=shape_x,
dtype=tf.float64, name="x")
y = tf.constant([1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7],
shape=shape_y, dtype=tf.float64, name="y")
m = tf.matmul(x, y, transpose_a, transpose_b, name="matmul")
err = gc.ComputeGradientError(x, shape_x, m, [3, 4])
print("matmul input0 gradient err = ", err)
self.assertLess(err, 1e-10)
def testGradientInput0WithTranspose(self):
self._VerifyInput0(transpose_a=True, transpose_b=False)
self._VerifyInput0(transpose_a=False, transpose_b=True)
self._VerifyInput0(transpose_a=True, transpose_b=True)
def _VerifyInput1(self, transpose_a, transpose_b):
shape_x = [3, 2]
shape_y = [2, 4]
if transpose_a:
shape_x = list(reversed(shape_x))
if transpose_b:
shape_y = list(reversed(shape_y))
with self.test_session(use_gpu=False):
x = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=shape_x,
dtype=tf.float64, name="x")
y = tf.constant([1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7],
shape=shape_y, dtype=tf.float64, name="y")
m = tf.matmul(x, y, transpose_a, transpose_b, name="matmul")
err = gc.ComputeGradientError(y, shape_y, m, [3, 4])
print("matmul input1 gradient err = ", err)
self.assertLess(err, 1e-10)
def testGradientInput1WithTranspose(self):
self._VerifyInput1(transpose_a=True, transpose_b=False)
self._VerifyInput1(transpose_a=False, transpose_b=True)
self._VerifyInput1(transpose_a=True, transpose_b=True)
if __name__ == "__main__":
tf.test.main()
| MehdiSfr/tensor-flow | tensorflow/python/kernel_tests/matmul_op_test.py | Python | apache-2.0 | 8,392 |
# Copyright 2011 WebDriver committers
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The ActionChains implementation."""
from selenium.webdriver.remote.command import Command
class ActionChains(object):
"""Generate user actions.
All actions are stored in the ActionChains object. Call perform() to fire
stored actions."""
def __init__(self, driver):
"""Creates a new ActionChains.
Args:
driver: The WebDriver instance which performs user actions.
"""
self._driver = driver
self._actions = []
def perform(self):
"""Performs all stored actions."""
for action in self._actions:
action()
def click(self, on_element=None):
"""Clicks an element.
Args:
on_element: The element to click.
If None, clicks on current mouse position.
"""
if on_element: self.move_to_element(on_element)
self._actions.append(lambda:
self._driver.execute(Command.CLICK, {'button': 0}))
return self
def click_and_hold(self, on_element):
"""Holds down the left mouse button on an element.
Args:
on_element: The element to mouse down.
If None, clicks on current mouse position.
"""
if on_element: self.move_to_element(on_element)
self._actions.append(lambda:
self._driver.execute(Command.MOUSE_DOWN, {}))
return self
def context_click(self, on_element):
"""Performs a context-click (right click) on an element.
Args:
on_element: The element to context-click.
If None, clicks on current mouse position.
"""
if on_element: self.move_to_element(on_element)
self._actions.append(lambda:
self._driver.execute(Command.CLICK, {'button': 2}))
return self
def double_click(self, on_element):
"""Double-clicks an element.
Args:
on_element: The element to double-click.
If None, clicks on current mouse position.
"""
if on_element: self.move_to_element(on_element)
self._actions.append(lambda:
self._driver.execute(Command.DOUBLE_CLICK, {}))
return self
def drag_and_drop(self, source, target):
"""Holds down the left mouse button on the source element,
then moves to the target element and releases the mouse button.
Args:
source: The element to mouse down.
target: The element to mouse up.
"""
self.click_and_hold(source)
self.release(target)
return self
def drag_and_drop_by_offset(self, source, xoffset, yoffset):
"""Holds down the left mouse button on the source element,
then moves to the target element and releases the mouse button.
Args:
source: The element to mouse down.
xoffset: X offset to move to.
yoffset: Y offset to move to.
"""
self.click_and_hold(source)
self.move_by_offset(xoffset, yoffset)
self.release(source)
return self
def key_down(self, key, element=None):
"""Sends a key press only, without releasing it.
Should only be used with modifier keys (Control, Alt and Shift).
Args:
key: The modifier key to send. Values are defined in Keys class.
target: The element to send keys.
If None, sends a key to current focused element.
"""
if element: self.click(element)
self._actions.append(lambda:
self._driver.execute(Command.SEND_MODIFIER_KEY_TO_ACTIVE_ELEMENT, {
"value": key,
"isdown": True}))
return self
def key_up(self, key, element=None):
"""Releases a modifier key.
Args:
key: The modifier key to send. Values are defined in Keys class.
target: The element to send keys.
If None, sends a key to current focused element.
"""
if element: self.click(element)
self._actions.append(lambda:
self._driver.execute(Command.SEND_MODIFIER_KEY_TO_ACTIVE_ELEMENT, {
"value": key,
"isdown": False}))
return self
def move_by_offset(self, xoffset, yoffset):
"""Moving the mouse to an offset from current mouse position.
Args:
xoffset: X offset to move to.
yoffset: Y offset to move to.
"""
self._actions.append(lambda:
self._driver.execute(Command.MOVE_TO, {
'xoffset': xoffset,
'yoffset': yoffset}))
return self
def move_to_element(self, to_element):
"""Moving the mouse to the middle of an element.
Args:
to_element: The element to move to.
"""
self._actions.append(lambda:
self._driver.execute(Command.MOVE_TO, {'element': to_element.id}))
return self
def move_to_element_with_offset(self, to_element, xoffset, yoffset):
"""Move the mouse by an offset of the specificed element.
Offsets are relative to the top-left corner of the element.
Args:
to_element: The element to move to.
xoffset: X offset to move to.
yoffset: Y offset to move to.
"""
self._actions.append(lambda:
self._driver.execute(Command.MOVE_TO, {
'element': to_element.id,
'xoffset': xoffset,
'yoffset': yoffset}))
return self
def release(self, on_element):
"""Releasing a held mouse button.
Args:
on_element: The element to mouse up.
"""
if on_element: self.move_to_element(on_element)
self._actions.append(lambda:
self._driver.execute(Command.MOUSE_UP, {}))
return self
def send_keys(self, *keys_to_send):
"""Sends keys to current focused element.
Args:
keys_to_send: The keys to send.
"""
self._actions.append(lambda:
self._driver.switch_to_active_element().send_keys(*keys_to_send))
return self
def send_keys_to_element(self, element, *keys_to_send):
"""Sends keys to an element.
Args:
element: The element to send keys.
keys_to_send: The keys to send.
"""
self._actions.append(lambda:
element.send_keys(*keys_to_send))
return self
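# Usage sketch (assumes `driver` is a live WebDriver and the elements have been
# located beforehand):
#
# menu = driver.find_element_by_css_selector('.nav')
# item = driver.find_element_by_css_selector('.nav .item')
# ActionChains(driver).move_to_element(menu).click(item).perform()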
| hali4ka/robotframework-selenium2library | src/Selenium2Library/lib/selenium-2.8.1/py/selenium/webdriver/common/action_chains.py | Python | apache-2.0 | 7,157 |
# Copyright 2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import shutil
import argparse
from .. import mlog
from ..mesonlib import has_path_sep
from . import destdir_join
from .gettext import read_linguas
parser = argparse.ArgumentParser()
parser.add_argument('command')
parser.add_argument('--id', dest='project_id')
parser.add_argument('--subdir', dest='subdir')
parser.add_argument('--installdir', dest='install_dir')
parser.add_argument('--sources', dest='sources')
parser.add_argument('--media', dest='media', default='')
parser.add_argument('--langs', dest='langs', default='')
parser.add_argument('--symlinks', type=bool, dest='symlinks', default=False)
def build_pot(srcdir, project_id, sources):
# Must be relative paths
sources = [os.path.join('C', source) for source in sources]
outfile = os.path.join(srcdir, project_id + '.pot')
subprocess.call(['itstool', '-o', outfile] + sources)
def update_po(srcdir, project_id, langs):
potfile = os.path.join(srcdir, project_id + '.pot')
for lang in langs:
pofile = os.path.join(srcdir, lang, lang + '.po')
subprocess.call(['msgmerge', '-q', '-o', pofile, pofile, potfile])
def build_translations(srcdir, blddir, langs):
for lang in langs:
outdir = os.path.join(blddir, lang)
os.makedirs(outdir, exist_ok=True)
subprocess.call([
'msgfmt', os.path.join(srcdir, lang, lang + '.po'),
'-o', os.path.join(outdir, lang + '.gmo')
])
def merge_translations(blddir, sources, langs):
for lang in langs:
subprocess.call([
'itstool', '-m', os.path.join(blddir, lang, lang + '.gmo'),
'-o', os.path.join(blddir, lang)
] + sources)
def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, project_id, symlinks):
c_install_dir = os.path.join(install_dir, 'C', project_id)
for lang in langs + ['C']:
indir = destdir_join(destdir, os.path.join(install_dir, lang, project_id))
os.makedirs(indir, exist_ok=True)
for source in sources:
infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source)
outfile = os.path.join(indir, source)
mlog.log('Installing %s to %s' % (infile, outfile))
shutil.copyfile(infile, outfile)
shutil.copystat(infile, outfile)
for m in media:
infile = os.path.join(srcdir, lang, m)
outfile = os.path.join(indir, m)
c_infile = os.path.join(srcdir, 'C', m)
if not os.path.exists(infile):
if not os.path.exists(c_infile):
mlog.warning('Media file "%s" did not exist in C directory' % m)
continue
elif symlinks:
srcfile = os.path.join(c_install_dir, m)
mlog.log('Symlinking %s to %s.' % (outfile, srcfile))
if has_path_sep(m):
os.makedirs(os.path.dirname(outfile), exist_ok=True)
try:
try:
os.symlink(srcfile, outfile)
except FileExistsError:
os.remove(outfile)
os.symlink(srcfile, outfile)
continue
except (NotImplementedError, OSError):
mlog.warning('Symlinking not supported, falling back to copying')
infile = c_infile
else:
                # Lang doesn't have the media file, so fall back to copying the 'C' one
infile = c_infile
mlog.log('Installing %s to %s' % (infile, outfile))
if has_path_sep(m):
os.makedirs(os.path.dirname(outfile), exist_ok=True)
shutil.copyfile(infile, outfile)
shutil.copystat(infile, outfile)
def run(args):
options = parser.parse_args(args)
langs = options.langs.split('@@') if options.langs else []
media = options.media.split('@@') if options.media else []
sources = options.sources.split('@@')
destdir = os.environ.get('DESTDIR', '')
src_subdir = os.path.join(os.environ['MESON_SOURCE_ROOT'], options.subdir)
build_subdir = os.path.join(os.environ['MESON_BUILD_ROOT'], options.subdir)
abs_sources = [os.path.join(src_subdir, 'C', source) for source in sources]
if not langs:
langs = read_linguas(src_subdir)
if options.command == 'pot':
build_pot(src_subdir, options.project_id, sources)
elif options.command == 'update-po':
build_pot(src_subdir, options.project_id, sources)
update_po(src_subdir, options.project_id, langs)
elif options.command == 'build':
if langs:
build_translations(src_subdir, build_subdir, langs)
elif options.command == 'install':
install_dir = os.path.join(os.environ['MESON_INSTALL_PREFIX'], options.install_dir)
if langs:
build_translations(src_subdir, build_subdir, langs)
merge_translations(build_subdir, abs_sources, langs)
install_help(src_subdir, build_subdir, sources, media, langs, install_dir,
destdir, options.project_id, options.symlinks)
| becm/meson | mesonbuild/scripts/yelphelper.py | Python | apache-2.0 | 5,816 |
#
#Program: List 4, question 1;
#Felipe Henrique Bastos Costa - 1615310032;
#
#
#
#
lista = []#empty list;
cont1 = 0#index counter;
cont2 = 1#counter for the number's position: first, second, etc.;
v = 5#stands in for the length of the list;
while(cont1 < v):
    x = int(input("Enter integer number %d to put in your list:\n"%cont2))#x is the variable that receives
    #the user's number
    lista.append(x)#the number given to x is appended to the list;
    cont1+=1#the counters are
    cont2+=1#being incremented;
print("The list you entered was:\n%s"%lista)
| any1m1c/ipc20161 | lista4/ipc_lista4.01.py | Python | apache-2.0 | 675 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2016 Eugene Frolov <eugene@frolov.net.ru>
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import uuid as pyuuid
import mock
import requests
from six.moves.urllib import parse
from restalchemy.common import utils
from restalchemy.storage import exceptions
from restalchemy.storage.sql import engines
from restalchemy.tests.functional.restapi.ra_based.microservice import (
storable_models as models)
from restalchemy.tests.functional.restapi.ra_based.microservice import consts
from restalchemy.tests.functional.restapi.ra_based.microservice import service
from restalchemy.tests.unit import base
TEMPL_SERVICE_ENDPOINT = utils.lastslash("http://127.0.0.1:%s/")
TEMPL_ROOT_COLLECTION_ENDPOINT = TEMPL_SERVICE_ENDPOINT
TEMPL_V1_COLLECTION_ENDPOINT = utils.lastslash(parse.urljoin(
TEMPL_SERVICE_ENDPOINT, 'v1'))
TEMPL_VMS_COLLECTION_ENDPOINT = utils.lastslash(parse.urljoin(
TEMPL_V1_COLLECTION_ENDPOINT, 'vms'))
TEMPL_VM_RESOURCE_ENDPOINT = parse.urljoin(TEMPL_VMS_COLLECTION_ENDPOINT, '%s')
TEMPL_POWERON_ACTION_ENDPOINT = parse.urljoin(
utils.lastslash(TEMPL_VM_RESOURCE_ENDPOINT),
'actions/poweron/invoke')
TEMPL_PORTS_COLLECTION_ENDPOINT = utils.lastslash(parse.urljoin(
utils.lastslash(TEMPL_VM_RESOURCE_ENDPOINT), 'ports'))
TEMPL_PORT_RESOURCE_ENDPOINT = parse.urljoin(TEMPL_PORTS_COLLECTION_ENDPOINT,
'%s')
class BaseResourceTestCase(base.BaseTestCase):
def get_endpoint(self, template, *args):
return template % ((self.service_port,) + tuple(args))
def setUp(self):
super(BaseResourceTestCase, self).setUp()
engines.engine_factory.configure_factory(consts.DATABASE_URI)
engine = engines.engine_factory.get_engine()
self.session = engine.get_session()
self.session.execute("""CREATE TABLE IF NOT EXISTS vms (
uuid CHAR(36) NOT NULL,
state VARCHAR(10) NOT NULL,
name VARCHAR(255) NOT NULL,
PRIMARY KEY (uuid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;""", None)
self.service_port = random.choice(range(2100, 2200))
url = parse.urlparse(self.get_endpoint(TEMPL_SERVICE_ENDPOINT))
self._service = service.RESTService(bind_host=url.hostname,
bind_port=url.port)
self._service.start()
def tearDown(self):
super(BaseResourceTestCase, self).tearDown()
self._service.stop()
self.session.execute("DROP TABLE IF EXISTS vms;", None)
class TestRootResourceTestCase(BaseResourceTestCase):
def test_get_versions_list(self):
response = requests.get(self.get_endpoint(
TEMPL_ROOT_COLLECTION_ENDPOINT))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), ["v1"])
class TestVersionsResourceTestCase(BaseResourceTestCase):
def test_get_resources_list(self):
response = requests.get(
self.get_endpoint(TEMPL_V1_COLLECTION_ENDPOINT))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), ["vms"])
class TestVMResourceTestCase(BaseResourceTestCase):
def _insert_vm_to_db(self, uuid, name, state):
vm = models.VM(uuid=uuid, name=name, state=state)
vm.save()
def _vm_exists_in_db(self, uuid):
try:
models.VM.objects.get_one(filters={'uuid': uuid})
return True
except exceptions.RecordNotFound:
return False
@mock.patch('uuid.uuid4')
def test_create_vm_resource_successful(self, uuid4_mock):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
uuid4_mock.return_value = RESOURCE_ID
vm_request_body = {
"name": "test"
}
vm_response_body = {
"uuid": str(RESOURCE_ID),
"name": "test",
"state": "off"
}
LOCATION = self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT, RESOURCE_ID)
response = requests.post(self.get_endpoint(
TEMPL_VMS_COLLECTION_ENDPOINT), json=vm_request_body)
self.assertEqual(response.status_code, 201)
self.assertEqual(response.headers['location'], LOCATION)
self.assertEqual(response.json(), vm_response_body)
def test_get_vm_resource_by_uuid_successful(self):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
self._insert_vm_to_db(uuid=RESOURCE_ID, name="test", state="off")
vm_response_body = {
"uuid": str(RESOURCE_ID),
"name": "test",
"state": "off"
}
VM_RES_ENDPOINT = self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
RESOURCE_ID)
response = requests.get(VM_RES_ENDPOINT)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), vm_response_body)
def test_update_vm_resource_successful(self):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
self._insert_vm_to_db(uuid=RESOURCE_ID, name="old", state="off")
vm_request_body = {
"name": "new"
}
vm_response_body = {
"uuid": str(RESOURCE_ID),
"name": "new",
"state": "off"
}
VM_RES_ENDPOINT = self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
RESOURCE_ID)
response = requests.put(VM_RES_ENDPOINT, json=vm_request_body)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), vm_response_body)
def test_delete_vm_resource_successful(self):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
self._insert_vm_to_db(uuid=RESOURCE_ID, name="test", state="off")
VM_RES_ENDPOINT = self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
RESOURCE_ID)
response = requests.delete(VM_RES_ENDPOINT)
self.assertEqual(response.status_code, 204)
self.assertFalse(self._vm_exists_in_db(RESOURCE_ID))
def test_process_vm_action_successful(self):
RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
self._insert_vm_to_db(uuid=RESOURCE_ID, name="test", state="off")
vm_response_body = {
"uuid": str(RESOURCE_ID),
"name": "test",
"state": "on"
}
POWERON_ACT_ENDPOINT = self.get_endpoint(TEMPL_POWERON_ACTION_ENDPOINT,
RESOURCE_ID)
response = requests.post(POWERON_ACT_ENDPOINT)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), vm_response_body)
def test_get_collection_vms_successful(self):
RESOURCE_ID1 = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
RESOURCE_ID2 = pyuuid.UUID("00000000-0000-0000-0000-000000000002")
self._insert_vm_to_db(uuid=RESOURCE_ID1, name="test1", state="off")
self._insert_vm_to_db(uuid=RESOURCE_ID2, name="test2", state="on")
vm_response_body = [{
"uuid": str(RESOURCE_ID1),
"name": "test1",
"state": "off"
}, {
"uuid": str(RESOURCE_ID2),
"name": "test2",
"state": "on"
}]
response = requests.get(self.get_endpoint(
TEMPL_VMS_COLLECTION_ENDPOINT))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), vm_response_body)
class TestNestedResourceTestCase(BaseResourceTestCase):
def setUp(self):
super(TestNestedResourceTestCase, self).setUp()
self.session.execute("""CREATE TABLE IF NOT EXISTS ports (
uuid CHAR(36) NOT NULL,
mac CHAR(17) NOT NULL,
vm CHAR(36) NOT NULL,
PRIMARY KEY (uuid),
CONSTRAINT FOREIGN KEY ix_vms_uuid (vm) REFERENCES vms (uuid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;""", None)
self.vm1 = models.VM(
uuid=pyuuid.UUID("00000000-0000-0000-0000-000000000001"),
name="vm1",
state="on")
self.vm1.save(session=self.session)
self.vm2 = models.VM(
uuid=pyuuid.UUID("00000000-0000-0000-0000-000000000002"),
name="vm2",
state="off")
self.vm2.save(session=self.session)
self.session.commit()
def tearDown(self):
self.session.execute("DROP TABLE IF EXISTS ports;", None)
super(TestNestedResourceTestCase, self).tearDown()
@mock.patch('uuid.uuid4')
def test_create_nested_resource_successful(self, uuid4_mock):
VM_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
PORT_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000003")
uuid4_mock.return_value = PORT_RESOURCE_ID
port_request_body = {
"mac": "00:00:00:00:00:03"
}
port_response_body = {
"uuid": str(PORT_RESOURCE_ID),
"mac": "00:00:00:00:00:03",
"vm": parse.urlparse(
self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
VM_RESOURCE_ID)).path
}
LOCATION = self.get_endpoint(TEMPL_PORT_RESOURCE_ENDPOINT,
VM_RESOURCE_ID,
PORT_RESOURCE_ID)
response = requests.post(
self.get_endpoint(TEMPL_PORTS_COLLECTION_ENDPOINT, VM_RESOURCE_ID),
json=port_request_body)
self.assertEqual(response.status_code, 201)
self.assertEqual(response.headers['location'], LOCATION)
self.assertEqual(response.json(), port_response_body)
def test_get_nested_resource_successful(self):
VM_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
PORT_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000003")
port = models.Port(uuid=PORT_RESOURCE_ID,
mac="00:00:00:00:00:03",
vm=self.vm1)
port.save(session=self.session)
self.session.commit()
port_response_body = {
"uuid": str(PORT_RESOURCE_ID),
"mac": "00:00:00:00:00:03",
"vm": parse.urlparse(
self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
VM_RESOURCE_ID)).path
}
response = requests.get(
self.get_endpoint(TEMPL_PORT_RESOURCE_ENDPOINT,
VM_RESOURCE_ID,
PORT_RESOURCE_ID))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), port_response_body)
def test_get_ports_collection_successful(self):
VM_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
PORT1_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000003")
PORT2_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000004")
PORT3_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000005")
port1 = models.Port(uuid=PORT1_RESOURCE_ID,
mac="00:00:00:00:00:03",
vm=self.vm1)
port1.save(session=self.session)
port2 = models.Port(uuid=PORT2_RESOURCE_ID,
mac="00:00:00:00:00:04",
vm=self.vm1)
port2.save(session=self.session)
port3 = models.Port(uuid=PORT3_RESOURCE_ID,
mac="00:00:00:00:00:05",
vm=self.vm2)
port3.save(session=self.session)
ports_response_body = [{
"uuid": str(PORT1_RESOURCE_ID),
"mac": "00:00:00:00:00:03",
"vm": parse.urlparse(
self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
VM_RESOURCE_ID)).path
}, {
"uuid": str(PORT2_RESOURCE_ID),
"mac": "00:00:00:00:00:04",
"vm": parse.urlparse(
self.get_endpoint(TEMPL_VM_RESOURCE_ENDPOINT,
VM_RESOURCE_ID)).path
}]
self.session.commit()
response = requests.get(
self.get_endpoint(TEMPL_PORTS_COLLECTION_ENDPOINT, VM_RESOURCE_ID))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), ports_response_body)
def test_delete_nested_resource_successful(self):
VM_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000001")
PORT_RESOURCE_ID = pyuuid.UUID("00000000-0000-0000-0000-000000000003")
port = models.Port(uuid=PORT_RESOURCE_ID,
mac="00:00:00:00:00:03",
vm=self.vm1)
port.save(session=self.session)
self.session.commit()
response = requests.delete(
self.get_endpoint(TEMPL_PORT_RESOURCE_ENDPOINT,
VM_RESOURCE_ID,
PORT_RESOURCE_ID))
self.assertEqual(response.status_code, 204)
self.assertRaises(exceptions.RecordNotFound,
models.Port.objects.get_one,
filters={'uuid': PORT_RESOURCE_ID})
| phantomii/restalchemy | restalchemy/tests/functional/restapi/ra_based/test_resources.py | Python | apache-2.0 | 13,940 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GKE nodes service account permissions for logging.
The service account used by GKE nodes should have the logging.logWriter
role, otherwise ingestion of logs won't work.
"""
from gcpdiag import lint, models
from gcpdiag.queries import gke, iam
ROLE = 'roles/logging.logWriter'
def prefetch_rule(context: models.Context):
# Make sure that we have the IAM policy in cache.
project_ids = {c.project_id for c in gke.get_clusters(context).values()}
for pid in project_ids:
iam.get_project_policy(pid)
def run_rule(context: models.Context, report: lint.LintReportRuleInterface):
# Find all clusters with logging enabled.
clusters = gke.get_clusters(context)
iam_policy = iam.get_project_policy(context.project_id)
if not clusters:
report.add_skipped(None, 'no clusters found')
for _, c in sorted(clusters.items()):
if not c.has_logging_enabled():
report.add_skipped(c, 'logging disabled')
else:
# Verify service-account permissions for every nodepool.
for np in c.nodepools:
sa = np.service_account
if not iam.is_service_account_enabled(sa, context.project_id):
report.add_failed(np, f'service account disabled or deleted: {sa}')
elif not iam_policy.has_role_permissions(f'serviceAccount:{sa}', ROLE):
report.add_failed(np, f'service account: {sa}\nmissing role: {ROLE}')
else:
report.add_ok(np)
| GoogleCloudPlatform/gcpdiag | gcpdiag/lint/gke/err_2021_001_logging_perm.py | Python | apache-2.0 | 1,986 |
""" Launcher functionality for the Google Compute Engine (GCE)
"""
import json
import logging
import os
from dcos_launch import onprem, util
from dcos_launch.platforms import gcp
from dcos_test_utils.helpers import Host
from googleapiclient.errors import HttpError
log = logging.getLogger(__name__)
def get_credentials(env=None) -> tuple:
path = None
if env is None:
env = os.environ.copy()
if 'GCE_CREDENTIALS' in env:
json_credentials = env['GCE_CREDENTIALS']
elif 'GOOGLE_APPLICATION_CREDENTIALS' in env:
path = env['GOOGLE_APPLICATION_CREDENTIALS']
json_credentials = util.read_file(path)
else:
raise util.LauncherError(
'MissingParameter', 'Either GCE_CREDENTIALS or GOOGLE_APPLICATION_CREDENTIALS must be set in env')
return json_credentials, path
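# Usage sketch (assumes GOOGLE_APPLICATION_CREDENTIALS points at a service
# account JSON file):
#
# json_creds, creds_path = get_credentials(os.environ.copy())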
class OnPremLauncher(onprem.AbstractOnpremLauncher):
# Launches a homogeneous cluster of plain GMIs intended for onprem DC/OS
def __init__(self, config: dict, env=None):
creds_string, _ = get_credentials(env)
self.gcp_wrapper = gcp.GcpWrapper(json.loads(creds_string))
self.config = config
@property
def deployment(self):
""" Builds a BareClusterDeployment instance with self.config, but only returns it successfully if the
corresponding real deployment (active machines) exists and doesn't contain any errors.
"""
try:
deployment = gcp.BareClusterDeployment(self.gcp_wrapper, self.config['deployment_name'],
self.config['gce_zone'])
info = deployment.get_info()
errors = info['operation'].get('error')
if errors:
raise util.LauncherError('DeploymentContainsErrors', str(errors))
return deployment
except HttpError as e:
if e.resp.status == 404:
raise util.LauncherError('DeploymentNotFound',
"The deployment you are trying to access doesn't exist") from e
raise e
def create(self) -> dict:
self.key_helper()
node_count = 1 + (self.config['num_masters'] + self.config['num_public_agents']
+ self.config['num_private_agents'])
gcp.BareClusterDeployment.create(
self.gcp_wrapper,
self.config['deployment_name'],
self.config['gce_zone'],
node_count,
self.config['disk_size'],
self.config['disk_type'],
self.config['source_image'],
self.config['machine_type'],
self.config['image_project'],
self.config['ssh_user'],
self.config['ssh_public_key'],
self.config['disable_updates'],
self.config['use_preemptible_vms'],
tags=self.config.get('tags'))
return self.config
def key_helper(self):
""" Generates a public key and a private key and stores them in the config. The public key will be applied to
all the instances in the deployment later on when wait() is called.
"""
if self.config['key_helper']:
private_key, public_key = util.generate_rsa_keypair()
self.config['ssh_private_key'] = private_key.decode()
self.config['ssh_public_key'] = public_key.decode()
    def get_cluster_hosts(self) -> List[Host]:
return list(self.deployment.hosts)[1:]
def get_bootstrap_host(self) -> Host:
return list(self.deployment.hosts)[0]
def wait(self):
""" Waits for the deployment to complete: first, the network that will contain the cluster is deployed. Once
the network is deployed, a firewall for the network and an instance template are deployed. Finally,
once the instance template is deployed, an instance group manager and all its instances are deployed.
"""
self.deployment.wait_for_completion()
def delete(self):
""" Deletes all the resources associated with the deployment (instance template, network, firewall, instance
        group manager and all its instances).
"""
self.deployment.delete()
| dcos/dcos-launch | dcos_launch/gcp.py | Python | apache-2.0 | 4,208 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014 Telefonica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
'''
Created on 16/04/2013
@author: henar
'''
import sys
from xml.dom.minidom import parseString
from xml.etree.ElementTree import tostring
import utils
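# ---------------------------------------------------------------------------
# Configuration (assumed values). The original script uses these names
# without defining them anywhere, so they are presumably injected by the
# test environment. The placeholders below are illustrative only; replace
# them with the values of your deployment before running the script.
# ---------------------------------------------------------------------------
keystone_ip = '127.0.0.1'     # assumed Keystone host
keystone_port = '35357'       # assumed Keystone admin port
user = 'user'                 # assumed credentials
password = 'password'
project = 'project'
vdc = 'vdc-id'                # tenant/VDC identifier (assumed)
domine = '127.0.0.1'          # SDC host ('domine' kept from the original calls)
port = '8080'                 # assumed SDC port
product_name = 'tomcat'       # product under test (assumed)
product_version = '6'         # assumed product version
vm_ip = '10.0.0.1'            # target VM IP (assumed)
vm_fqn = 'fqn.of.the.vm'      # target VM fully-qualified name (assumed)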
token = utils.obtainToken(keystone_ip, keystone_port, user, password, project)
print(token)
headers = {'Content-Type': 'application/xml', 'X-Auth-Token': token, 'Tenant-ID': vdc}
print(headers)
print('Get products in the software catalogue: ')
resource = "/sdc/rest/catalog/product"
data1 = utils.doRequestHttpOperation(domine, port, resource, 'GET', None, headers)
dom = parseString(data1)
try:
product = (dom.getElementsByTagName('product'))[0]
productname = product.firstChild.firstChild.nodeValue
print('First product in the software catalogue: ' + productname)
except (IndexError, AttributeError):
    print("Error in the request to get products")
    sys.exit(1)
print('Get Product Details ' + product_name )
data1 = utils.doRequestHttpOperation(domine, port, "/sdc/rest/catalog/product/" + product_name, 'GET', None, headers)
print(" OK")
print('Get Product Releases ' + product_name )
data1 = utils.doRequestHttpOperation(domine, port, "/sdc/rest/catalog/product/" + product_name + "/release", 'GET',
None, headers)
print(" OK")
print('Get Product Release Info ' + product_name + " " + product_version )
data1 = utils.doRequestHttpOperation(domine, port,
"/sdc/rest/catalog/product/" + product_name + "/release/" + product_version, 'GET', None, headers)
print(" OK")
print('Get Product Attributes ' + product_name )
data1 = utils.doRequestHttpOperation(domine, port, "/sdc/rest/catalog/product/" + product_name + '/attributes', 'GET',
None, headers)
print(" OK")
resource_product_instance = "/sdc/rest/vdc/" + vdc + "/productInstance"
print('Install a product in VM. Product ' + product_name )
productInstanceDto = utils.createProductInstanceDto(vm_ip, vm_fqn, product_name, product_version)
print(tostring(productInstanceDto))
task = utils.doRequestHttpOperation(domine, port, resource_product_instance, 'POST', tostring(productInstanceDto),
                                    headers)
print(task)
status = utils.processTask(domine, port, task)
print(" " + status)
resource_get_info_product_instance = "/sdc/rest/vdc/" + vdc + "/productInstance/" + vm_fqn + '_' + product_name + '_' + product_version
print('Get Product Instance Info. Product ' + product_name )
data = utils.doRequestHttpOperation(domine, port, resource_get_info_product_instance, 'GET', None, headers)
print(data)
status = utils.processProductInstanceStatus(data)
#if status != 'INSTALLED':
# print("Status not correct" + status)
resource_delete_product_instance = "/sdc/rest/vdc/" + vdc + "/productInstance/" + vm_fqn + '_' + product_name + '_' + product_version
print('Get Delete Product Instance ' + product_name )
task = utils.doRequestHttpOperation(domine, port, resource_delete_product_instance, 'DELETE', None, headers)
status = utils.processTask(domine, port, task)
print(" OK")
data = utils.doRequestHttpOperation(domine, port, resource_delete_product_instance, 'GET', None, headers)
statusProduct = utils.processProductInstanceStatus(data)
#if status != 'UNINSTALLED':
# print("Status not correct" + statusProduct)
| telefonicaid/fiware-sdc | automatization_scripts/get_software_catalogue.py | Python | apache-2.0 | 4,111 |
# Copyright 2014-2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from touchdown import ssh
from touchdown.aws.ec2.keypair import KeyPair
from touchdown.aws.iam import InstanceProfile
from touchdown.aws.vpc import SecurityGroup, Subnet
from touchdown.core import argument, errors, serializers
from touchdown.core.plan import Plan, Present
from touchdown.core.resource import Resource
from ..account import BaseAccount
from ..common import SimpleApply, SimpleDescribe, SimpleDestroy
class BlockDevice(Resource):
resource_name = "block_device"
virtual_name = argument.String(field="VirtualName")
device_name = argument.String(field="DeviceName")
disabled = argument.Boolean(field="NoDevice", serializer=serializers.Const(""))
class NetworkInterface(Resource):
resource_name = "network_interface"
public = argument.Boolean(default=False, field="AssociatePublicIpAddress")
security_groups = argument.ResourceList(SecurityGroup, field="Groups")
class Instance(Resource):
resource_name = "ec2_instance"
name = argument.String(min=3, max=128, field="Name", group="tags")
ami = argument.String(field="ImageId")
instance_type = argument.String(field="InstanceType")
key_pair = argument.Resource(KeyPair, field="KeyName")
subnet = argument.Resource(Subnet, field="SubnetId")
instance_profile = argument.Resource(
InstanceProfile,
field="IamInstanceProfile",
serializer=serializers.Dict(Name=serializers.Property("InstanceProfileName")),
)
user_data = argument.String(field="UserData")
network_interfaces = argument.ResourceList(
NetworkInterface, field="NetworkInterfaces"
)
block_devices = argument.ResourceList(
BlockDevice,
field="BlockDeviceMappings",
serializer=serializers.List(serializers.Resource()),
)
security_groups = argument.ResourceList(SecurityGroup, field="SecurityGroupIds")
tags = argument.Dict()
account = argument.Resource(BaseAccount)
class Describe(SimpleDescribe, Plan):
resource = Instance
service_name = "ec2"
api_version = "2015-10-01"
describe_action = "describe_instances"
describe_envelope = "Reservations[].Instances[]"
key = "InstanceId"
def get_describe_filters(self):
return {
"Filters": [
{"Name": "tag:Name", "Values": [self.resource.name]},
{
"Name": "instance-state-name",
"Values": [
"pending",
"running",
"shutting-down",
" stopping",
"stopped",
],
},
]
}
class Apply(SimpleApply, Describe):
create_action = "run_instances"
create_envelope = "Instances[0]"
# create_response = 'id-only'
waiter = "instance_running"
signature = (Present("name"),)
def get_create_serializer(self):
return serializers.Resource(MaxCount=1, MinCount=1)
class Destroy(SimpleDestroy, Describe):
destroy_action = "terminate_instances"
waiter = "instance_terminated"
def get_destroy_serializer(self):
return serializers.Dict(
InstanceIds=serializers.ListOfOne(serializers.Property("InstanceId"))
)
class SSHInstance(ssh.Instance):
resource_name = "ec2_instance"
input = Instance
def get_network_id(self, runner):
# FIXME: We can save on some steps if we only do this once
obj = runner.get_plan(self.adapts).describe_object()
return obj.get("VpcId", None)
def get_serializer(self, runner, **kwargs):
obj = runner.get_plan(self.adapts).describe_object()
if getattr(self.parent, "proxy", None) and self.parent.proxy.instance:
if hasattr(self.parent.proxy.instance, "get_network_id"):
network = self.parent.proxy.instance.get_network_id(runner)
if network == self.get_network_id(runner):
return serializers.Const(obj["PrivateIpAddress"])
if obj.get("PublicDnsName", ""):
return serializers.Const(obj["PublicDnsName"])
if obj.get("PublicIpAddress", ""):
return serializers.Const(obj["PublicIpAddress"])
raise errors.Error("Instance {} not available".format(self.adapts))
| yaybu/touchdown | touchdown/aws/ec2/instance.py | Python | apache-2.0 | 4,911 |
"""Support for monitoring OctoPrint sensors."""
from __future__ import annotations
from datetime import datetime, timedelta
import logging
from pyoctoprintapi import OctoprintJobInfo, OctoprintPrinterInfo
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE, TEMP_CELSIUS
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import OctoprintDataUpdateCoordinator
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
JOB_PRINTING_STATES = ["Printing from SD", "Printing"]
def _is_printer_printing(printer: OctoprintPrinterInfo) -> bool:
    return bool(
        printer
        and printer.state
        and printer.state.flags
        and printer.state.flags.printing
    )
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the available OctoPrint binary sensors."""
coordinator: OctoprintDataUpdateCoordinator = hass.data[DOMAIN][
config_entry.entry_id
]["coordinator"]
device_id = config_entry.unique_id
assert device_id is not None
entities: list[SensorEntity] = []
if coordinator.data["printer"]:
printer_info = coordinator.data["printer"]
types = ["actual", "target"]
for tool in printer_info.temperatures:
for temp_type in types:
entities.append(
OctoPrintTemperatureSensor(
coordinator,
tool.name,
temp_type,
device_id,
)
)
else:
_LOGGER.error("Printer appears to be offline, skipping temperature sensors")
entities.append(OctoPrintStatusSensor(coordinator, device_id))
entities.append(OctoPrintJobPercentageSensor(coordinator, device_id))
entities.append(OctoPrintEstimatedFinishTimeSensor(coordinator, device_id))
entities.append(OctoPrintStartTimeSensor(coordinator, device_id))
async_add_entities(entities)
class OctoPrintSensorBase(CoordinatorEntity, SensorEntity):
"""Representation of an OctoPrint sensor."""
coordinator: OctoprintDataUpdateCoordinator
def __init__(
self,
coordinator: OctoprintDataUpdateCoordinator,
sensor_type: str,
device_id: str,
) -> None:
"""Initialize a new OctoPrint sensor."""
super().__init__(coordinator)
self._device_id = device_id
self._attr_name = f"OctoPrint {sensor_type}"
self._attr_unique_id = f"{sensor_type}-{device_id}"
@property
def device_info(self):
"""Device info."""
return self.coordinator.device_info
class OctoPrintStatusSensor(OctoPrintSensorBase):
"""Representation of an OctoPrint sensor."""
_attr_icon = "mdi:printer-3d"
def __init__(
self, coordinator: OctoprintDataUpdateCoordinator, device_id: str
) -> None:
"""Initialize a new OctoPrint sensor."""
super().__init__(coordinator, "Current State", device_id)
@property
def native_value(self):
"""Return sensor state."""
printer: OctoprintPrinterInfo = self.coordinator.data["printer"]
if not printer:
return None
return printer.state.text
@property
def available(self) -> bool:
"""Return if entity is available."""
        return bool(self.coordinator.last_update_success and self.coordinator.data["printer"])
class OctoPrintJobPercentageSensor(OctoPrintSensorBase):
"""Representation of an OctoPrint sensor."""
_attr_native_unit_of_measurement = PERCENTAGE
_attr_icon = "mdi:file-percent"
def __init__(
self, coordinator: OctoprintDataUpdateCoordinator, device_id: str
) -> None:
"""Initialize a new OctoPrint sensor."""
super().__init__(coordinator, "Job Percentage", device_id)
@property
def native_value(self):
"""Return sensor state."""
job: OctoprintJobInfo = self.coordinator.data["job"]
if not job:
return None
if not (state := job.progress.completion):
return 0
return round(state, 2)
class OctoPrintEstimatedFinishTimeSensor(OctoPrintSensorBase):
"""Representation of an OctoPrint sensor."""
_attr_device_class = SensorDeviceClass.TIMESTAMP
def __init__(
self, coordinator: OctoprintDataUpdateCoordinator, device_id: str
) -> None:
"""Initialize a new OctoPrint sensor."""
super().__init__(coordinator, "Estimated Finish Time", device_id)
@property
def native_value(self) -> datetime | None:
"""Return sensor state."""
job: OctoprintJobInfo = self.coordinator.data["job"]
if (
not job
or not job.progress.print_time_left
or not _is_printer_printing(self.coordinator.data["printer"])
):
return None
read_time = self.coordinator.data["last_read_time"]
return read_time + timedelta(seconds=job.progress.print_time_left)
class OctoPrintStartTimeSensor(OctoPrintSensorBase):
"""Representation of an OctoPrint sensor."""
_attr_device_class = SensorDeviceClass.TIMESTAMP
def __init__(
self, coordinator: OctoprintDataUpdateCoordinator, device_id: str
) -> None:
"""Initialize a new OctoPrint sensor."""
super().__init__(coordinator, "Start Time", device_id)
@property
def native_value(self) -> datetime | None:
"""Return sensor state."""
job: OctoprintJobInfo = self.coordinator.data["job"]
if (
not job
or not job.progress.print_time
or not _is_printer_printing(self.coordinator.data["printer"])
):
return None
read_time = self.coordinator.data["last_read_time"]
return read_time - timedelta(seconds=job.progress.print_time)
class OctoPrintTemperatureSensor(OctoPrintSensorBase):
"""Representation of an OctoPrint sensor."""
_attr_native_unit_of_measurement = TEMP_CELSIUS
_attr_device_class = SensorDeviceClass.TEMPERATURE
_attr_state_class = SensorStateClass.MEASUREMENT
def __init__(
self,
coordinator: OctoprintDataUpdateCoordinator,
tool: str,
temp_type: str,
device_id: str,
) -> None:
"""Initialize a new OctoPrint sensor."""
super().__init__(coordinator, f"{temp_type} {tool} temp", device_id)
self._temp_type = temp_type
self._api_tool = tool
@property
def native_value(self):
"""Return sensor state."""
printer: OctoprintPrinterInfo = self.coordinator.data["printer"]
if not printer:
return None
for temp in printer.temperatures:
if temp.name == self._api_tool:
val = (
temp.actual_temp
if self._temp_type == "actual"
else temp.target_temp
)
if val is None:
return None
return round(val, 2)
return None
@property
def available(self) -> bool:
"""Return if entity is available."""
        return bool(self.coordinator.last_update_success and self.coordinator.data["printer"])
| home-assistant/home-assistant | homeassistant/components/octoprint/sensor.py | Python | apache-2.0 | 7,576 |
import jps
import json
import time
class MessageHolder(object):
def __init__(self):
self._saved_msg = []
def __call__(self, msg):
self._saved_msg.append(msg)
def get_msg(self):
return self._saved_msg
def test_multi_pubsub_once():
holder1 = MessageHolder()
holder2 = MessageHolder()
holder3 = MessageHolder()
sub1 = jps.Subscriber('test_utils1', holder1)
sub2 = jps.Subscriber('test_utils2', holder2)
sub3 = jps.Subscriber('test_utils3', holder3)
pub = jps.utils.JsonMultiplePublisher()
time.sleep(0.1)
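    # A JsonMultiplePublisher fans a single JSON object out to one topic per
    # top-level key, so the three subscribers each receive their own value.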
pub.publish(
'{"test_utils1": "hoge", "test_utils2": {"x": 3}, "test_utils3": 5}')
time.sleep(0.1)
sub1.spin_once()
sub2.spin_once()
sub3.spin_once()
assert len(holder1.get_msg()) == 1
assert json.loads(holder1.get_msg()[0]) == 'hoge'
assert len(holder2.get_msg()) == 1
obj = json.loads(holder2.get_msg()[0])
assert obj['x'] == 3
assert len(holder3.get_msg()) == 1
assert json.loads(holder3.get_msg()[0]) == 5
def test_to_obj():
msg = '{"aa": 1, "bb": ["hoge", "hogi"], "cc": {"cc1" : 50}}'
converted = jps.utils.to_obj(msg)
assert converted.aa == 1
assert converted.bb[0] == 'hoge'
assert converted.bb[1] == 'hogi'
assert len(converted.bb) == 2
assert converted.cc.cc1 == 50
# todo: do
# json = converted.to_json()
# assert json == msg
# todo
def test_to_obj_list_mixed():
    msg = '["hoge", "hogi", {"atr1": "val2", "atr2": 1.0}]'
    bb = jps.utils.to_obj(msg)
    assert len(bb) == 3
assert bb[0] == 'hoge'
assert bb[1] == 'hogi'
assert bb[2].atr1 == 'val2'
assert bb[2].atr2 == 1.0
# json = bb.to_json()
# assert json == msg
def test_to_obj_list():
msg = '[{"hoge": 1}, {"hogi": 2}]'
bb = jps.utils.to_obj(msg)
assert len(bb) == 2
assert bb[0].hoge == 1
assert bb[1].hogi == 2
# todo: list support
# json = bb.to_json()
# assert json == msg
def test_to_obj_simple():
msg = '{"aa": 1, "cc": 3, "bb": 2}'
converted = jps.utils.to_obj(msg)
assert converted.aa == 1
assert converted.bb == 2
assert converted.cc == 3
# works only super simple case
json1 = converted.to_json()
assert json1 == msg
| OTL/jps | test/test_utils.py | Python | apache-2.0 | 2,258 |
# -*- coding:utf-8 -*-
"""
Verion: 1.0
Author: zhangjian
Site: http://iliangqunru.com
File: __init__.py.py
Time: 2017/7/22 2:19
"""
| Xarrow/pySimulatedDNS | dnsCat/__init__.py | Python | apache-2.0 | 139 |
import codecs
import mock
import os
import tempfile
import unittest
from time import strftime
import six
from kinto import config
from kinto import __version__
class ConfigTest(unittest.TestCase):
def test_transpose_parameters_into_template(self):
self.maxDiff = None
template = "kinto.tpl"
dest = tempfile.mktemp()
config.render_template(template, dest,
secret='secret',
storage_backend='storage_backend',
cache_backend='cache_backend',
permission_backend='permission_backend',
storage_url='storage_url',
cache_url='cache_url',
permission_url='permission_url',
kinto_version='kinto_version',
config_file_timestamp='config_file_timestamp')
with codecs.open(dest, 'r', encoding='utf-8') as d:
destination_temp = d.read()
sample_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"test_configuration/test.ini")
with codecs.open(sample_path, 'r', encoding='utf-8') as c:
sample = c.read()
self.assertEqual(destination_temp, sample)
def test_create_destination_directory(self):
dest = os.path.join(tempfile.mkdtemp(), 'config', 'kinto.ini')
config.render_template("kinto.tpl", dest,
secret='secret',
storage_backend='storage_backend',
cache_backend='cache_backend',
permission_backend='permission_backend',
storage_url='storage_url',
cache_url='cache_url',
permission_url='permission_url',
kinto_version='kinto_version',
config_file_timestamp='config_file_timestamp')
self.assertTrue(os.path.exists(dest))
@mock.patch('kinto.config.render_template')
def test_hmac_secret_is_text(self, mocked_render_template):
config.init('kinto.ini', 'postgresql')
args, kwargs = list(mocked_render_template.call_args)
self.assertEquals(type(kwargs['secret']), six.text_type)
@mock.patch('kinto.config.render_template')
def test_init_postgresql_values(self, mocked_render_template):
config.init('kinto.ini', 'postgresql')
args, kwargs = list(mocked_render_template.call_args)
self.assertEquals(args, ('kinto.tpl', 'kinto.ini'))
postgresql_url = "postgres://postgres:postgres@localhost/postgres"
self.assertDictEqual(kwargs, {
'secret': kwargs['secret'],
'storage_backend': 'kinto.core.storage.postgresql',
'cache_backend': 'kinto.core.cache.postgresql',
'permission_backend': 'kinto.core.permission.postgresql',
'storage_url': postgresql_url,
'cache_url': postgresql_url,
'permission_url': postgresql_url,
'kinto_version': __version__,
'config_file_timestamp': strftime('%a, %d %b %Y %H:%M:%S %z')
})
@mock.patch('kinto.config.render_template')
def test_init_redis_values(self, mocked_render_template):
config.init('kinto.ini', 'redis')
args, kwargs = list(mocked_render_template.call_args)
self.assertEquals(args, ('kinto.tpl', 'kinto.ini'))
redis_url = "redis://localhost:6379"
self.maxDiff = None # See the full diff in case of error
self.assertDictEqual(kwargs, {
'secret': kwargs['secret'],
'storage_backend': 'kinto_redis.storage',
'cache_backend': 'kinto_redis.cache',
'permission_backend': 'kinto_redis.permission',
'storage_url': redis_url + '/1',
'cache_url': redis_url + '/2',
'permission_url': redis_url + '/3',
'kinto_version': __version__,
'config_file_timestamp': strftime('%a, %d %b %Y %H:%M:%S %z')
})
@mock.patch('kinto.config.render_template')
def test_init_memory_values(self, mocked_render_template):
config.init('kinto.ini', 'memory')
args, kwargs = list(mocked_render_template.call_args)
self.assertEquals(args, ('kinto.tpl', 'kinto.ini'))
self.assertDictEqual(kwargs, {
'secret': kwargs['secret'],
'storage_backend': 'kinto.core.storage.memory',
'cache_backend': 'kinto.core.cache.memory',
'permission_backend': 'kinto.core.permission.memory',
'storage_url': '',
'cache_url': '',
'permission_url': '',
'kinto_version': __version__,
'config_file_timestamp': strftime('%a, %d %b %Y %H:%M:%S %z')
})
def test_render_template_creates_directory_if_necessary(self):
temp_path = tempfile.mkdtemp()
destination = os.path.join(temp_path, 'config/kinto.ini')
config.render_template('kinto.tpl', destination, **{
'secret': "abcd-ceci-est-un-secret",
'storage_backend': 'kinto.core.storage.memory',
'cache_backend': 'kinto.core.cache.memory',
'permission_backend': 'kinto.core.permission.memory',
'storage_url': '',
'cache_url': '',
'permission_url': '',
'kinto_version': '',
'config_file_timestamp': ''
})
self.assertTrue(os.path.exists(destination))
def test_render_template_works_with_file_in_cwd(self):
temp_path = tempfile.mkdtemp()
os.chdir(temp_path)
config.render_template('kinto.tpl', 'kinto.ini', **{
'secret': "abcd-ceci-est-un-secret",
'storage_backend': 'kinto.core.storage.memory',
'cache_backend': 'kinto.core.cache.memory',
'permission_backend': 'kinto.core.permission.memory',
'storage_url': '',
'cache_url': '',
'permission_url': '',
'kinto_version': '',
'config_file_timestamp': ''
})
self.assertTrue(os.path.exists(
os.path.join(temp_path, 'kinto.ini')
))
| monikagrabowska/osf.io | kinto/tests/test_config.py | Python | apache-2.0 | 6,371 |
class Solution(object):
def containsNearbyAlmostDuplicate(self, nums, k, t):
"""
:type nums: List[int]
:type k: int
:type t: int
:rtype: bool
"""
if k < 1 or t < 0:
return False
dic = {}
t += 1
for i in range(len(nums)):
if i > k:
del dic[nums[i - k - 1] // t]
m = nums[i] // t
if m in dic:
return True
if m - 1 in dic and abs(nums[i] - dic[m - 1]) < t:
return True
if m + 1 in dic and abs(nums[i] - dic[m + 1]) < t:
return True
dic[m] = nums[i]
return False
test = Solution()
print(test.containsNearbyAlmostDuplicate([1, 3, 1], 1, 1))
| rx2130/Leetcode | python/220 Contains Duplicate III.py | Python | apache-2.0 | 777 |
import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_dynamodb2
@mock_dynamodb2
def test_error_on_wrong_value_for_consumed_capacity():
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "job_id", "AttributeType": "S"}],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
with pytest.raises(ClientError) as ex:
table.put_item(Item=item, ReturnConsumedCapacity="Garbage")
err = ex.value.response["Error"]
err["Code"].should.equal("ValidationException")
err["Message"].should.equal(
"1 validation error detected: Value 'Garbage' at 'returnConsumedCapacity' failed to satisfy constraint: Member must satisfy enum value set: [INDEXES, TOTAL, NONE]"
)
@mock_dynamodb2
def test_consumed_capacity_get_unknown_item():
conn = boto3.client("dynamodb", region_name="us-east-1")
conn.create_table(
TableName="test_table",
KeySchema=[{"AttributeName": "u", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "u", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
response = conn.get_item(
TableName="test_table",
Key={"u": {"S": "does_not_exist"}},
ReturnConsumedCapacity="TOTAL",
)
# Should still return ConsumedCapacity, even if it does not return an item
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"].should.equal(
{"TableName": "test_table", "CapacityUnits": 0.5}
)
@mock_dynamodb2
@pytest.mark.parametrize(
"capacity,should_have_capacity,should_have_table",
[
[None, False, False],
["NONE", False, False],
["TOTAL", True, False],
["INDEXES", True, True],
],
)
def test_only_return_consumed_capacity_when_required(
capacity, should_have_capacity, should_have_table
):
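    # ReturnConsumedCapacity semantics: NONE (the default) omits the field,
    # TOTAL reports aggregate capacity units, and INDEXES additionally
    # breaks the usage down per table and per secondary index.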
resource = boto3.resource("dynamodb", region_name="ap-northeast-3")
client = boto3.client("dynamodb", region_name="ap-northeast-3")
client.create_table(
TableName="jobs",
KeySchema=[{"AttributeName": "job_id", "KeyType": "HASH"}],
LocalSecondaryIndexes=[
{
"IndexName": "job_name-index",
"KeySchema": [{"AttributeName": "job_name", "KeyType": "HASH"}],
"Projection": {"ProjectionType": "ALL"},
}
],
AttributeDefinitions=[
{"AttributeName": "job_id", "AttributeType": "S"},
{"AttributeName": "job_name", "AttributeType": "S"},
],
ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)
table = resource.Table("jobs")
item = {"job_id": "asdasdasd", "expires_at": "1"}
# PUT_ITEM
args = {"Item": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.put_item(**args)
validate_response(response, should_have_capacity, should_have_table)
# GET_ITEM
args = {"Key": item}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = table.get_item(**args)
validate_response(response, should_have_capacity, should_have_table, value=0.5)
# SCAN
args = {"TableName": "jobs"}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table)
# SCAN_INDEX
args["IndexName"] = "job_name-index"
response = client.scan(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
# QUERY
args = {
"TableName": "jobs",
"KeyConditionExpression": "job_id = :id",
"ExpressionAttributeValues": {":id": {"S": "asdasdasd"}},
}
if capacity:
args["ReturnConsumedCapacity"] = capacity
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table)
# QUERY_INDEX
args["IndexName"] = "job_name-index"
response = client.query(**args)
validate_response(response, should_have_capacity, should_have_table, is_index=True)
def validate_response(
response, should_have_capacity, should_have_table, is_index=False, value=1.0
):
if should_have_capacity:
response.should.have.key("ConsumedCapacity")
response["ConsumedCapacity"]["TableName"].should.equal("jobs")
response["ConsumedCapacity"]["CapacityUnits"].should.equal(value)
if should_have_table:
response["ConsumedCapacity"]["Table"].should.equal({"CapacityUnits": value})
if is_index:
response["ConsumedCapacity"].should.have.key("LocalSecondaryIndexes")
response["ConsumedCapacity"]["LocalSecondaryIndexes"].should.equal(
{"job_name-index": {"CapacityUnits": value}}
)
else:
response.shouldnt.have.key("ConsumedCapacity")
| spulec/moto | tests/test_dynamodb2/test_dynamodb_consumedcapacity.py | Python | apache-2.0 | 5,333 |
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from rdmo.core.exports import XMLResponse
from rdmo.core.permissions import HasModelPermission
from rdmo.core.views import ChoicesViewSet
from rdmo.core.viewsets import CopyModelMixin
from .models import Condition
from .renderers import ConditionRenderer
from .serializers.export import ConditionExportSerializer
from .serializers.v1 import ConditionIndexSerializer, ConditionSerializer
class ConditionViewSet(CopyModelMixin, ModelViewSet):
permission_classes = (HasModelPermission, )
queryset = Condition.objects.select_related('source', 'target_option') \
.prefetch_related('optionsets', 'questionsets', 'questions', 'tasks')
serializer_class = ConditionSerializer
filter_backends = (DjangoFilterBackend,)
filterset_fields = (
'uri',
'key',
'source',
'relation',
'target_text',
'target_option'
)
@action(detail=False)
def index(self, request):
queryset = Condition.objects.select_related('source', 'target_option')
serializer = ConditionIndexSerializer(queryset, many=True)
return Response(serializer.data)
@action(detail=False, permission_classes=[HasModelPermission])
def export(self, request):
serializer = ConditionExportSerializer(self.get_queryset(), many=True)
xml = ConditionRenderer().render(serializer.data)
return XMLResponse(xml, name='conditions')
@action(detail=True, url_path='export', permission_classes=[HasModelPermission])
def detail_export(self, request, pk=None):
serializer = ConditionExportSerializer(self.get_object())
xml = ConditionRenderer().render([serializer.data])
return XMLResponse(xml, name=self.get_object().key)
class RelationViewSet(ChoicesViewSet):
permission_classes = (IsAuthenticated, )
queryset = Condition.RELATION_CHOICES
| rdmorganiser/rdmo | rdmo/conditions/viewsets.py | Python | apache-2.0 | 2,141 |