code | repo_name | path | language | license | size
---|---|---|---|---|---
stringlengths 3 to 1.05M | stringlengths 4 to 116 | stringlengths 4 to 991 | stringclasses 9 values | stringclasses 15 values | int32 3 to 1.05M
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2017 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from contextlib import contextmanager
from xml.etree.ElementTree import Element, SubElement, fromstring, tostring
from ansible.module_utils.connection import exec_command
NS_MAP = {'nc': "urn:ietf:params:xml:ns:netconf:base:1.0"}
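# Illustrative sketch (added for clarity, not taken from this file): send_request()
# below parses error replies shaped roughly like the <rpc-error> structure defined in
# RFC 6241, for example:
#
#   <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
#     <rpc-error>
#       <error-severity>warning</error-severity>
#       <error-message>example warning text</error-message>
#     </rpc-error>
#   </rpc-reply>
#
# Replies whose errors are all of severity 'warning' are collected and returned;
# any other severity fails the module.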
def send_request(module, obj, check_rc=True):
request = tostring(obj)
rc, out, err = exec_command(module, request)
if rc != 0 and check_rc:
error_root = fromstring(err)
fake_parent = Element('root')
fake_parent.append(error_root)
error_list = fake_parent.findall('.//nc:rpc-error', NS_MAP)
if not error_list:
module.fail_json(msg=str(err))
warnings = []
for rpc_error in error_list:
message = rpc_error.find('./nc:error-message', NS_MAP).text
severity = rpc_error.find('./nc:error-severity', NS_MAP).text
if severity == 'warning':
warnings.append(message)
else:
module.fail_json(msg=str(err))
return warnings
return fromstring(out)
def children(root, iterable):
for item in iterable:
try:
ele = SubElement(ele, item)
except NameError:
ele = SubElement(root, item)
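# Note added for clarity: children() nests each item inside the previous one, so e.g.
# children(Element('lock'), ('target', 'candidate')) produces
# <lock><target><candidate/></target></lock>, which is the element shape the
# lock/unlock/validate/get-config helpers below rely on.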
def lock(module, target='candidate'):
obj = Element('lock')
children(obj, ('target', target))
return send_request(module, obj)
def unlock(module, target='candidate'):
obj = Element('unlock')
children(obj, ('target', target))
return send_request(module, obj)
def commit(module):
return send_request(module, Element('commit'))
def discard_changes(module):
return send_request(module, Element('discard-changes'))
def validate(module):
obj = Element('validate')
children(obj, ('source', 'candidate'))
return send_request(module, obj)
def get_config(module, source='running', filter=None):
obj = Element('get-config')
children(obj, ('source', source))
children(obj, ('filter', filter))
return send_request(module, obj)
@contextmanager
def locked_config(module):
try:
lock(module)
yield
finally:
unlock(module)
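# Minimal usage sketch (illustrative only; 'module' is assumed to be an AnsibleModule
# whose connection speaks NETCONF -- nothing below appears in the original file):
#
#   with locked_config(module):
#       reply = get_config(module, source='candidate')
#       # ...apply edits, then:
#       validate(module)
#       commit(module)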
| tux-00/ansible | lib/ansible/module_utils/netconf.py | Python | gpl-3.0 | 3,772 |
require 'active_support/core_ext/array'
require 'active_support/core_ext/hash/except'
require 'active_support/core_ext/kernel/singleton_class'
require 'active_support/core_ext/object/blank'
module ActiveRecord
# = Active Record Named \Scopes
module NamedScope
extend ActiveSupport::Concern
module ClassMethods
# Returns an anonymous \scope.
#
# posts = Post.scoped
# posts.size # Fires "select count(*) from posts" and returns the count
# posts.each {|p| puts p.name } # Fires "select * from posts" and loads post objects
#
# fruits = Fruit.scoped
# fruits = fruits.where(:colour => 'red') if options[:red_only]
# fruits = fruits.limit(10) if limited?
#
# Anonymous \scopes tend to be useful when procedurally generating complex
# queries, where passing intermediate values (\scopes) around as first-class
# objects is convenient.
#
# You can define a \scope that applies to all finders using
# ActiveRecord::Base.default_scope.
def scoped(options = nil)
if options
scoped.apply_finder_options(options)
else
current_scoped_methods ? relation.merge(current_scoped_methods) : relation.clone
end
end
def scopes
read_inheritable_attribute(:scopes) || write_inheritable_attribute(:scopes, {})
end
# Adds a class method for retrieving and querying objects. A \scope represents a narrowing of a database query,
# such as <tt>where(:color => :red).select('shirts.*').includes(:washing_instructions)</tt>.
#
# class Shirt < ActiveRecord::Base
# scope :red, where(:color => 'red')
# scope :dry_clean_only, joins(:washing_instructions).where('washing_instructions.dry_clean_only = ?', true)
# end
#
# The above calls to <tt>scope</tt> define class methods Shirt.red and Shirt.dry_clean_only. Shirt.red,
# in effect, represents the query <tt>Shirt.where(:color => 'red')</tt>.
#
# Unlike <tt>Shirt.find(...)</tt>, however, the object returned by Shirt.red is not an Array; it
# resembles the association object constructed by a <tt>has_many</tt> declaration. For instance,
# you can invoke <tt>Shirt.red.first</tt>, <tt>Shirt.red.count</tt>, <tt>Shirt.red.where(:size => 'small')</tt>.
# Also, just as with the association objects, named \scopes act like an Array, implementing Enumerable;
# <tt>Shirt.red.each(&block)</tt>, <tt>Shirt.red.first</tt>, and <tt>Shirt.red.inject(memo, &block)</tt>
# all behave as if Shirt.red really was an Array.
#
# These named \scopes are composable. For instance, <tt>Shirt.red.dry_clean_only</tt> will produce
# all shirts that are both red and dry clean only.
# Nested finds and calculations also work with these compositions: <tt>Shirt.red.dry_clean_only.count</tt>
# returns the number of garments for which these criteria obtain. Similarly with
# <tt>Shirt.red.dry_clean_only.average(:thread_count)</tt>.
#
# All \scopes are available as class methods on the ActiveRecord::Base descendant upon which
# the \scopes were defined. But they are also available to <tt>has_many</tt> associations. If,
#
# class Person < ActiveRecord::Base
# has_many :shirts
# end
#
# then <tt>elton.shirts.red.dry_clean_only</tt> will return all of Elton's red, dry clean
# only shirts.
#
# Named \scopes can also be procedural:
#
# class Shirt < ActiveRecord::Base
# scope :colored, lambda {|color| where(:color => color) }
# end
#
# In this example, <tt>Shirt.colored('puce')</tt> finds all puce shirts.
#
# Named \scopes can also have extensions, just as with <tt>has_many</tt> declarations:
#
# class Shirt < ActiveRecord::Base
# scope :red, where(:color => 'red') do
# def dom_id
# 'red_shirts'
# end
# end
# end
#
# Scopes can also be used while creating/building a record.
#
# class Article < ActiveRecord::Base
# scope :published, where(:published => true)
# end
#
# Article.published.new.published # => true
# Article.published.create.published # => true
def scope(name, scope_options = {}, &block)
name = name.to_sym
valid_scope_name?(name)
extension = Module.new(&block) if block_given?
scopes[name] = lambda do |*args|
options = scope_options.is_a?(Proc) ? scope_options.call(*args) : scope_options
relation = if options.is_a?(Hash)
scoped.apply_finder_options(options)
elsif options
scoped.merge(options)
else
scoped
end
extension ? relation.extending(extension) : relation
end
singleton_class.send(:redefine_method, name, &scopes[name])
end
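# Sketch added for clarity (not part of the original source): a call such as
#
#   scope :red, where(:color => 'red')
#
# ends up defining a class method roughly equivalent to
#
#   def self.red
#     scoped.merge(where(:color => 'red'))
#   end
#
# because the lambda stored in +scopes+ merges the given relation into the current scope.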
def named_scope(*args, &block)
ActiveSupport::Deprecation.warn("Base.named_scope has been deprecated, please use Base.scope instead", caller)
scope(*args, &block)
end
protected
def valid_scope_name?(name)
if !scopes[name] && respond_to?(name, true)
logger.warn "Creating scope :#{name}. " \
"Overwriting existing method #{self.name}.#{name}."
end
end
end
end
end
| mzemel/kpsu.org | vendor/gems/ruby/1.8/gems/activerecord-3.0.3/lib/active_record/named_scope.rb | Ruby | gpl-3.0 | 5,522 |
// *******
// This is an internal file of the IMMERSED BOUNDARY implementation
// It should not be included by any main Espresso routines
// Functions to be exported for Espresso are in ibm_main.hpp
#include "config.hpp"
#ifdef IMMERSED_BOUNDARY
#include <mpi.h>
#include "cells.hpp"
#include "grid.hpp"
#include "communication.hpp"
#include "particle_data.hpp"
#include "integrate.hpp"
#include "immersed_boundary/ibm_cuda_interface.hpp"
/// MPI tags for sending velocities and receiving particles
#define REQ_CUDAIBMSENDVEL 0xcc03
#define REQ_CUDAIBMGETPART 0xcc04
// Variables for communication
IBM_CUDA_ParticleDataInput *IBM_ParticleDataInput_host = NULL;
IBM_CUDA_ParticleDataOutput *IBM_ParticleDataOutput_host = NULL;
/*****************
IBM_cuda_mpi_get_particles
Gather particle positions on the master node in order to communicate them to GPU
We transfer all particles (real and virtual), but actually we would only need the virtual ones
Room for improvement...
*****************/
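// Communication pattern used below (summary comment added for clarity):
// 1. every node reports its local particle count to node 0 via MPI_Gather,
// 2. node 0 copies its own local cells directly into IBM_ParticleDataInput_host,
// 3. every other node packs pos/force/isVirtual into a temporary buffer and sends it
//    as raw MPI_BYTE data, which node 0 receives and appends in rank order.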
void IBM_cuda_mpi_get_particles_slave();
// Analogous to the usual cuda_mpi_get_particles function
void IBM_cuda_mpi_get_particles()
{
int g, pnode;
Cell *cell;
int c;
MPI_Status status;
int *sizes = (int*) Utils::malloc(sizeof(int)*n_nodes);
int n_part = cells_get_n_particles();
/* first collect number of particles on each node */
MPI_Gather(&n_part, 1, MPI_INT, sizes, 1, MPI_INT, 0, comm_cart);
if(this_node > 0){
/* call slave functions to provide the slave data */
IBM_cuda_mpi_get_particles_slave();
}
else {
/* master: fetch particle information into 'result' */
g = 0;
for (pnode = 0; pnode < n_nodes; pnode++) {
if (sizes[pnode] > 0) {
if (pnode == 0) {
for (c = 0; c < local_cells.n; c++) {
Particle *part;
int npart;
int dummy[3] = {0,0,0};
double pos[3];
cell = local_cells.cell[c];
part = cell->part;
npart = cell->n;
for (int i=0;i<npart;i++) {
memmove(pos, part[i].r.p, 3*sizeof(double));
fold_position(pos, dummy);
IBM_ParticleDataInput_host[i+g].pos[0] = (float)pos[0];
IBM_ParticleDataInput_host[i+g].pos[1] = (float)pos[1];
IBM_ParticleDataInput_host[i+g].pos[2] = (float)pos[2];
IBM_ParticleDataInput_host[i+g].f[0] = (float)part[i].f.f[0];
IBM_ParticleDataInput_host[i+g].f[1] = (float)part[i].f.f[1];
IBM_ParticleDataInput_host[i+g].f[2] = (float)part[i].f.f[2];
IBM_ParticleDataInput_host[i+g].isVirtual = part[i].p.isVirtual;
}
g += npart;
}
}
else {
MPI_Recv(&IBM_ParticleDataInput_host[g], sizes[pnode]*sizeof(IBM_CUDA_ParticleDataInput), MPI_BYTE, pnode, REQ_CUDAIBMGETPART,
comm_cart, &status);
g += sizes[pnode];
}
}
}
}
COMM_TRACE(fprintf(stderr, "%d: finished get\n", this_node));
free(sizes);
}
void IBM_cuda_mpi_get_particles_slave()
{
int n_part;
int g;
IBM_CUDA_ParticleDataInput *particle_input_sl;
Cell *cell;
int c, i;
n_part = cells_get_n_particles();
COMM_TRACE(fprintf(stderr, "%d: get_particles_slave, %d particles\n", this_node, n_part));
if (n_part > 0) {
/* get (unsorted) particle information as an array of type 'particle' */
/* then get the particle information */
// particle_data_host_sl = (IBM_CUDA_ParticleDataInput*) Utils::malloc(n_part*sizeof(IBM_CUDA_ParticleData));
particle_input_sl = new IBM_CUDA_ParticleDataInput[n_part];
g = 0;
for (c = 0; c < local_cells.n; c++) {
Particle *part;
int npart;
int dummy[3] = {0,0,0};
double pos[3];
cell = local_cells.cell[c];
part = cell->part;
npart = cell->n;
for (i=0;i<npart;i++) {
memmove(pos, part[i].r.p, 3*sizeof(double));
fold_position(pos, dummy);
particle_input_sl[i+g].pos[0] = (float)pos[0];
particle_input_sl[i+g].pos[1] = (float)pos[1];
particle_input_sl[i+g].pos[2] = (float)pos[2];
particle_input_sl[i+g].f[0] = (float)part[i].f.f[0];
particle_input_sl[i+g].f[1] = (float)part[i].f.f[1];
particle_input_sl[i+g].f[2] = (float)part[i].f.f[2];
particle_input_sl[i+g].isVirtual = part[i].p.isVirtual;
}
g+=npart;
}
/* and send it back to the master node */
MPI_Send(particle_input_sl, n_part*sizeof(IBM_CUDA_ParticleDataInput), MPI_BYTE, 0, REQ_CUDAIBMGETPART, comm_cart);
delete []particle_input_sl;
}
}
/*****************
IBM_cuda_mpi_send_velocities
Particle velocities have been communicated from GPU, now transmit to all nodes
******************/
// Analogous to cuda_mpi_send_forces
void IBM_cuda_mpi_send_velocities_slave();
void IBM_cuda_mpi_send_velocities()
{
int n_part;
int g, pnode;
Cell *cell;
int c;
int *sizes;
sizes = (int *) Utils::malloc(sizeof(int)*n_nodes);
n_part = cells_get_n_particles();
/* first collect number of particles on each node */
MPI_Gather(&n_part, 1, MPI_INT, sizes, 1, MPI_INT, 0, comm_cart);
/* call slave functions to provide the slave data */
if(this_node > 0) {
IBM_cuda_mpi_send_velocities_slave();
}
else{
/* fetch particle information into 'result' */
g = 0;
for (pnode = 0; pnode < n_nodes; pnode++) {
if (sizes[pnode] > 0) {
if (pnode == 0) {
for (c = 0; c < local_cells.n; c++) {
int npart;
cell = local_cells.cell[c];
npart = cell->n;
for (int i=0;i<npart;i++)
{
if ( cell->part[i].p.isVirtual )
{
cell->part[i].m.v[0] = (double)IBM_ParticleDataOutput_host[(i+g)].v[0];
cell->part[i].m.v[1] = (double)IBM_ParticleDataOutput_host[(i+g)].v[1];
cell->part[i].m.v[2] = (double)IBM_ParticleDataOutput_host[(i+g)].v[2];
}
}
g += npart;
}
}
else {
/* and send it back to the slave node */
MPI_Send(&IBM_ParticleDataOutput_host[g], sizes[pnode]*sizeof(IBM_CUDA_ParticleDataOutput), MPI_BYTE, pnode, REQ_CUDAIBMSENDVEL, comm_cart);
g += sizes[pnode];
}
}
}
}
COMM_TRACE(fprintf(stderr, "%d: finished send\n", this_node));
free(sizes);
}
void IBM_cuda_mpi_send_velocities_slave()
{
Cell *cell;
int c, i;
MPI_Status status;
const int n_part = cells_get_n_particles();
COMM_TRACE(fprintf(stderr, "%d: send_particles_slave, %d particles\n", this_node, n_part));
if (n_part > 0) {
int g = 0;
IBM_CUDA_ParticleDataOutput *output_sl = new IBM_CUDA_ParticleDataOutput[n_part];
MPI_Recv(output_sl, n_part*sizeof(IBM_CUDA_ParticleDataOutput), MPI_BYTE, 0, REQ_CUDAIBMSENDVEL,
comm_cart, &status);
for (c = 0; c < local_cells.n; c++) {
int npart;
cell = local_cells.cell[c];
npart = cell->n;
for (i=0;i<npart;i++)
{
if ( cell->part[i].p.isVirtual )
{
cell->part[i].m.v[0] = (double)output_sl[(i+g)].v[0];
cell->part[i].m.v[1] = (double)output_sl[(i+g)].v[1];
cell->part[i].m.v[2] = (double)output_sl[(i+g)].v[2];
}
}
g += npart;
}
delete []output_sl;
}
}
#endif | sehrhardt/espresso | src/core/immersed_boundary/ibm_cuda_interface.cpp | C++ | gpl-3.0 | 7,514 |
# -*- coding: utf-8 -*-
##################################################################################
#
# Copyright (c) 2005-2006 Axelor SARL. (http://www.axelor.com)
# and 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# $Id: hr.py 4656 2006-11-24 09:58:42Z Cyp $
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import datetime
import math
import time
from operator import attrgetter
from openerp.exceptions import Warning
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_holidays_status(osv.osv):
_name = "hr.holidays.status"
_description = "Leave Type"
def get_days(self, cr, uid, ids, employee_id, context=None):
result = dict((id, dict(max_leaves=0, leaves_taken=0, remaining_leaves=0,
virtual_remaining_leaves=0)) for id in ids)
holiday_ids = self.pool['hr.holidays'].search(cr, uid, [('employee_id', '=', employee_id),
('state', 'in', ['confirm', 'validate1', 'validate']),
('holiday_status_id', 'in', ids)
], context=context)
for holiday in self.pool['hr.holidays'].browse(cr, uid, holiday_ids, context=context):
status_dict = result[holiday.holiday_status_id.id]
if holiday.type == 'add':
status_dict['virtual_remaining_leaves'] += holiday.number_of_days_temp
if holiday.state == 'validate':
status_dict['max_leaves'] += holiday.number_of_days_temp
status_dict['remaining_leaves'] += holiday.number_of_days_temp
elif holiday.type == 'remove': # number of days is negative
status_dict['virtual_remaining_leaves'] -= holiday.number_of_days_temp
if holiday.state == 'validate':
status_dict['leaves_taken'] += holiday.number_of_days_temp
status_dict['remaining_leaves'] -= holiday.number_of_days_temp
return result
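# Shape of the value returned by get_days() (illustrative comment, values made up):
#   {7: {'max_leaves': 10.0, 'leaves_taken': 3.0,
#        'remaining_leaves': 7.0, 'virtual_remaining_leaves': 6.0}}
# where 'virtual_remaining_leaves' also subtracts requests that are still waiting
# for approval.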
def _user_left_days(self, cr, uid, ids, name, args, context=None):
employee_id = False
if context and 'employee_id' in context:
employee_id = context['employee_id']
else:
employee_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context)
if employee_ids:
employee_id = employee_ids[0]
if employee_id:
res = self.get_days(cr, uid, ids, employee_id, context=context)
else:
res = dict((res_id, {'leaves_taken': 0, 'remaining_leaves': 0, 'max_leaves': 0}) for res_id in ids)
return res
_columns = {
'name': fields.char('Leave Type', size=64, required=True, translate=True),
'categ_id': fields.many2one('calendar.event.type', 'Meeting Type',
help='Once a leave is validated, Odoo will create a corresponding meeting of this type in the calendar.'),
'color_name': fields.selection([('red', 'Red'),('blue','Blue'), ('lightgreen', 'Light Green'), ('lightblue','Light Blue'), ('lightyellow', 'Light Yellow'), ('magenta', 'Magenta'),('lightcyan', 'Light Cyan'),('black', 'Black'),('lightpink', 'Light Pink'),('brown', 'Brown'),('violet', 'Violet'),('lightcoral', 'Light Coral'),('lightsalmon', 'Light Salmon'),('lavender', 'Lavender'),('wheat', 'Wheat'),('ivory', 'Ivory')],'Color in Report', required=True, help='This color will be used in the leaves summary located in Reporting\Leaves by Department.'),
'limit': fields.boolean('Allow to Override Limit', help='If you select this check box, the system allows the employees to take more leaves than the available ones for this type and will not take them into account for the "Remaining Legal Leaves" defined on the employee form.'),
'active': fields.boolean('Active', help="If the active field is set to false, it will allow you to hide the leave type without removing it."),
'max_leaves': fields.function(_user_left_days, string='Maximum Allowed', help='This value is given by the sum of all holidays requests with a positive value.', multi='user_left_days'),
'leaves_taken': fields.function(_user_left_days, string='Leaves Already Taken', help='This value is given by the sum of all holidays requests with a negative value.', multi='user_left_days'),
'remaining_leaves': fields.function(_user_left_days, string='Remaining Leaves', help='Maximum Leaves Allowed - Leaves Already Taken', multi='user_left_days'),
'virtual_remaining_leaves': fields.function(_user_left_days, string='Virtual Remaining Leaves', help='Maximum Leaves Allowed - Leaves Already Taken - Leaves Waiting Approval', multi='user_left_days'),
'double_validation': fields.boolean('Apply Double Validation', help="When selected, the Allocation/Leave Requests for this type require a second validation to be approved."),
}
_defaults = {
'color_name': 'red',
'active': True,
}
def name_get(self, cr, uid, ids, context=None):
if context is None:
context = {}
if not context.get('employee_id',False):
# leave counts are based on employee_id and would be inaccurate if not based on the correct employee
return super(hr_holidays_status, self).name_get(cr, uid, ids, context=context)
res = []
for record in self.browse(cr, uid, ids, context=context):
name = record.name
if not record.limit:
name = name + (' (%g/%g)' % (record.leaves_taken or 0.0, record.max_leaves or 0.0))
res.append((record.id, name))
return res
class hr_holidays(osv.osv):
_name = "hr.holidays"
_description = "Leave"
_order = "type desc, date_from asc"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_track = {
'state': {
'hr_holidays.mt_holidays_approved': lambda self, cr, uid, obj, ctx=None: obj.state == 'validate',
'hr_holidays.mt_holidays_refused': lambda self, cr, uid, obj, ctx=None: obj.state == 'refuse',
'hr_holidays.mt_holidays_confirmed': lambda self, cr, uid, obj, ctx=None: obj.state == 'confirm',
},
}
def _employee_get(self, cr, uid, context=None):
emp_id = context.get('default_employee_id', False)
if emp_id:
return emp_id
ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context)
if ids:
return ids[0]
return False
def _compute_number_of_days(self, cr, uid, ids, name, args, context=None):
result = {}
for hol in self.browse(cr, uid, ids, context=context):
if hol.type=='remove':
result[hol.id] = -hol.number_of_days_temp
else:
result[hol.id] = hol.number_of_days_temp
return result
def _get_can_reset(self, cr, uid, ids, name, arg, context=None):
"""User can reset a leave request if it is its own leave request or if
he is an Hr Manager. """
user = self.pool['res.users'].browse(cr, uid, uid, context=context)
group_hr_manager_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'group_hr_manager')[1]
if group_hr_manager_id in [g.id for g in user.groups_id]:
return dict.fromkeys(ids, True)
result = dict.fromkeys(ids, False)
for holiday in self.browse(cr, uid, ids, context=context):
if holiday.employee_id and holiday.employee_id.user_id and holiday.employee_id.user_id.id == uid:
result[holiday.id] = True
return result
def _check_date(self, cr, uid, ids, context=None):
for holiday in self.browse(cr, uid, ids, context=context):
domain = [
('date_from', '<=', holiday.date_to),
('date_to', '>=', holiday.date_from),
('employee_id', '=', holiday.employee_id.id),
('id', '!=', holiday.id),
('state', 'not in', ['cancel', 'refuse']),
]
nholidays = self.search_count(cr, uid, domain, context=context)
if nholidays:
return False
return True
_check_holidays = lambda self, cr, uid, ids, context=None: self.check_holidays(cr, uid, ids, context=context)
_columns = {
'name': fields.char('Description', size=64),
'state': fields.selection([('draft', 'To Submit'), ('cancel', 'Cancelled'),('confirm', 'To Approve'), ('refuse', 'Refused'), ('validate1', 'Second Approval'), ('validate', 'Approved')],
'Status', readonly=True, track_visibility='onchange', copy=False,
help='The status is set to \'To Submit\', when a holiday request is created.\
\nThe status is \'To Approve\', when holiday request is confirmed by user.\
\nThe status is \'Refused\', when holiday request is refused by manager.\
\nThe status is \'Approved\', when holiday request is approved by manager.'),
'user_id':fields.related('employee_id', 'user_id', type='many2one', relation='res.users', string='User', store=True),
'date_from': fields.datetime('Start Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, select=True, copy=False),
'date_to': fields.datetime('End Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, copy=False),
'holiday_status_id': fields.many2one("hr.holidays.status", "Leave Type", required=True,readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'employee_id': fields.many2one('hr.employee', "Employee", select=True, invisible=False, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'manager_id': fields.many2one('hr.employee', 'First Approval', invisible=False, readonly=True, copy=False,
help='This area is automatically filled by the user who validates the leave'),
'notes': fields.text('Reasons',readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'number_of_days_temp': fields.float('Allocation', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, copy=False),
'number_of_days': fields.function(_compute_number_of_days, string='Number of Days', store=True),
'meeting_id': fields.many2one('calendar.event', 'Meeting'),
'type': fields.selection([('remove','Leave Request'),('add','Allocation Request')], 'Request Type', required=True, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help="Choose 'Leave Request' if someone wants to take an off-day. \nChoose 'Allocation Request' if you want to increase the number of leaves available for someone", select=True),
'parent_id': fields.many2one('hr.holidays', 'Parent'),
'linked_request_ids': fields.one2many('hr.holidays', 'parent_id', 'Linked Requests',),
'department_id':fields.related('employee_id', 'department_id', string='Department', type='many2one', relation='hr.department', readonly=True, store=True),
'category_id': fields.many2one('hr.employee.category', "Employee Tag", help='Category of Employee', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'holiday_type': fields.selection([('employee','By Employee'),('category','By Employee Tag')], 'Allocation Mode', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help='By Employee: Allocation/Request for individual Employee, By Employee Tag: Allocation/Request for group of employees in category', required=True),
'manager_id2': fields.many2one('hr.employee', 'Second Approval', readonly=True, copy=False,
help='This area is automatically filled by the user who validates the leave at the second level (if the leave type requires a second validation)'),
'double_validation': fields.related('holiday_status_id', 'double_validation', type='boolean', relation='hr.holidays.status', string='Apply Double Validation'),
'can_reset': fields.function(
_get_can_reset,
type='boolean'),
}
_defaults = {
'employee_id': _employee_get,
'state': 'confirm',
'type': 'remove',
'user_id': lambda obj, cr, uid, context: uid,
'holiday_type': 'employee'
}
_constraints = [
(_check_date, 'You can not have 2 leaves that overlaps on same day!', ['date_from','date_to']),
(_check_holidays, 'The number of remaining leaves is not sufficient for this leave type', ['state','number_of_days_temp'])
]
_sql_constraints = [
('type_value', "CHECK( (holiday_type='employee' AND employee_id IS NOT NULL) or (holiday_type='category' AND category_id IS NOT NULL))",
"The employee or employee category of this request is missing. Please make sure that your user login is linked to an employee."),
('date_check2', "CHECK ( (type='add') OR (date_from <= date_to))", "The start date must be anterior to the end date."),
('date_check', "CHECK ( number_of_days_temp >= 0 )", "The number of days must be greater than 0."),
]
def _create_resource_leave(self, cr, uid, leaves, context=None):
'''This method creates an entry in the resource calendar leaves object when a leave request is validated.'''
obj_res_leave = self.pool.get('resource.calendar.leaves')
for leave in leaves:
vals = {
'name': leave.name,
'date_from': leave.date_from,
'holiday_id': leave.id,
'date_to': leave.date_to,
'resource_id': leave.employee_id.resource_id.id,
'calendar_id': leave.employee_id.resource_id.calendar_id.id
}
obj_res_leave.create(cr, uid, vals, context=context)
return True
def _remove_resource_leave(self, cr, uid, ids, context=None):
'''This method removes the resource calendar leave entries when a leave request is cancelled/removed.'''
obj_res_leave = self.pool.get('resource.calendar.leaves')
leave_ids = obj_res_leave.search(cr, uid, [('holiday_id', 'in', ids)], context=context)
return obj_res_leave.unlink(cr, uid, leave_ids, context=context)
def onchange_type(self, cr, uid, ids, holiday_type, employee_id=False, context=None):
result = {}
if holiday_type == 'employee' and not employee_id:
ids_employee = self.pool.get('hr.employee').search(cr, uid, [('user_id','=', uid)])
if ids_employee:
result['value'] = {
'employee_id': ids_employee[0]
}
elif holiday_type != 'employee':
result['value'] = {
'employee_id': False
}
return result
def onchange_employee(self, cr, uid, ids, employee_id):
result = {'value': {'department_id': False}}
if employee_id:
employee = self.pool.get('hr.employee').browse(cr, uid, employee_id)
result['value'] = {'department_id': employee.department_id.id}
return result
# TODO: can be improved using resource calendar method
def _get_number_of_days(self, date_from, date_to):
"""Returns a float equals to the timedelta between two dates given as string."""
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
from_dt = datetime.datetime.strptime(date_from, DATETIME_FORMAT)
to_dt = datetime.datetime.strptime(date_to, DATETIME_FORMAT)
timedelta = to_dt - from_dt
diff_day = timedelta.days + float(timedelta.seconds) / 86400
return diff_day
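# Worked example (illustrative): between '2017-01-01 08:00:00' and
# '2017-01-02 20:00:00' the timedelta is 1 day and 43200 seconds, so
# diff_day = 1 + 43200/86400.0 = 1.5.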
def unlink(self, cr, uid, ids, context=None):
for rec in self.browse(cr, uid, ids, context=context):
if rec.state not in ['draft', 'cancel', 'confirm']:
raise osv.except_osv(_('Warning!'),_('You cannot delete a leave which is in %s state.')%(rec.state))
return super(hr_holidays, self).unlink(cr, uid, ids, context)
def onchange_date_from(self, cr, uid, ids, date_to, date_from):
"""
If there are no date set for date_to, automatically set one 8 hours later than
the date_from.
Also update the number_of_days.
"""
# date_to has to be greater than date_from
if (date_from and date_to) and (date_from > date_to):
raise osv.except_osv(_('Warning!'),_('The start date must be anterior to the end date.'))
result = {'value': {}}
# No date_to set so far: automatically compute one 8 hours later
if date_from and not date_to:
date_to_with_delta = datetime.datetime.strptime(date_from, tools.DEFAULT_SERVER_DATETIME_FORMAT) + datetime.timedelta(hours=8)
result['value']['date_to'] = str(date_to_with_delta)
# Compute and update the number of days
if (date_to and date_from) and (date_from <= date_to):
diff_day = self._get_number_of_days(date_from, date_to)
result['value']['number_of_days_temp'] = round(math.floor(diff_day))+1
else:
result['value']['number_of_days_temp'] = 0
return result
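# Worked example (illustrative): with date_from = '2017-01-01 08:00:00' and
# date_to = '2017-01-01 16:00:00', diff_day = 8*3600/86400.0 = 1/3 and
# floor(1/3) + 1 = 1 day is stored in number_of_days_temp. When date_to is
# missing, this onchange only proposes the 8-hours-later default and leaves
# the day count at 0.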
def onchange_date_to(self, cr, uid, ids, date_to, date_from):
"""
Update the number_of_days.
"""
# date_to has to be greater than date_from
if (date_from and date_to) and (date_from > date_to):
raise osv.except_osv(_('Warning!'),_('The start date must be anterior to the end date.'))
result = {'value': {}}
# Compute and update the number of days
if (date_to and date_from) and (date_from <= date_to):
diff_day = self._get_number_of_days(date_from, date_to)
result['value']['number_of_days_temp'] = round(math.floor(diff_day))+1
else:
result['value']['number_of_days_temp'] = 0
return result
def add_follower(self, cr, uid, ids, employee_id, context=None):
employee = self.pool['hr.employee'].browse(cr, uid, employee_id, context=context)
if employee.user_id:
self.message_subscribe(cr, uid, ids, [employee.user_id.partner_id.id], context=context)
def create(self, cr, uid, values, context=None):
""" Override to avoid automatic logging of creation """
if context is None:
context = {}
employee_id = values.get('employee_id', False)
context = dict(context, mail_create_nolog=True, mail_create_nosubscribe=True)
if values.get('state') and values['state'] not in ['draft', 'confirm', 'cancel'] and not self.pool['res.users'].has_group(cr, uid, 'base.group_hr_user'):
raise osv.except_osv(_('Warning!'), _('You cannot set a leave request as \'%s\'. Contact a human resource manager.') % values.get('state'))
hr_holiday_id = super(hr_holidays, self).create(cr, uid, values, context=context)
self.add_follower(cr, uid, [hr_holiday_id], employee_id, context=context)
return hr_holiday_id
def write(self, cr, uid, ids, vals, context=None):
employee_id = vals.get('employee_id', False)
if vals.get('state') and vals['state'] not in ['draft', 'confirm', 'cancel'] and not self.pool['res.users'].has_group(cr, uid, 'base.group_hr_user'):
raise osv.except_osv(_('Warning!'), _('You cannot set a leave request as \'%s\'. Contact a human resource manager.') % vals.get('state'))
hr_holiday_id = super(hr_holidays, self).write(cr, uid, ids, vals, context=context)
self.add_follower(cr, uid, ids, employee_id, context=context)
return hr_holiday_id
def holidays_reset(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {
'state': 'draft',
'manager_id': False,
'manager_id2': False,
})
to_unlink = []
for record in self.browse(cr, uid, ids, context=context):
for record2 in record.linked_request_ids:
self.holidays_reset(cr, uid, [record2.id], context=context)
to_unlink.append(record2.id)
if to_unlink:
self.unlink(cr, uid, to_unlink, context=context)
return True
def holidays_first_validate(self, cr, uid, ids, context=None):
obj_emp = self.pool.get('hr.employee')
ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)])
manager = ids2 and ids2[0] or False
self.holidays_first_validate_notificate(cr, uid, ids, context=context)
return self.write(cr, uid, ids, {'state':'validate1', 'manager_id': manager})
def holidays_validate(self, cr, uid, ids, context=None):
obj_emp = self.pool.get('hr.employee')
ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)])
manager = ids2 and ids2[0] or False
self.write(cr, uid, ids, {'state':'validate'})
data_holiday = self.browse(cr, uid, ids)
for record in data_holiday:
if record.double_validation:
self.write(cr, uid, [record.id], {'manager_id2': manager})
else:
self.write(cr, uid, [record.id], {'manager_id': manager})
if record.holiday_type == 'employee' and record.type == 'remove':
meeting_obj = self.pool.get('calendar.event')
meeting_vals = {
'name': record.name or _('Leave Request'),
'categ_ids': record.holiday_status_id.categ_id and [(6,0,[record.holiday_status_id.categ_id.id])] or [],
'duration': record.number_of_days_temp * 8,
'description': record.notes,
'user_id': record.user_id.id,
'start': record.date_from,
'stop': record.date_to,
'allday': False,
'state': 'open', # to block that meeting date in the calendar
'class': 'confidential'
}
#Add the partner_id (if exist) as an attendee
if record.user_id and record.user_id.partner_id:
meeting_vals['partner_ids'] = [(4,record.user_id.partner_id.id)]
ctx_no_email = dict(context or {}, no_email=True)
meeting_id = meeting_obj.create(cr, uid, meeting_vals, context=ctx_no_email)
self._create_resource_leave(cr, uid, [record], context=context)
self.write(cr, uid, ids, {'meeting_id': meeting_id})
elif record.holiday_type == 'category':
emp_ids = obj_emp.search(cr, uid, [('category_ids', 'child_of', [record.category_id.id])])
leave_ids = []
batch_context = dict(context, mail_notify_force_send=False)
for emp in obj_emp.browse(cr, uid, emp_ids, context=context):
vals = {
'name': record.name,
'type': record.type,
'holiday_type': 'employee',
'holiday_status_id': record.holiday_status_id.id,
'date_from': record.date_from,
'date_to': record.date_to,
'notes': record.notes,
'number_of_days_temp': record.number_of_days_temp,
'parent_id': record.id,
'employee_id': emp.id
}
leave_ids.append(self.create(cr, uid, vals, context=batch_context))
for leave_id in leave_ids:
# TODO is it necessary to interleave the calls?
for sig in ('confirm', 'validate', 'second_validate'):
self.signal_workflow(cr, uid, [leave_id], sig)
return True
def holidays_confirm(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
if record.employee_id and record.employee_id.parent_id and record.employee_id.parent_id.user_id:
self.message_subscribe_users(cr, uid, [record.id], user_ids=[record.employee_id.parent_id.user_id.id], context=context)
return self.write(cr, uid, ids, {'state': 'confirm'})
def holidays_refuse(self, cr, uid, ids, context=None):
obj_emp = self.pool.get('hr.employee')
ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)])
manager = ids2 and ids2[0] or False
for holiday in self.browse(cr, uid, ids, context=context):
if holiday.state == 'validate1':
self.write(cr, uid, [holiday.id], {'state': 'refuse', 'manager_id': manager})
else:
self.write(cr, uid, [holiday.id], {'state': 'refuse', 'manager_id2': manager})
self.holidays_cancel(cr, uid, ids, context=context)
return True
def holidays_cancel(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
# Delete the meeting
if record.meeting_id:
record.meeting_id.unlink()
# If this was a category request that created several linked leaves, refuse them all as well
self.signal_workflow(cr, uid, map(attrgetter('id'), record.linked_request_ids or []), 'refuse')
self._remove_resource_leave(cr, uid, ids, context=context)
return True
def check_holidays(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
if record.holiday_type != 'employee' or record.type != 'remove' or not record.employee_id or record.holiday_status_id.limit:
continue
leave_days = self.pool.get('hr.holidays.status').get_days(cr, uid, [record.holiday_status_id.id], record.employee_id.id, context=context)[record.holiday_status_id.id]
if leave_days['remaining_leaves'] < 0 or leave_days['virtual_remaining_leaves'] < 0:
# Raising a warning gives a more user-friendly feedback than the default constraint error
raise Warning(_('The number of remaining leaves is not sufficient for this leave type.\n'
'Please verify also the leaves waiting for validation.'))
return True
# -----------------------------
# OpenChatter and notifications
# -----------------------------
def _needaction_domain_get(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
empids = emp_obj.search(cr, uid, [('parent_id.user_id', '=', uid)], context=context)
dom = ['&', ('state', '=', 'confirm'), ('employee_id', 'in', empids)]
# if this user is a hr.manager, he should do second validations
if self.pool.get('res.users').has_group(cr, uid, 'base.group_hr_manager'):
dom = ['|'] + dom + [('state', '=', 'validate1')]
return dom
def holidays_first_validate_notificate(self, cr, uid, ids, context=None):
for obj in self.browse(cr, uid, ids, context=context):
self.message_post(cr, uid, [obj.id],
_("Request approved, waiting second validation."), context=context)
class resource_calendar_leaves(osv.osv):
_inherit = "resource.calendar.leaves"
_description = "Leave Detail"
_columns = {
'holiday_id': fields.many2one("hr.holidays", "Leave Request"),
}
class hr_employee(osv.osv):
_inherit="hr.employee"
def create(self, cr, uid, vals, context=None):
# don't pass the value of remaining leaves if it's 0 at creation time, otherwise it will trigger the inverse
# function _set_remaining_days even though the system may not be configured for it. Note that we don't have
# this problem on write() because the clients only send the fields that have been modified.
if 'remaining_leaves' in vals and not vals['remaining_leaves']:
del(vals['remaining_leaves'])
return super(hr_employee, self).create(cr, uid, vals, context=context)
def _set_remaining_days(self, cr, uid, empl_id, name, value, arg, context=None):
employee = self.browse(cr, uid, empl_id, context=context)
diff = value - employee.remaining_leaves
type_obj = self.pool.get('hr.holidays.status')
holiday_obj = self.pool.get('hr.holidays')
# Find for holidays status
status_ids = type_obj.search(cr, uid, [('limit', '=', False)], context=context)
if len(status_ids) != 1 :
raise osv.except_osv(_('Warning!'),_("The feature behind the field 'Remaining Legal Leaves' can only be used when there is only one leave type with the option 'Allow to Override Limit' unchecked. (%s Found). Otherwise, the update is ambiguous as we cannot decide on which leave type the update has to be done. \nYou may prefer to use the classic menus 'Leave Requests' and 'Allocation Requests' located in 'Human Resources \ Leaves' to manage the leave days of the employees if the configuration does not allow to use this field.") % (len(status_ids)))
status_id = status_ids and status_ids[0] or False
if not status_id:
return False
if diff > 0:
leave_id = holiday_obj.create(cr, uid, {'name': _('Allocation for %s') % employee.name, 'employee_id': employee.id, 'holiday_status_id': status_id, 'type': 'add', 'holiday_type': 'employee', 'number_of_days_temp': diff}, context=context)
elif diff < 0:
raise osv.except_osv(_('Warning!'), _('You cannot reduce validated allocation requests'))
else:
return False
for sig in ('confirm', 'validate', 'second_validate'):
holiday_obj.signal_workflow(cr, uid, [leave_id], sig)
return True
def _get_remaining_days(self, cr, uid, ids, name, args, context=None):
cr.execute("""SELECT
sum(h.number_of_days) as days,
h.employee_id
from
hr_holidays h
join hr_holidays_status s on (s.id=h.holiday_status_id)
where
h.state='validate' and
s.limit=False and
h.employee_id in %s
group by h.employee_id""", (tuple(ids),))
res = cr.dictfetchall()
remaining = {}
for r in res:
remaining[r['employee_id']] = r['days']
for employee_id in ids:
if not remaining.get(employee_id):
remaining[employee_id] = 0.0
return remaining
def _get_leave_status(self, cr, uid, ids, name, args, context=None):
holidays_obj = self.pool.get('hr.holidays')
holidays_id = holidays_obj.search(cr, uid,
[('employee_id', 'in', ids), ('date_from','<=',time.strftime('%Y-%m-%d %H:%M:%S')),
('date_to','>=',time.strftime('%Y-%m-%d 23:59:59')),('type','=','remove'),('state','not in',('cancel','refuse'))],
context=context)
result = {}
for id in ids:
result[id] = {
'current_leave_state': False,
'current_leave_id': False,
'leave_date_from':False,
'leave_date_to':False,
}
for holiday in self.pool.get('hr.holidays').browse(cr, uid, holidays_id, context=context):
result[holiday.employee_id.id]['leave_date_from'] = holiday.date_from
result[holiday.employee_id.id]['leave_date_to'] = holiday.date_to
result[holiday.employee_id.id]['current_leave_state'] = holiday.state
result[holiday.employee_id.id]['current_leave_id'] = holiday.holiday_status_id.id
return result
def _leaves_count(self, cr, uid, ids, field_name, arg, context=None):
Holidays = self.pool['hr.holidays']
return {
employee_id: Holidays.search_count(cr,uid, [('employee_id', '=', employee_id), ('type', '=', 'remove')], context=context)
for employee_id in ids
}
_columns = {
'remaining_leaves': fields.function(_get_remaining_days, string='Remaining Legal Leaves', fnct_inv=_set_remaining_days, type="float", help='Total number of legal leaves allocated to this employee, change this value to create allocation/leave request. Total based on all the leave types without overriding limit.'),
'current_leave_state': fields.function(_get_leave_status, multi="leave_status", string="Current Leave Status", type="selection",
selection=[('draft', 'New'), ('confirm', 'Waiting Approval'), ('refuse', 'Refused'),
('validate1', 'Waiting Second Approval'), ('validate', 'Approved'), ('cancel', 'Cancelled')]),
'current_leave_id': fields.function(_get_leave_status, multi="leave_status", string="Current Leave Type",type='many2one', relation='hr.holidays.status'),
'leave_date_from': fields.function(_get_leave_status, multi='leave_status', type='date', string='From Date'),
'leave_date_to': fields.function(_get_leave_status, multi='leave_status', type='date', string='To Date'),
'leaves_count': fields.function(_leaves_count, type='integer', string='Leaves'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| ncliam/serverpos | openerp/addons/hr_holidays/hr_holidays.py | Python | agpl-3.0 | 34,256 |
<?php
/**
*
* SugarCRM Community Edition is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004-2013 SugarCRM Inc.
*
* SuiteCRM is an extension to SugarCRM Community Edition developed by SalesAgility Ltd.
* Copyright (C) 2011 - 2018 SalesAgility Ltd.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo and "Supercharged by SuiteCRM" logo. If the display of the logos is not
* reasonably feasible for technical reasons, the Appropriate Legal Notices must
* display the words "Powered by SugarCRM" and "Supercharged by SuiteCRM".
*/
/**
* Generic filter
* @api
*/
class default_filter
{
public $_component;
public function __construct()
{
}
/**
* @deprecated deprecated since version 7.6, PHP4 Style Constructors are deprecated and will be removed in 7.8, please update your code, use __construct instead
*/
public function default_filter()
{
$deprecatedMessage = 'PHP4 Style Constructors are deprecated and will be removed in 7.8, please update your code';
if (isset($GLOBALS['log'])) {
$GLOBALS['log']->deprecated($deprecatedMessage);
} else {
trigger_error($deprecatedMessage, E_USER_DEPRECATED);
}
self::__construct();
}
public function setComponent($component)
{
$this->_component = $component;
}
/**
* getList
* Returns a nested array containing key/value pair(s) of a source record
*
* @param array $args Array of arguments to search/filter by
* @param string $module String optional value of the module that the connector framework is attempting to map to
* @return array of key/value pair(s) of source record; empty Array if no results are found
*/
public function getList($args, $module)
{
$args = $this->_component->mapInput($args, $module);
return $this->_component->getSource()->getList($args, $module);
}
}
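// Minimal usage sketch (illustrative only; $component stands for any connector source
// component exposing mapInput() and getSource()->getList(), and the module name is an
// assumption -- none of this appears in the file above):
//   $filter = new default_filter();
//   $filter->setComponent($component);
//   $rows = $filter->getList(array('last_name' => 'Smith'), 'Contacts');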
| ChangezKhan/SuiteCRM | include/connectors/filters/default/filter.php | PHP | agpl-3.0 | 3,456 |
// ---------------------------------------------------------------------
//
// Copyright (C) 1998 - 2014 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------
// check accuracy of various quadrature formulas by using them to
// integrate polynomials of increasing degree, and finding the degree
// until which they integrate exactly
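// Added for clarity: the exactness check relies on the elementary integral
//   \int_0^1 x^i dx = 1/(i+1),
// so on the unit cell [0,1]^dim the test integrand x^i * y^i * z^i has the exact
// value 1/(i+1)^dim, which is what check_cells() compares the quadrature sum against.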
#include "../tests.h"
#include <iomanip>
#include <fstream>
#include <deal.II/base/logstream.h>
#include <deal.II/base/quadrature_lib.h>
#include <deal.II/base/qprojector.h>
#include <cmath>
template <int dim>
void
fill_vector (std::vector<Quadrature<dim> *> &quadratures)
{
quadratures.push_back (new QMidpoint<dim>());
quadratures.push_back (new QTrapez<dim>());
quadratures.push_back (new QSimpson<dim>());
quadratures.push_back (new QMilne<dim>());
quadratures.push_back (new QWeddle<dim>());
for (unsigned int i=0; i<9; ++i)
{
quadratures.push_back (new QGauss<dim>(i));
}
QMilne<1> q1d;
quadratures.push_back (new Quadrature<dim>(q1d));
for (unsigned int i=2; i<8; ++i)
{
quadratures.push_back (new QGaussLobatto<dim>(i));
}
}
template <int dim>
void
check_cells (std::vector<Quadrature<dim>*> &quadratures)
{
Quadrature<dim> quadrature;
for (unsigned int n=0; n<quadratures.size(); ++n)
{
quadrature = *quadratures[n];
const std::vector<Point<dim> > &points=quadrature.get_points();
const std::vector<double> &weights=quadrature.get_weights();
deallog << "Quadrature no." << n;
unsigned int i=0;
double quadrature_int=0;
double exact_int=0;
double err = 0;
do
{
++i;
quadrature_int=0;
// Check the polynomial x^i*y^i*z^i (one factor per space dimension)
for (unsigned int x=0; x<quadrature.size(); ++x)
{
double f=1.;
switch (dim)
{
case 3:
f *= std::pow(static_cast<double>(points[x](2)), i*1.0);
case 2:
f *= std::pow(static_cast<double>(points[x](1)), i*1.0);
case 1:
f *= std::pow(static_cast<double>(points[x](0)), i*1.0);
}
quadrature_int+=f*weights[x];
}
// the exact integral is 1/(i+1)^dim
exact_int=1./std::pow(static_cast<double>(i+1),dim);
err = std::fabs(quadrature_int-exact_int);
}
while (err<1e-14);
// Uncomment here for testing
// deallog << " (Int " << quadrature_int << ',' << exact_int << ")";
deallog << " is exact for polynomials of degree " << i-1 << std::endl;
if (dim==1)
{
// check the ordering of
// the quadrature points
bool in_order=true;
for (unsigned int x=1; x<quadrature.size(); ++x)
{
if (points[x](0)<=points[x-1](0))
in_order=false;
}
if (!in_order)
for (unsigned int x=0; x<quadrature.size(); ++x)
deallog << points[x] << std::endl;
}
}
}
template <int dim>
void
check_faces (const std::vector<Quadrature<dim-1>*>& quadratures, const bool sub)
{
if (sub)
deallog.push("subfaces");
else
deallog.push("faces");
for (unsigned int n=0; n<quadratures.size(); ++n)
{
Quadrature<dim> quadrature (sub == false?
QProjector<dim>::project_to_all_faces(*quadratures[n]) :
QProjector<dim>::project_to_all_subfaces(*quadratures[n]));
const std::vector<Point<dim> > &points=quadrature.get_points();
const std::vector<double> &weights=quadrature.get_weights();
deallog << "Quadrature no." << n;
unsigned int i=0;
long double quadrature_int=0;
double exact_int=0;
double err = 0;
do
{
++i;
quadrature_int=0;
// Check the polynomial
// x^i*y^i*z^i
for (unsigned int x=0; x<quadrature.size(); ++x)
{
long double f=1.;
switch (dim)
{
case 3:
f *= std::pow((long double) points[x](2), i*1.0L);
case 2:
f *= std::pow((long double) points[x](1), i*1.0L);
case 1:
f *= std::pow((long double) points[x](0), i*1.0L);
}
quadrature_int+=f*weights[x];
}
// the exact integral is
// 1/(i+1)^(dim-1)
switch (dim)
{
case 2:
exact_int = 2 * (sub ? 2:1) / (double) (i+1);
break;
case 3:
exact_int = 3 * (sub ? (4+2+2):1)*8 / (double) (i+1)/(i+1);
break;
}
err = std::fabs(quadrature_int-exact_int);
}
// for comparison: use factor 8 in case
// of dim==3, as we integrate 8 times
// over the whole surface (all
// combinations of face_orientation,
// face_flip and face_rotation)
while (err < (dim==3 ? 8 : 1) * 2e-14);
// Uncomment here for testing
// deallog << " (Int " << quadrature_int << '-' << exact_int << '=' << err << ")";
deallog << " is exact for polynomials of degree " << i-1 << std::endl;
}
deallog.pop();
}
int main()
{
std::ofstream logfile("output");
deallog.attach(logfile);
deallog.depth_console(0);
deallog.threshold_double(1.e-10);
std::vector<Quadrature<1> *> q1;
std::vector<Quadrature<2> *> q2;
std::vector<Quadrature<3> *> q3;
fill_vector (q1);
fill_vector (q2);
fill_vector (q3);
deallog.push("1d");
check_cells(q1);
deallog.pop();
deallog.push("2d");
check_cells(q2);
check_faces<2>(q1,false);
check_faces<2>(q1,true);
deallog.pop();
deallog.push("3d");
check_cells(q3);
check_faces<3>(q2,false);
check_faces<3>(q2,true);
deallog.pop();
// delete objects again to avoid
// messages about memory leaks when
// using purify or other memory
// checkers
for (unsigned int i=0; i<q1.size(); ++i)
delete q1[i];
for (unsigned int i=0; i<q2.size(); ++i)
delete q2[i];
for (unsigned int i=0; i<q3.size(); ++i)
delete q3[i];
}
| johntfoster/dealii | tests/base/quadrature_test.cc | C++ | lgpl-2.1 | 6,728 |
// ---------------------------------------------------------------------
//
// Copyright (C) 2003 - 2014 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------
// test that the grid generated by GridGenerator::cylinder_shell<3> works as
// expected
#include "../tests.h"
#include <deal.II/base/logstream.h>
#include <deal.II/grid/tria.h>
#include <deal.II/grid/tria_iterator.h>
#include <deal.II/grid/tria_accessor.h>
#include <deal.II/grid/grid_generator.h>
#include <fstream>
#include <iomanip>
int main ()
{
std::ofstream logfile("output");
deallog.attach(logfile);
deallog.depth_console(0);
deallog.threshold_double(1.e-10);
deallog << std::setprecision (2);
// generate a hyperball in 3d
Triangulation<3> tria;
GridGenerator::cylinder_shell (tria, 1, .8, 1);
// make sure that all cells have positive
// volume
for (Triangulation<3>::active_cell_iterator cell=tria.begin_active();
cell!=tria.end(); ++cell)
deallog << cell << ' ' << cell->measure () << std::endl;
}
| natashasharma/dealii | tests/bits/cylinder_shell_01.cc | C++ | lgpl-2.1 | 1,499 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index;
import org.apache.lucene.document.NumericDocValuesField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexingSlowLog.SlowLogParsedDocumentPrinter;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
public class IndexingSlowLogTests extends ESTestCase {
public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException {
BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes();
ParsedDocument pd = new ParsedDocument(new NumericDocValuesField("version", 1), SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id",
"test", null, null, source, XContentType.JSON, null);
Index index = new Index("foo", "123");
// Turning off document logging doesn't log source[]
SlowLogParsedDocumentPrinter p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 0);
assertThat(p.toString(), not(containsString("source[")));
// Turning on document logging logs the whole thing
p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, Integer.MAX_VALUE);
assertThat(p.toString(), containsString("source[{\"foo\":\"bar\"}]"));
// And you can truncate the source
p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3);
assertThat(p.toString(), containsString("source[{\"f]"));
assertThat(p.toString(), startsWith("[foo/123] took"));
}
public void testReformatSetting() {
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), false)
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
IndexingSlowLog log = new IndexingSlowLog(settings);
assertFalse(log.isReformat());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), "true").build()));
assertTrue(log.isReformat());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), "false").build()));
assertFalse(log.isReformat());
settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY));
assertTrue(log.isReformat());
metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build());
settings = new IndexSettings(metaData, Settings.EMPTY);
log = new IndexingSlowLog(settings);
assertTrue(log.isReformat());
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), "NOT A BOOLEAN").build()));
fail();
} catch (IllegalArgumentException ex) {
final String expected = "illegal value can't update [index.indexing.slowlog.reformat] from [true] to [NOT A BOOLEAN]";
assertThat(ex, hasToString(containsString(expected)));
assertNotNull(ex.getCause());
assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class));
final IllegalArgumentException cause = (IllegalArgumentException) ex.getCause();
assertThat(cause, hasToString(containsString("Failed to parse value [NOT A BOOLEAN] as only [true] or [false] are allowed.")));
}
assertTrue(log.isReformat());
}
public void testLevelSetting() {
SlowLogLevel level = randomFrom(SlowLogLevel.values());
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level)
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
IndexingSlowLog log = new IndexingSlowLog(settings);
assertEquals(level, log.getLevel());
level = randomFrom(SlowLogLevel.values());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level).build()));
assertEquals(level, log.getLevel());
level = randomFrom(SlowLogLevel.values());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level).build()));
assertEquals(level, log.getLevel());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level).build()));
assertEquals(level, log.getLevel());
settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY));
assertEquals(SlowLogLevel.TRACE, log.getLevel());
metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build());
settings = new IndexSettings(metaData, Settings.EMPTY);
log = new IndexingSlowLog(settings);
assertTrue(log.isReformat());
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), "NOT A LEVEL").build()));
fail();
} catch (IllegalArgumentException ex) {
final String expected = "illegal value can't update [index.indexing.slowlog.level] from [TRACE] to [NOT A LEVEL]";
assertThat(ex, hasToString(containsString(expected)));
assertNotNull(ex.getCause());
assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class));
final IllegalArgumentException cause = (IllegalArgumentException) ex.getCause();
assertThat(cause, hasToString(containsString("No enum constant org.elasticsearch.index.SlowLogLevel.NOT A LEVEL")));
}
assertEquals(SlowLogLevel.TRACE, log.getLevel());
}
public void testSetLevels() {
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "100ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "200ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "300ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "400ms")
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
IndexingSlowLog log = new IndexingSlowLog(settings);
assertEquals(TimeValue.timeValueMillis(100).nanos(), log.getIndexTraceThreshold());
assertEquals(TimeValue.timeValueMillis(200).nanos(), log.getIndexDebugThreshold());
assertEquals(TimeValue.timeValueMillis(300).nanos(), log.getIndexInfoThreshold());
assertEquals(TimeValue.timeValueMillis(400).nanos(), log.getIndexWarnThreshold());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "120ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "220ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "320ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "420ms").build()));
assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getIndexTraceThreshold());
assertEquals(TimeValue.timeValueMillis(220).nanos(), log.getIndexDebugThreshold());
assertEquals(TimeValue.timeValueMillis(320).nanos(), log.getIndexInfoThreshold());
assertEquals(TimeValue.timeValueMillis(420).nanos(), log.getIndexWarnThreshold());
metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build());
settings.updateIndexMetaData(metaData);
assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexTraceThreshold());
assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexDebugThreshold());
assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexInfoThreshold());
assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexWarnThreshold());
settings = new IndexSettings(metaData, Settings.EMPTY);
log = new IndexingSlowLog(settings);
assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexTraceThreshold());
assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexDebugThreshold());
assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexInfoThreshold());
assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexWarnThreshold());
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
assertTimeValueException(ex, "index.indexing.slowlog.threshold.index.trace");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
assertTimeValueException(ex, "index.indexing.slowlog.threshold.index.debug");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
assertTimeValueException(ex, "index.indexing.slowlog.threshold.index.info");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
assertTimeValueException(ex, "index.indexing.slowlog.threshold.index.warn");
}
}
private void assertTimeValueException(final IllegalArgumentException e, final String key) {
final String expected = "illegal value can't update [" + key + "] from [-1] to [NOT A TIME VALUE]";
assertThat(e, hasToString(containsString(expected)));
assertNotNull(e.getCause());
assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
final IllegalArgumentException cause = (IllegalArgumentException) e.getCause();
final String causeExpected =
"failed to parse setting [" + key + "] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized";
assertThat(cause, hasToString(containsString(causeExpected)));
}
private IndexMetaData newIndexMeta(String name, Settings indexSettings) {
Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(indexSettings)
.build();
IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build();
return metaData;
}
}
| jimczi/elasticsearch | core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java | Java | apache-2.0 | 13,647 |
// Copyright 2011 Google Inc. All Rights Reserved.
// This file was generated from .js source files by GYP. If you
// want to make changes to this file you should either change the
// javascript source files or the GYP script.
#include "src/v8.h"
#include "src/snapshot/natives.h"
#include "src/utils.h"
namespace v8 {
namespace internal {
static const char sources[] = { 10, 40, 102, 117, 110, 99, 116, 105, 111, 110, 32, 40, 103, 108, 111, 98, 97,
108, 44, 32, 98, 105, 110, 100, 105, 110, 103, 41, 32, 123, 10, 32, 32, 39, 117,
115, 101, 32, 115, 116, 114, 105, 99, 116, 39, 59, 10, 32, 32, 98, 105, 110,
100, 105, 110, 103, 46, 116, 101, 115, 116, 69, 120, 112, 101, 114, 105, 109,
101, 110, 116, 97, 108, 69, 120, 116, 114, 97, 83, 104, 111, 117, 108, 100, 82,
101, 116, 117, 114, 110, 84, 101, 110, 32, 61, 32, 102, 117, 110, 99, 116, 105,
111, 110, 32, 40, 41, 32, 123, 10, 32, 32, 32, 32, 114, 101, 116, 117, 114, 110,
32, 49, 48, 59, 10, 32, 32, 125, 59, 10, 32, 32, 98, 105, 110, 100, 105, 110,
103, 46, 116, 101, 115, 116, 69, 120, 112, 101, 114, 105, 109, 101, 110, 116,
97, 108, 69, 120, 116, 114, 97, 83, 104, 111, 117, 108, 100, 67, 97, 108, 108,
84, 111, 82, 117, 110, 116, 105, 109, 101, 32, 61, 32, 102, 117, 110, 99, 116,
105, 111, 110, 40, 41, 32, 123, 10, 32, 32, 32, 32, 114, 101, 116, 117, 114,
110, 32, 98, 105, 110, 100, 105, 110, 103, 46, 114, 117, 110, 116, 105, 109,
101, 40, 51, 41, 59, 10, 32, 32, 125, 59, 10, 125, 41, 10 };
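// Illustrative annotation (not emitted by the generator): the byte array above
// decodes to the following JavaScript source:
//
//   (function (global, binding) {
//     'use strict';
//     binding.testExperimentalExtraShouldReturnTen = function () {
//       return 10;
//     };
//     binding.testExperimentalExtraShouldCallToRuntime = function() {
//       return binding.runtime(3);
//     };
//   })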
template <>
int NativesCollection<EXPERIMENTAL_EXTRAS>::GetBuiltinsCount() {
return 1;
}
template <>
int NativesCollection<EXPERIMENTAL_EXTRAS>::GetDebuggerCount() {
return 0;
}
template <>
int NativesCollection<EXPERIMENTAL_EXTRAS>::GetIndex(const char* name) {
if (strcmp(name, "test-experimental-extra") == 0) return 0;
return -1;
}
template <>
Vector<const char> NativesCollection<EXPERIMENTAL_EXTRAS>::GetScriptSource(int index) {
if (index == 0) return Vector<const char>(sources + 0, 235);
return Vector<const char>("", 0);
}
template <>
Vector<const char> NativesCollection<EXPERIMENTAL_EXTRAS>::GetScriptName(int index) {
if (index == 0) return Vector<const char>("native test-experimental-extra.js", 33);
return Vector<const char>("", 0);
}
template <>
Vector<const char> NativesCollection<EXPERIMENTAL_EXTRAS>::GetScriptsSource() {
return Vector<const char>(sources, 235);
}
} // internal
} // v8
| weolar/miniblink49 | gen/v8_5_7/experimental-extras-libraries.cc | C++ | apache-2.0 | 2,451 |
import java.util.*;
class Test {
private Set<String> foo;
void test(Test t1, String s) {
t1.foo = new HashSet<>();
t1.foo.add(s);
}
} | siosio/intellij-community | plugins/IntentionPowerPak/test/com/siyeh/ipp/collections/to_mutable_collection/FieldAssignment_after.java | Java | apache-2.0 | 151 |
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.spockframework.runtime.condition;
public interface IObjectRendererService extends IObjectRenderer<Object> {
<T> void addRenderer(Class<T> type, IObjectRenderer<? super T> renderer);
}
| spockframework/spock | spock-core/src/main/java/org/spockframework/runtime/condition/IObjectRendererService.java | Java | apache-2.0 | 809 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.projectView.impl.nodes;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.projectView.PresentationData;
import com.intellij.ide.projectView.ProjectViewNode;
import com.intellij.ide.projectView.ViewSettings;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.libraries.LibraryEx;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryType;
import com.intellij.openapi.roots.libraries.PersistentLibraryKind;
import com.intellij.openapi.roots.ui.configuration.ProjectSettingsService;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.PlatformIcons;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
public class LibraryGroupNode extends ProjectViewNode<LibraryGroupElement> {
public LibraryGroupNode(Project project, @NotNull LibraryGroupElement value, ViewSettings viewSettings) {
super(project, value, viewSettings);
}
@Override
@NotNull
public Collection<AbstractTreeNode<?>> getChildren() {
Module module = getValue().getModule();
ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module);
List<AbstractTreeNode<?>> children = new ArrayList<>();
OrderEntry[] orderEntries = moduleRootManager.getOrderEntries();
for (final OrderEntry orderEntry : orderEntries) {
if (orderEntry instanceof LibraryOrderEntry) {
final LibraryOrderEntry libraryOrderEntry = (LibraryOrderEntry)orderEntry;
final Library library = libraryOrderEntry.getLibrary();
if (library == null) {
continue;
}
final String libraryName = library.getName();
if (libraryName == null || libraryName.length() == 0) {
addLibraryChildren(libraryOrderEntry, children, getProject(), this);
}
else {
children.add(new NamedLibraryElementNode(getProject(), new NamedLibraryElement(module, libraryOrderEntry), getSettings()));
}
}
else if (orderEntry instanceof JdkOrderEntry) {
final JdkOrderEntry jdkOrderEntry = (JdkOrderEntry)orderEntry;
final Sdk jdk = jdkOrderEntry.getJdk();
if (jdk != null) {
children.add(new NamedLibraryElementNode(getProject(), new NamedLibraryElement(module, jdkOrderEntry), getSettings()));
}
}
}
return children;
}
public static void addLibraryChildren(LibraryOrSdkOrderEntry entry, List<? super AbstractTreeNode<?>> children, Project project, ProjectViewNode node) {
final PsiManager psiManager = PsiManager.getInstance(project);
VirtualFile[] files =
entry instanceof LibraryOrderEntry ? getLibraryRoots((LibraryOrderEntry)entry) : entry.getRootFiles(OrderRootType.CLASSES);
for (final VirtualFile file : files) {
if (!file.isValid()) continue;
if (file.isDirectory()) {
final PsiDirectory psiDir = psiManager.findDirectory(file);
if (psiDir == null) {
continue;
}
children.add(new PsiDirectoryNode(project, psiDir, node.getSettings()));
}
else {
final PsiFile psiFile = psiManager.findFile(file);
if (psiFile == null) continue;
children.add(new PsiFileNode(project, psiFile, node.getSettings()));
}
}
}
@Override
public String getTestPresentation() {
return "Libraries";
}
@Override
public boolean contains(@NotNull VirtualFile file) {
final ProjectFileIndex index = ProjectRootManager.getInstance(getProject()).getFileIndex();
if (!index.isInLibrary(file)) {
return false;
}
return someChildContainsFile(file, false);
}
@Override
public void update(@NotNull PresentationData presentation) {
presentation.setPresentableText(IdeBundle.message("node.projectview.libraries"));
presentation.setIcon(PlatformIcons.LIBRARY_ICON);
}
@Override
public boolean canNavigate() {
return ProjectSettingsService.getInstance(myProject).canOpenModuleLibrarySettings();
}
@Override
public void navigate(final boolean requestFocus) {
Module module = getValue().getModule();
ProjectSettingsService.getInstance(myProject).openModuleLibrarySettings(module);
}
public static VirtualFile @NotNull [] getLibraryRoots(@NotNull LibraryOrderEntry orderEntry) {
Library library = orderEntry.getLibrary();
if (library == null) return VirtualFile.EMPTY_ARRAY;
OrderRootType[] rootTypes = LibraryType.DEFAULT_EXTERNAL_ROOT_TYPES;
if (library instanceof LibraryEx) {
if (((LibraryEx)library).isDisposed()) return VirtualFile.EMPTY_ARRAY;
PersistentLibraryKind<?> libKind = ((LibraryEx)library).getKind();
if (libKind != null) {
rootTypes = LibraryType.findByKind(libKind).getExternalRootTypes();
}
}
final ArrayList<VirtualFile> files = new ArrayList<>();
for (OrderRootType rootType : rootTypes) {
files.addAll(Arrays.asList(library.getFiles(rootType)));
}
return VfsUtilCore.toVirtualFileArray(files);
}
}
| siosio/intellij-community | platform/lang-impl/src/com/intellij/ide/projectView/impl/nodes/LibraryGroupNode.java | Java | apache-2.0 | 5,590 |
/**
* @fileoverview Rule to enforce spacing around embedded expressions of template strings
* @author Toru Nagashima
*/
"use strict";
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
const astUtils = require("./utils/ast-utils");
//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------
const OPEN_PAREN = /\$\{$/u;
const CLOSE_PAREN = /^\}/u;
//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------
module.exports = {
meta: {
type: "layout",
docs: {
description: "require or disallow spacing around embedded expressions of template strings",
category: "ECMAScript 6",
recommended: false,
url: "https://eslint.org/docs/rules/template-curly-spacing"
},
fixable: "whitespace",
schema: [
{ enum: ["always", "never"] }
],
messages: {
expectedBefore: "Expected space(s) before '}'.",
expectedAfter: "Expected space(s) after '${'.",
unexpectedBefore: "Unexpected space(s) before '}'.",
unexpectedAfter: "Unexpected space(s) after '${'."
}
},
create(context) {
const sourceCode = context.getSourceCode();
const always = context.options[0] === "always";
const prefix = always ? "expected" : "unexpected";
/**
* Checks spacing before `}` of a given token.
* @param {Token} token A token to check. This is a Template token.
* @returns {void}
*/
function checkSpacingBefore(token) {
const prevToken = sourceCode.getTokenBefore(token);
if (prevToken &&
CLOSE_PAREN.test(token.value) &&
astUtils.isTokenOnSameLine(prevToken, token) &&
sourceCode.isSpaceBetweenTokens(prevToken, token) !== always
) {
context.report({
loc: token.loc.start,
messageId: `${prefix}Before`,
fix(fixer) {
if (always) {
return fixer.insertTextBefore(token, " ");
}
return fixer.removeRange([
prevToken.range[1],
token.range[0]
]);
}
});
}
}
/**
* Checks spacing after `${` of a given token.
* @param {Token} token A token to check. This is a Template token.
* @returns {void}
*/
function checkSpacingAfter(token) {
const nextToken = sourceCode.getTokenAfter(token);
if (nextToken &&
OPEN_PAREN.test(token.value) &&
astUtils.isTokenOnSameLine(token, nextToken) &&
sourceCode.isSpaceBetweenTokens(token, nextToken) !== always
) {
context.report({
loc: {
line: token.loc.end.line,
column: token.loc.end.column - 2
},
messageId: `${prefix}After`,
fix(fixer) {
if (always) {
return fixer.insertTextAfter(token, " ");
}
return fixer.removeRange([
token.range[1],
nextToken.range[0]
]);
}
});
}
}
return {
TemplateElement(node) {
const token = sourceCode.getFirstToken(node);
checkSpacingBefore(token);
checkSpacingAfter(token);
}
};
}
};
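// Illustrative usage note (not part of the original file): configured as
// "template-curly-spacing": ["error", "never"], a template like `${ foo }` is reported
// and auto-fixed to `${foo}`; with ["error", "always"] the fix inserts the spaces instead.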
| BigBoss424/portfolio | v8/development/node_modules/gatsby/node_modules/eslint/lib/rules/template-curly-spacing.js | JavaScript | apache-2.0 | 4,149 |
# Copyright (c) 2017 Keith Ito
""" from https://github.com/keithito/tacotron """
'''
Defines the set of symbols used in text input to the model.
The default is a set of ASCII characters that works well for English or text that has been
run through Unidecode. For other data, you can modify _characters. See TRAINING_DATA.md for details.
'''
from . import cmudict
_pad = '_'
_punctuation = '!\'(),.:;? '
_special = '-'
_letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
# Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as uppercase letters):
_arpabet = ['@' + s for s in cmudict.valid_symbols]
# Export all symbols:
symbols = [_pad] + list(_special) + list(_punctuation) + list(_letters) + _arpabet
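# Illustrative note (not in the original): symbols therefore starts with the pad token '_',
# then '-', the punctuation set, the upper/lower-case ASCII letters, and finally the
# '@'-prefixed ARPAbet symbols taken from cmudict.valid_symbols.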
| mlperf/inference_results_v0.7 | open/Inspur/code/rnnt/tensorrt/preprocessing/parts/text/symbols.py | Python | apache-2.0 | 749 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
var server = {};
(function (server) {
var USER = 'server.user';
var log = new Log();
/**
     * Gets the currently logged in user, or sets it when a user argument is supplied
*/
server.current = function (session, user) {
if (arguments.length > 1) {
session.put(USER, user);
return user;
}
return session.get(USER);
};
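    // Illustrative usage (not in the original): server.current(session, {name: 'admin'})
    // stores the user in the session; a later server.current(session) call returns that object.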
}(server));
| hsbhathiya/stratos | components/org.apache.stratos.manager.console/modules/console/scripts/server.js | JavaScript | apache-2.0 | 1,188 |
package graph
import (
"fmt"
"io"
"runtime"
"time"
"github.com/Sirupsen/logrus"
"github.com/docker/docker/api/types"
"github.com/docker/docker/utils"
)
// Lookup looks up an image by name in a TagStore and returns it as an
// ImageInspect structure.
func (s *TagStore) Lookup(name string) (*types.ImageInspect, error) {
image, err := s.LookupImage(name)
if err != nil || image == nil {
return nil, fmt.Errorf("No such image: %s", name)
}
var tags = make([]string, 0)
s.Lock()
for repoName, repository := range s.Repositories {
for ref, id := range repository {
if id == image.ID {
imgRef := utils.ImageReference(repoName, ref)
tags = append(tags, imgRef)
}
}
}
s.Unlock()
imageInspect := &types.ImageInspect{
ID: image.ID,
Tags: tags,
Parent: image.Parent,
Comment: image.Comment,
Created: image.Created.Format(time.RFC3339Nano),
Container: image.Container,
ContainerConfig: &image.ContainerConfig,
DockerVersion: image.DockerVersion,
Author: image.Author,
Config: image.Config,
Architecture: image.Architecture,
Os: image.OS,
Size: image.Size,
VirtualSize: s.graph.GetParentsSize(image) + image.Size,
}
imageInspect.GraphDriver.Name = s.graph.driver.String()
graphDriverData, err := s.graph.driver.GetMetadata(image.ID)
if err != nil {
return nil, err
}
imageInspect.GraphDriver.Data = graphDriverData
return imageInspect, nil
}
// ImageTarLayer returns the tar layer of the image
func (s *TagStore) ImageTarLayer(name string, dest io.Writer) error {
if image, err := s.LookupImage(name); err == nil && image != nil {
// On Windows, the base layer cannot be exported
if runtime.GOOS != "windows" || image.Parent != "" {
fs, err := s.graph.TarLayer(image)
if err != nil {
return err
}
defer fs.Close()
written, err := io.Copy(dest, fs)
if err != nil {
return err
}
logrus.Debugf("rendered layer for %s of [%d] size", image.ID, written)
}
return nil
}
return fmt.Errorf("No such image: %s", name)
}
| wcwxyz/docker | graph/service.go | GO | apache-2.0 | 2,129 |
/// <reference path='fourslash.ts' />
// @Filename: goToTypeDefinitioAliases_module1.ts
/////*definition*/interface I {
//// p;
////}
////export {I as I2};
// @Filename: goToTypeDefinitioAliases_module2.ts
////import {I2 as I3} from "./goToTypeDefinitioAliases_module1";
////var v1: I3;
////export {v1 as v2};
// @Filename: goToTypeDefinitioAliases_module3.ts
////import {/*reference1*/v2 as v3} from "./goToTypeDefinitioAliases_module2";
/////*reference2*/v3;
goTo.marker('reference1');
goTo.type();
verify.caretAtMarker('definition');
goTo.marker('reference2');
goTo.type();
verify.caretAtMarker('definition');
| plantain-00/TypeScript | tests/cases/fourslash/goToTypeDefinitionAliases.ts | TypeScript | apache-2.0 | 645 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_espn_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(5)
class SkiaEspnDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaEspnDesktopPageSet, self).__init__(
archive_data_file='data/skia_espn_desktop.json')
urls_list = [
# Why: #1 sports.
'http://espn.go.com',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
| HalCanary/skia-hc | tools/skp/page_sets/skia_espn_desktop.py | Python | bsd-3-clause | 1,170 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <string>
#include "chrome/app/chrome_command_ids.h"
#include "chrome/browser/content_settings/cookie_settings_factory.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/tab_dialogs.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/common/url_constants.h"
#include "chrome/test/base/in_process_browser_test.h"
#include "chrome/test/base/ui_test_utils.h"
#include "components/content_settings/core/browser/cookie_settings.h"
#include "net/test/embedded_test_server/embedded_test_server.h"
typedef InProcessBrowserTest CollectedCookiesTest;
// If this crashes on Windows, use http://crbug.com/79331
IN_PROC_BROWSER_TEST_F(CollectedCookiesTest, DoubleDisplay) {
ASSERT_TRUE(embedded_test_server()->InitializeAndWaitUntilReady());
// Disable cookies.
CookieSettingsFactory::GetForProfile(browser()->profile())
->SetDefaultCookieSetting(CONTENT_SETTING_BLOCK);
// Load a page with cookies.
ui_test_utils::NavigateToURL(
browser(), embedded_test_server()->GetURL("/cookie1.html"));
// Click on the info link twice.
content::WebContents* web_contents =
browser()->tab_strip_model()->GetActiveWebContents();
TabDialogs::FromWebContents(web_contents)->ShowCollectedCookies();
TabDialogs::FromWebContents(web_contents)->ShowCollectedCookies();
}
// If this crashes on Windows, use http://crbug.com/79331
IN_PROC_BROWSER_TEST_F(CollectedCookiesTest, NavigateAway) {
ASSERT_TRUE(embedded_test_server()->InitializeAndWaitUntilReady());
// Disable cookies.
CookieSettingsFactory::GetForProfile(browser()->profile())
->SetDefaultCookieSetting(CONTENT_SETTING_BLOCK);
// Load a page with cookies.
ui_test_utils::NavigateToURL(
browser(), embedded_test_server()->GetURL("/cookie1.html"));
// Click on the info link.
content::WebContents* web_contents =
browser()->tab_strip_model()->GetActiveWebContents();
TabDialogs::FromWebContents(web_contents)->ShowCollectedCookies();
// Navigate to another page.
ui_test_utils::NavigateToURL(
browser(), embedded_test_server()->GetURL("/cookie2.html"));
}
| SaschaMester/delicium | chrome/browser/collected_cookies_browsertest.cc | C++ | bsd-3-clause | 2,289 |
var get = Ember.get;
Ember._ResolvedState = Ember.Object.extend({
manager: null,
state: null,
match: null,
object: Ember.computed(function(key) {
if (this._object) {
return this._object;
} else {
var state = get(this, 'state'),
match = get(this, 'match'),
manager = get(this, 'manager');
return state.deserialize(manager, match.hash);
}
}),
hasPromise: Ember.computed(function() {
return Ember.canInvoke(get(this, 'object'), 'then');
}).property('object'),
promise: Ember.computed(function() {
var object = get(this, 'object');
if (Ember.canInvoke(object, 'then')) {
return object;
} else {
return {
then: function(success) { success(object); }
};
}
}).property('object'),
transition: function() {
var manager = get(this, 'manager'),
path = get(this, 'state.path'),
object = get(this, 'object');
manager.transitionTo(path, object);
}
});
| teddyzeenny/ember.js | packages/ember-old-router/lib/resolved_state.js | JavaScript | mit | 985 |
/*
Project Orleans Cloud Service SDK ver. 1.0
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the ""Software""), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using Orleans;
using Orleans.Concurrency;
using System.Threading.Tasks;
namespace TwitterGrainInterfaces
{
/// <summary>
/// A grain to act as the API into Orleans, and fan out read/writes to multiple hashtag grains
/// </summary>
public interface ITweetDispatcherGrain : IGrainWithIntegerKey
{
Task AddScore(int score, string[] hashtags, string tweet);
Task<Totals[]> GetTotals(string[] hashtags);
}
}
| TedDBarr/orleans | Samples/TwitterSentiment/TwitterGrainInterfaces/ITweetGrain.cs | C# | mit | 1,600 |
require('mocha');
require('should');
var assert = require('assert');
var support = require('./support');
var App = support.resolve();
var app;
describe('app.option', function() {
beforeEach(function() {
app = new App();
});
it('should set a key-value pair on options:', function() {
app.option('a', 'b');
assert(app.options.a === 'b');
});
it('should set an object on options:', function() {
app.option({c: 'd'});
assert(app.options.c === 'd');
});
it('should set an option.', function() {
app.option('a', 'b');
app.options.should.have.property('a');
});
it('should get an option.', function() {
app.option('a', 'b');
app.option('a').should.equal('b');
});
it('should extend the `options` object.', function() {
app.option({x: 'xxx', y: 'yyy', z: 'zzz'});
app.option('x').should.equal('xxx');
app.option('y').should.equal('yyy');
app.option('z').should.equal('zzz');
});
it('options should be on the `options` object.', function() {
app.option({x: 'xxx', y: 'yyy', z: 'zzz'});
app.options.x.should.equal('xxx');
app.options.y.should.equal('yyy');
app.options.z.should.equal('zzz');
});
it('should be chainable.', function() {
app.option({x: 'xxx', y: 'yyy', z: 'zzz'});
app.option({a: 'aaa', b: 'bbb', c: 'ccc'});
app.option('x').should.equal('xxx');
app.option('a').should.equal('aaa');
});
it('should extend the `options` object when the first param is a string.', function() {
app.option('foo', {x: 'xxx', y: 'yyy', z: 'zzz'});
app.option('bar', {a: 'aaa', b: 'bbb', c: 'ccc'});
app.option('foo').should.have.property('x');
app.option('bar').should.have.property('a');
app.options.foo.should.have.property('x');
app.options.bar.should.have.property('a');
});
it('should set an option.', function() {
app.option('a', 'b');
app.options.should.have.property('a');
});
it('should get an option.', function() {
app.option('a', 'b');
app.option('a').should.equal('b');
});
it('should extend the `options` object.', function() {
app.option({x: 'xxx', y: 'yyy', z: 'zzz'});
app.option('x').should.equal('xxx');
app.option('y').should.equal('yyy');
app.option('z').should.equal('zzz');
});
it('options should be on the `options` object.', function() {
app.option({x: 'xxx', y: 'yyy', z: 'zzz'});
app.options.x.should.equal('xxx');
app.options.y.should.equal('yyy');
app.options.z.should.equal('zzz');
});
it('should be chainable.', function() {
app
.option({x: 'xxx', y: 'yyy', z: 'zzz'})
.option({a: 'aaa', b: 'bbb', c: 'ccc'});
app.option('x').should.equal('xxx');
app.option('a').should.equal('aaa');
});
});
| Gargitier5/tier5portal | vendors/update/test/app.option.js | JavaScript | mit | 2,760 |
# encoding: utf-8
module Mongoid #:nodoc:
module Extensions #:nodoc:
module Boolean #:nodoc:
module Conversions #:nodoc:
def set(value)
val = value.to_s
val == "true" || val == "1"
end
def get(value)
value
end
end
end
end
end
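# Illustrative behaviour (not in the original): Conversions#set coerces its argument via
# to_s, so set(true), set("true") and set("1") return true, while set("0"), set("false")
# and set(nil) return false.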
| listrophy/mongoid | lib/mongoid/extensions/boolean/conversions.rb | Ruby | mit | 312 |
<?php
class Foo extends Bar {
public function bar($foobar = array(parent::FOOBAR)) {}
}
?>
| sowbiba/senegal-front | vendor/pdepend/pdepend/src/test/resources/files/Parser/testParserHandlesParentKeywordInMethodParameterDefaultValue.php | PHP | mit | 95 |
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
const u = undefined;
export default [
[
['mn.', 'mg.', 'fm.', 'em.', 'kv.', 'nt.'],
['midn.', 'morg.', 'form.', 'etterm.', 'kveld', 'natt'],
['midnatt', 'morgenen', 'formiddagen', 'ettermiddagen', 'kvelden', 'natten']
],
[
['mn.', 'mg.', 'fm.', 'em.', 'kv.', 'nt.'],
['midn.', 'morg.', 'form.', 'etterm.', 'kveld', 'natt'],
['midnatt', 'morgen', 'formiddag', 'ettermiddag', 'kveld', 'natt']
],
[
'00:00', ['06:00', '10:00'], ['10:00', '12:00'], ['12:00', '18:00'], ['18:00', '24:00'],
['00:00', '06:00']
]
];
| mgechev/angular | packages/common/locales/extra/nb.ts | TypeScript | mit | 848 |
<?php
/**
* Settings screen sidebar for free plugins with a pro version. Display the reasons to upgrade
* and the mailing list.
*/
?>
<!-- Keep Updated -->
<div class="postbox">
<div class="handlediv" title="Click to toggle"><br /></div>
<h3 class="hndle"><span><?php _e('Keep Updated', $this->plugin->name); ?></span></h3>
<div class="option">
<p class="description"><?php _e('Subscribe to the newsletter and receive updates on our WordPress Plugins', $this->plugin->name); ?>.</p>
</div>
<form action="http://n7studios.createsend.com/t/r/s/jdutdyj/" method="post">
<div class="option">
<p>
<strong><?php _e('Email', $this->plugin->name); ?></strong>
<input id="fieldEmail" name="cm-jdutdyj-jdutdyj" type="email" required />
</p>
</div>
<div class="option">
<p>
<input type="submit" name="submit" value="<?php _e('Subscribe', $this->plugin->name); ?>" class="button button-primary" />
</p>
</div>
</form>
</div> | TheOrchardSolutions/WordPress | wp-content/plugins/wp-to-buffer/_modules/dashboard/views/sidebar-upgrade.php | PHP | gpl-2.0 | 1,049 |
// SPDX-License-Identifier: GPL-2.0-or-later
package org.dolphinemu.dolphinemu.adapters;
import android.content.res.Resources;
import android.view.ViewGroup;
import androidx.leanback.widget.ImageCardView;
import androidx.leanback.widget.Presenter;
import org.dolphinemu.dolphinemu.model.TvSettingsItem;
import org.dolphinemu.dolphinemu.viewholders.TvSettingsViewHolder;
public final class SettingsRowPresenter extends Presenter
{
public Presenter.ViewHolder onCreateViewHolder(ViewGroup parent)
{
// Create a new view.
ImageCardView settingsCard = new ImageCardView(parent.getContext());
settingsCard.setMainImageAdjustViewBounds(true);
settingsCard.setMainImageDimensions(192, 160);
settingsCard.setFocusable(true);
settingsCard.setFocusableInTouchMode(true);
// Use that view to create a ViewHolder.
return new TvSettingsViewHolder(settingsCard);
}
public void onBindViewHolder(Presenter.ViewHolder viewHolder, Object item)
{
TvSettingsViewHolder holder = (TvSettingsViewHolder) viewHolder;
TvSettingsItem settingsItem = (TvSettingsItem) item;
Resources resources = holder.cardParent.getResources();
holder.itemId = settingsItem.getItemId();
holder.cardParent.setTitleText(resources.getString(settingsItem.getLabelId()));
holder.cardParent.setMainImage(resources.getDrawable(settingsItem.getIconId(), null));
}
public void onUnbindViewHolder(Presenter.ViewHolder viewHolder)
{
// no op
}
}
| ZephyrSurfer/dolphin | Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/adapters/SettingsRowPresenter.java | Java | gpl-2.0 | 1,484 |
/* ScummVM - Graphic Adventure Engine
*
* ScummVM is the legal property of its developers, whose names
* are too numerous to list here. Please refer to the COPYRIGHT
* file distributed with this source distribution.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
*/
/*
* This file is based on WME Lite.
* http://dead-code.org/redir.php?target=wmelite
* Copyright (c) 2011 Jan Nedoma
*/
#include "engines/wintermute/base/base_game.h"
#include "engines/wintermute/base/base_scriptable.h"
#include "engines/wintermute/base/scriptables/script.h"
#include "engines/wintermute/base/scriptables/script_value.h"
#include "engines/wintermute/base/scriptables/script_stack.h"
namespace Wintermute {
bool EmulateHTTPConnectExternalCalls(BaseGame *inGame, ScStack *stack, ScStack *thisStack, ScScript::TExternalFunction *function) {
//////////////////////////////////////////////////////////////////////////
// Register
// Used to register license key online at Pizza Morgana: Episode 1 - Monsters and Manipulations in the Magical Forest
// Specification: external "httpconnect.dll" cdecl long Register(string, long, string, long)
// Known usage: Register(<productId>, 65535, <productKey>, 65535)
// Known product ID values are: "357868", "353058" and "353006"
// Known action: HTTP GET http://keygen.corbomitegames.com/keygen/validateKey.php?action=REGISTER&productId=productId&key=productKey
// Returns 1 on success
// Returns 0 on firewall error
// Returns -1 on invalid product key
// Returns -2 on invalid product ID
// Returns -3 on expired product key
// Returns -4 on invalid machine ID
// Returns -5 on number of installations exceeded
// Returns -6 on socket error
// Returns -7 on no internet connection
// Returns -8 on connection reset
	// Returns -11 on validation temporarily unavailable
// Returns -12 on validation error
// For some reason always returns -7 for me in a test game
//////////////////////////////////////////////////////////////////////////
if (strcmp(function->name, "Register") == 0) {
stack->correctParams(4);
const char *productId = stack->pop()->getString();
int productIdMaxLen = stack->pop()->getInt();
const char *productKey = stack->pop()->getString();
int productKeyMaxLen = stack->pop()->getInt();
warning("Register(\"%s\",%d,\"%s\",%d) is not implemented", productId , productIdMaxLen, productKey, productKeyMaxLen);
stack->pushInt(-7); // "no internet connection" error
return STATUS_OK;
}
//////////////////////////////////////////////////////////////////////////
// Validate
// Used to validate something at Pizza Morgana: Episode 1 - Monsters and Manipulations in the Magical Forest
// Specification: external "httpconnect.dll" cdecl long Validate()
// Known usage: Validate()
// Known action: HTTP GET http://keygen.corbomitegames.com/keygen/validateKey.php?action=VALIDATE&productId=Ar&key=Ar
// Used only when Debug mode is active or game is started with "INVALID" cmdline parameter
// For some reason always returns 1 for me in a test game
//////////////////////////////////////////////////////////////////////////
else if (strcmp(function->name, "Validate") == 0) {
stack->correctParams(0);
// do nothing
stack->pushInt(1);
return STATUS_OK;
}
//////////////////////////////////////////////////////////////////////////
// SendHTTPAsync
// Used to send game progress events to server at Pizza Morgana: Episode 1 - Monsters and Manipulations in the Magical Forest
// Specification: external "httpconnect.dll" cdecl long SendHTTPAsync(string, long, string, long, string, long)
// Known usage: SendHTTPAsync("backend.pizzamorgana.com", 65535, <FullURL>, 65535, <Buffer?!>, 65535)
// FullURL is formed as "http://backend.pizzamorgana.com/event.php?Event=<EventName>&player=<PlayerName>&extraParams=<ExtraParams>&SN=<ProductKey>&Episode=1&GameTime=<CurrentTime>&UniqueID=<UniqueId>"
// Known EventName values are: "GameStart", "ChangeGoal", "EndGame" and "QuitGame"
// Known ExtraParams values are: "ACT0", "ACT1", "ACT2", "ACT3", "ACT4", "Ep0FindFood", "Ep0FindCellMenu", "Ep0BroRoom", "Ep0FindKey", "Ep0FindCellMenuKey", "Ep0FindMenuKey", "Ep0FindCell", "Ep0FindMenu", "Ep0OrderPizza", "Ep0GetRidOfVamp", "Ep0GetVampAttention", "Ep0License"
// Return value is never used
//////////////////////////////////////////////////////////////////////////
else if (strcmp(function->name, "SendHTTPAsync") == 0) {
stack->correctParams(6);
const char *server = stack->pop()->getString();
int serverMaxLen = stack->pop()->getInt();
const char *fullUrl = stack->pop()->getString();
int fullUrlMaxLen = stack->pop()->getInt();
const char *param5 = stack->pop()->getString();
int param5MaxLen = stack->pop()->getInt();
// TODO: Maybe parse URL and call some Achievements API using ExtraParams values in some late future
warning("SendHTTPAsync(\"%s\",%d,\"%s\",%d,\"%s\",%d) is not implemented", server, serverMaxLen, fullUrl, fullUrlMaxLen, param5, param5MaxLen);
stack->pushInt(0);
return STATUS_OK;
}
//////////////////////////////////////////////////////////////////////////
// SendRecvHTTP (6 params variant)
// Declared at Pizza Morgana: Episode 1 - Monsters and Manipulations in the Magical Forest
// Seems to be unused, probably SendRecvHTTP was initially used instead of SendHTTPAsync
// Specification: external "httpconnect.dll" cdecl long SendRecvHTTP(string, long, string, long, string, long)
// Always returns -7 for me in a test game, probably returns the same network errors as Register()
//////////////////////////////////////////////////////////////////////////
else if (strcmp(function->name, "SendRecvHTTP") == 0 && function->nu_params == 6) {
stack->correctParams(6);
const char *server = stack->pop()->getString();
int serverMaxLen = stack->pop()->getInt();
const char *fullUrl = stack->pop()->getString();
int fullUrlMaxLen = stack->pop()->getInt();
const char *param5 = stack->pop()->getString();
int param5MaxLen = stack->pop()->getInt();
warning("SendRecvHTTP(\"%s\",%d,\"%s\",%d,\"%s\",%d) is not implemented", server, serverMaxLen, fullUrl, fullUrlMaxLen, param5, param5MaxLen);
stack->pushInt(-7); // "no internet connection" error
return STATUS_OK;
}
//////////////////////////////////////////////////////////////////////////
// SendRecvHTTP (4 params variant)
// Used to call HTTP methods at Zbang! The Game
// Specification: external "httpconnect.dll" cdecl long SendRecvHTTP(string, long, string, long)
// Known usage: SendRecvHTTP("scoresshort.php?player=<PlayerName>", 65535, <Buffer>, 65535)
// Known usage: SendRecvHTTP("/update.php?player=<PlayerName>&difficulty=<Difficulty>&items=<CommaSeparatedItemList>", 65535, <Buffer>, 65535)
// My Zbang demo does not have this dll, so there is no way to actually test it with a test game
// Return value is never used in Zbang scripts
//////////////////////////////////////////////////////////////////////////
else if (strcmp(function->name, "SendRecvHTTP") == 0 && function->nu_params == 4) {
stack->correctParams(4);
const char *dirUrl = stack->pop()->getString();
int dirUrlMaxLen = stack->pop()->getInt();
/*ScValue *buf =*/ stack->pop();
int bufMaxLen = stack->pop()->getInt();
//TODO: Count items and give scores, persist those values
warning("SendRecvHTTP(\"%s\",%d,buf,%d) is not implemented", dirUrl, dirUrlMaxLen, bufMaxLen);
stack->pushInt(0);
return STATUS_OK;
}
return STATUS_FAILED;
}
} // End of namespace Wintermute
| somaen/scummvm | engines/wintermute/ext/dll_httpconnect.cpp | C++ | gpl-2.0 | 8,196 |
// errorcheck
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Issue 20227: panic while constructing constant "1i/1e-600000000"
package p
var _ = 1 / 1e-600000000i // ERROR "division by zero"
var _ = 1i / 1e-600000000 // ERROR "division by zero"
var _ = 1i / 1e-600000000i // ERROR "division by zero"
var _ = 1 / (1e-600000000 + 1e-600000000i) // ERROR "division by zero"
var _ = 1i / (1e-600000000 + 1e-600000000i) // ERROR "division by zero"
| Gurgel100/gcc | gcc/testsuite/go.test/test/fixedbugs/issue20227.go | GO | gpl-2.0 | 565 |
<?php
// $Id: panels-dashboard-link.tpl.php,v 1.3 2010/10/11 22:56:02 sdboyer Exp $
?>
<div class="dashboard-entry clearfix">
<div class="dashboard-text">
<div class="dashboard-link">
<?php print $link['title']; ?>
</div>
<div class="description">
<?php print $link['description']; ?>
</div>
</div>
</div>
| TransmissionStudios/Transmission | sites/default/modules/panels/templates/panels-dashboard-link.tpl.php | PHP | gpl-2.0 | 338 |
tinyMCE.addI18n('ka.wordcount',{words:"Words: "}); | freaxmind/miage-l3 | web/blog Tim Burton/tim_burton/protected/extensions/tinymce/assets/tiny_mce/plugins/wordc/langs/ka_dlg.js | JavaScript | gpl-3.0 | 50 |
/*
* Cppcheck - A tool for static C/C++ code analysis
* Copyright (C) 2007-2015 Daniel Marjamäki and Cppcheck team.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <QObject>
#include <QString>
#include <QList>
#include <QDir>
#include <QXmlStreamWriter>
#include <QDebug>
#include "report.h"
#include "erroritem.h"
#include "xmlreport.h"
#include "xmlreportv1.h"
static const char ResultElementName[] = "results";
static const char ErrorElementName[] = "error";
static const char FilenameAttribute[] = "file";
static const char LineAttribute[] = "line";
static const char IdAttribute[] = "id";
static const char SeverityAttribute[] = "severity";
static const char MsgAttribute[] = "msg";
XmlReportV1::XmlReportV1(const QString &filename) :
XmlReport(filename),
mXmlReader(NULL),
mXmlWriter(NULL)
{
}
XmlReportV1::~XmlReportV1()
{
delete mXmlReader;
delete mXmlWriter;
}
bool XmlReportV1::Create()
{
if (Report::Create()) {
mXmlWriter = new QXmlStreamWriter(Report::GetFile());
return true;
}
return false;
}
bool XmlReportV1::Open()
{
if (Report::Open()) {
mXmlReader = new QXmlStreamReader(Report::GetFile());
return true;
}
return false;
}
void XmlReportV1::WriteHeader()
{
mXmlWriter->setAutoFormatting(true);
mXmlWriter->writeStartDocument();
mXmlWriter->writeStartElement(ResultElementName);
}
void XmlReportV1::WriteFooter()
{
mXmlWriter->writeEndElement();
mXmlWriter->writeEndDocument();
}
void XmlReportV1::WriteError(const ErrorItem &error)
{
/*
Error example from the core program in xml
<error file="gui/test.cpp" line="14" id="mismatchAllocDealloc" severity="error" msg="Mismatching allocation and deallocation: k"/>
The callstack seems to be ignored here as well, instead last item of the stack is used
*/
// Don't write inconclusive errors to XML V1
if (error.inconclusive)
return;
mXmlWriter->writeStartElement(ErrorElementName);
QString file = QDir::toNativeSeparators(error.files[error.files.size() - 1]);
file = XmlReport::quoteMessage(file);
mXmlWriter->writeAttribute(FilenameAttribute, file);
const QString line = QString::number(error.lines[error.lines.size() - 1]);
mXmlWriter->writeAttribute(LineAttribute, line);
mXmlWriter->writeAttribute(IdAttribute, error.errorId);
// Don't localize severity so we can read these files
mXmlWriter->writeAttribute(SeverityAttribute, GuiSeverity::toString(error.severity));
const QString message = XmlReport::quoteMessage(error.message);
mXmlWriter->writeAttribute(MsgAttribute, message);
mXmlWriter->writeEndElement();
}
QList<ErrorItem> XmlReportV1::Read()
{
QList<ErrorItem> errors;
bool insideResults = false;
if (!mXmlReader) {
qDebug() << "You must Open() the file before reading it!";
return errors;
}
while (!mXmlReader->atEnd()) {
switch (mXmlReader->readNext()) {
case QXmlStreamReader::StartElement:
if (mXmlReader->name() == ResultElementName)
insideResults = true;
// Read error element from inside result element
if (insideResults && mXmlReader->name() == ErrorElementName) {
ErrorItem item = ReadError(mXmlReader);
errors.append(item);
}
break;
case QXmlStreamReader::EndElement:
if (mXmlReader->name() == ResultElementName)
insideResults = false;
break;
// Not handled
case QXmlStreamReader::NoToken:
case QXmlStreamReader::Invalid:
case QXmlStreamReader::StartDocument:
case QXmlStreamReader::EndDocument:
case QXmlStreamReader::Characters:
case QXmlStreamReader::Comment:
case QXmlStreamReader::DTD:
case QXmlStreamReader::EntityReference:
case QXmlStreamReader::ProcessingInstruction:
break;
}
}
return errors;
}
ErrorItem XmlReportV1::ReadError(QXmlStreamReader *reader)
{
ErrorItem item;
if (reader->name().toString() == ErrorElementName) {
QXmlStreamAttributes attribs = reader->attributes();
QString file = attribs.value("", FilenameAttribute).toString();
file = XmlReport::unquoteMessage(file);
item.file = file;
item.files.push_back(file);
const int line = attribs.value("", LineAttribute).toString().toUInt();
item.lines.push_back(line);
item.errorId = attribs.value("", IdAttribute).toString();
item.severity = GuiSeverity::fromString(attribs.value("", SeverityAttribute).toString());
        // NOTE: This duplicates the message into the Summary field. Since the
        // old XML format doesn't have separate summary and verbose messages,
        // we must put the same message into both fields so it shows up in the GUI.
        // Check if there is a full stop and cut the summary at it.
QString summary = attribs.value("", MsgAttribute).toString();
const int ind = summary.indexOf('.');
if (ind != -1)
summary = summary.left(ind + 1);
item.summary = XmlReport::unquoteMessage(summary);
QString message = attribs.value("", MsgAttribute).toString();
item.message = XmlReport::unquoteMessage(message);
}
return item;
}
| SkyroverTech/SkyroverCF | lib/cppcheck-1.71/gui/xmlreportv1.cpp | C++ | gpl-3.0 | 5,973 |
// Copyright (c) AlphaSierraPapa for the SharpDevelop Team (for details please see \doc\copyright.txt)
// This code is distributed under the GNU LGPL (for details please see \doc\license.txt)
using System;
using System.Collections.Generic;
namespace ICSharpCode.ILSpy.Debugger.Services
{
/// <summary>
/// Very naive parser.
/// </summary>
static class ParserService
{
static HashSet<string> mySet = new HashSet<string>();
static ParserService()
{
mySet.AddRange((new string [] {
".",
"{",
"}",
"(",
")",
"[",
"]",
" ",
"=",
"+",
"-",
"/",
"%",
"*",
"&",
Environment.NewLine,
";",
",",
"~",
"!",
"?",
@"\n",
@"\t",
@"\r",
"|"
}));
}
static void AddRange<T>(this ICollection<T> list, IEnumerable<T> items)
{
foreach (T item in items)
if (!list.Contains(item))
list.Add(item);
}
/// <summary>
/// Returns the variable name
/// </summary>
/// <param name="fullText"></param>
/// <param name="offset"></param>
/// <returns></returns>
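		/// <example>Illustrative (not in the original docs): SimpleParseAt("int foo = bar;", 5) returns "foo".</example>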
public static string SimpleParseAt(string fullText, int offset)
{
if (string.IsNullOrEmpty(fullText))
return string.Empty;
if (offset <= 0 || offset >= fullText.Length)
return string.Empty;
string currentValue = fullText[offset].ToString();
if (mySet.Contains(currentValue))
return string.Empty;
int left = offset, right = offset;
//search left
while((!mySet.Contains(currentValue) || currentValue == ".") && left > 0)
currentValue = fullText[--left].ToString();
currentValue = fullText[offset].ToString();
			// search right
while(!mySet.Contains(currentValue) && right < fullText.Length - 2)
currentValue = fullText[++right].ToString();
return fullText.Substring(left + 1, right - 1 - left).Trim();
}
}
}
| damnya/dnSpy | ILSpy/Services/ParserService.cs | C# | gpl-3.0 | 2,300 |
// This file has been generated by Py++.
#include "boost/python.hpp"
#include "generators/include/python_CEGUI.h"
#include "DefaultLogger.pypp.hpp"
namespace bp = boost::python;
struct DefaultLogger_wrapper : CEGUI::DefaultLogger, bp::wrapper< CEGUI::DefaultLogger > {
DefaultLogger_wrapper( )
: CEGUI::DefaultLogger( )
, bp::wrapper< CEGUI::DefaultLogger >(){
// null constructor
}
virtual void logEvent( ::CEGUI::String const & message, ::CEGUI::LoggingLevel level=::CEGUI::Standard ) {
if( bp::override func_logEvent = this->get_override( "logEvent" ) )
func_logEvent( boost::ref(message), level );
else{
this->CEGUI::DefaultLogger::logEvent( boost::ref(message), level );
}
}
void default_logEvent( ::CEGUI::String const & message, ::CEGUI::LoggingLevel level=::CEGUI::Standard ) {
CEGUI::DefaultLogger::logEvent( boost::ref(message), level );
}
virtual void setLogFilename( ::CEGUI::String const & filename, bool append=false ) {
if( bp::override func_setLogFilename = this->get_override( "setLogFilename" ) )
func_setLogFilename( boost::ref(filename), append );
else{
this->CEGUI::DefaultLogger::setLogFilename( boost::ref(filename), append );
}
}
void default_setLogFilename( ::CEGUI::String const & filename, bool append=false ) {
CEGUI::DefaultLogger::setLogFilename( boost::ref(filename), append );
}
};
void register_DefaultLogger_class(){
{ //::CEGUI::DefaultLogger
typedef bp::class_< DefaultLogger_wrapper, bp::bases< CEGUI::Logger >, boost::noncopyable > DefaultLogger_exposer_t;
DefaultLogger_exposer_t DefaultLogger_exposer = DefaultLogger_exposer_t( "DefaultLogger", "*!\n\
\n\
Default implementation for the Logger class.\n\
If you want to redirect CEGUI logs to some place other than a text file,\n\
implement your own Logger implementation and create a object of the\n\
Logger type before creating the CEGUI.System singleton.\n\
*\n", bp::no_init );
bp::scope DefaultLogger_scope( DefaultLogger_exposer );
DefaultLogger_exposer.def( bp::init< >() );
{ //::CEGUI::DefaultLogger::logEvent
typedef void ( ::CEGUI::DefaultLogger::*logEvent_function_type )( ::CEGUI::String const &,::CEGUI::LoggingLevel ) ;
typedef void ( DefaultLogger_wrapper::*default_logEvent_function_type )( ::CEGUI::String const &,::CEGUI::LoggingLevel ) ;
DefaultLogger_exposer.def(
"logEvent"
, logEvent_function_type(&::CEGUI::DefaultLogger::logEvent)
, default_logEvent_function_type(&DefaultLogger_wrapper::default_logEvent)
, ( bp::arg("message"), bp::arg("level")=::CEGUI::Standard ) );
}
{ //::CEGUI::DefaultLogger::setLogFilename
typedef void ( ::CEGUI::DefaultLogger::*setLogFilename_function_type )( ::CEGUI::String const &,bool ) ;
typedef void ( DefaultLogger_wrapper::*default_setLogFilename_function_type )( ::CEGUI::String const &,bool ) ;
DefaultLogger_exposer.def(
"setLogFilename"
, setLogFilename_function_type(&::CEGUI::DefaultLogger::setLogFilename)
, default_setLogFilename_function_type(&DefaultLogger_wrapper::default_setLogFilename)
, ( bp::arg("filename"), bp::arg("append")=(bool)(false) ) );
}
}
}
| geminy/aidear | oss/cegui/cegui-0.8.7/cegui/src/ScriptModules/Python/bindings/output/CEGUI/DefaultLogger.pypp.cpp | C++ | gpl-3.0 | 3,612 |
/* Copyright 2015 the SumatraPDF project authors (see AUTHORS file).
License: GPLv3 */
// utils
#include "BaseUtil.h"
#include "Dpi.h"
#include "FileUtil.h"
#include "GdiPlusUtil.h"
#include "LabelWithCloseWnd.h"
#include "UITask.h"
#include "WinUtil.h"
// rendering engines
#include "BaseEngine.h"
#include "EngineManager.h"
// layout controllers
#include "SettingsStructs.h"
#include "Controller.h"
#include "FileHistory.h"
#include "GlobalPrefs.h"
// ui
#include "SumatraPDF.h"
#include "WindowInfo.h"
#include "TabInfo.h"
#include "resource.h"
#include "AppPrefs.h"
#include "Favorites.h"
#include "Menu.h"
#include "SumatraDialogs.h"
#include "Tabs.h"
#include "Translations.h"
Favorite *Favorites::GetByMenuId(int menuId, DisplayState **dsOut)
{
DisplayState *ds;
for (size_t i = 0; (ds = gFileHistory.Get(i)) != nullptr; i++) {
for (size_t j = 0; j < ds->favorites->Count(); j++) {
if (menuId == ds->favorites->At(j)->menuId) {
if (dsOut)
*dsOut = ds;
return ds->favorites->At(j);
}
}
}
return nullptr;
}
DisplayState *Favorites::GetByFavorite(Favorite *fn)
{
DisplayState *ds;
for (size_t i = 0; (ds = gFileHistory.Get(i)) != nullptr; i++) {
if (ds->favorites->Contains(fn))
return ds;
}
return nullptr;
}
void Favorites::ResetMenuIds()
{
DisplayState *ds;
for (size_t i = 0; (ds = gFileHistory.Get(i)) != nullptr; i++) {
for (size_t j = 0; j < ds->favorites->Count(); j++) {
ds->favorites->At(j)->menuId = 0;
}
}
}
DisplayState *Favorites::GetFavByFilePath(const WCHAR *filePath)
{
    // it's likely that we'll ask about the info for the same
    // file as in the previous call, so use a one-element cache
DisplayState *ds = gFileHistory.Get(idxCache);
if (!ds || !str::Eq(ds->filePath, filePath))
ds = gFileHistory.Find(filePath, &idxCache);
return ds;
}
bool Favorites::IsPageInFavorites(const WCHAR *filePath, int pageNo)
{
DisplayState *fav = GetFavByFilePath(filePath);
if (!fav)
return false;
for (size_t i = 0; i < fav->favorites->Count(); i++) {
if (pageNo == fav->favorites->At(i)->pageNo)
return true;
}
return false;
}
static Favorite *FindByPage(DisplayState *ds, int pageNo, const WCHAR *pageLabel=nullptr)
{
if (pageLabel) {
for (size_t i = 0; i < ds->favorites->Count(); i++) {
if (str::Eq(ds->favorites->At(i)->pageLabel, pageLabel))
return ds->favorites->At(i);
}
}
for (size_t i = 0; i < ds->favorites->Count(); i++) {
if (pageNo == ds->favorites->At(i)->pageNo)
return ds->favorites->At(i);
}
return nullptr;
}
static int SortByPageNo(const void *a, const void *b)
{
Favorite *na = *(Favorite **)a;
Favorite *nb = *(Favorite **)b;
// sort lower page numbers first
return na->pageNo - nb->pageNo;
}
void Favorites::AddOrReplace(const WCHAR *filePath, int pageNo, const WCHAR *name, const WCHAR *pageLabel)
{
DisplayState *fav = GetFavByFilePath(filePath);
if (!fav) {
CrashIf(gGlobalPrefs->rememberOpenedFiles);
fav = NewDisplayState(filePath);
gFileHistory.Append(fav);
}
Favorite *fn = FindByPage(fav, pageNo, pageLabel);
if (fn) {
str::ReplacePtr(&fn->name, name);
CrashIf(fn->pageLabel && !str::Eq(fn->pageLabel, pageLabel));
}
else {
fn = NewFavorite(pageNo, name, pageLabel);
fav->favorites->Append(fn);
fav->favorites->Sort(SortByPageNo);
}
}
void Favorites::Remove(const WCHAR *filePath, int pageNo)
{
DisplayState *fav = GetFavByFilePath(filePath);
if (!fav)
return;
Favorite *fn = FindByPage(fav, pageNo);
if (!fn)
return;
fav->favorites->Remove(fn);
DeleteFavorite(fn);
if (!gGlobalPrefs->rememberOpenedFiles && 0 == fav->favorites->Count()) {
gFileHistory.Remove(fav);
DeleteDisplayState(fav);
}
}
void Favorites::RemoveAllForFile(const WCHAR *filePath)
{
DisplayState *fav = GetFavByFilePath(filePath);
if (!fav)
return;
for (size_t i = 0; i < fav->favorites->Count(); i++) {
DeleteFavorite(fav->favorites->At(i));
}
fav->favorites->Reset();
if (!gGlobalPrefs->rememberOpenedFiles) {
gFileHistory.Remove(fav);
DeleteDisplayState(fav);
}
}
// Note: those might be too big
#define MAX_FAV_SUBMENUS 10
#define MAX_FAV_MENUS 10
MenuDef menuDefFavContext[] = {
{ _TRN("Remove from favorites"), IDM_FAV_DEL, 0 }
};
static bool HasFavorites()
{
DisplayState *ds;
for (size_t i = 0; (ds = gFileHistory.Get(i)) != nullptr; i++) {
if (ds->favorites->Count() > 0)
return true;
}
return false;
}
// caller has to free() the result
static WCHAR *FavReadableName(Favorite *fn)
{
ScopedMem<WCHAR> plainLabel(str::Format(L"%d", fn->pageNo));
const WCHAR *label = fn->pageLabel ? fn->pageLabel : plainLabel;
if (fn->name) {
ScopedMem<WCHAR> pageNo(str::Format(_TR("(page %s)"), label));
return str::Join(fn->name, L" ", pageNo);
}
return str::Format(_TR("Page %s"), label);
}
// caller has to free() the result
static WCHAR *FavCompactReadableName(DisplayState *fav, Favorite *fn, bool isCurrent=false)
{
ScopedMem<WCHAR> rn(FavReadableName(fn));
if (isCurrent)
return str::Format(L"%s : %s", _TR("Current file"), rn.Get());
const WCHAR *fp = path::GetBaseName(fav->filePath);
return str::Format(L"%s : %s", fp, rn.Get());
}
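// Appends menu items for a single file's favorites (at most MAX_FAV_MENUS),
// assigning each favorite a menu id from the caller-provided running counter.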
static void AppendFavMenuItems(HMENU m, DisplayState *f, UINT& idx, bool combined, bool isCurrent)
{
for (size_t i = 0; i < f->favorites->Count(); i++) {
if (i >= MAX_FAV_MENUS)
return;
Favorite *fn = f->favorites->At(i);
fn->menuId = idx++;
ScopedMem<WCHAR> s;
if (combined)
s.Set(FavCompactReadableName(f, fn, isCurrent));
else
s.Set(FavReadableName(fn));
AppendMenu(m, MF_STRING, (UINT_PTR)fn->menuId, win::menu::ToSafeString(s, s));
}
}
static int SortByBaseFileName(const void *a, const void *b)
{
const WCHAR *filePathA = *(const WCHAR **)a;
const WCHAR *filePathB = *(const WCHAR **)b;
return str::CmpNatural(path::GetBaseName(filePathA), path::GetBaseName(filePathB));
}
static void GetSortedFilePaths(Vec<const WCHAR *>& filePathsSortedOut, DisplayState *toIgnore=nullptr)
{
DisplayState *ds;
for (size_t i = 0; (ds = gFileHistory.Get(i)) != nullptr; i++) {
if (ds->favorites->Count() > 0 && ds != toIgnore)
filePathsSortedOut.Append(ds->filePath);
}
filePathsSortedOut.Sort(SortByBaseFileName);
}
// For easy access, we try to show favorites in the menu, similar to a list of
// recently opened files.
// The first menu items are for currently opened file (up to MAX_FAV_MENUS), based
// on the assumption that user is usually interested in navigating current file.
// Then we have a submenu for each file for which there are bookmarks (up to
// MAX_FAV_SUBMENUS), each having up to MAX_FAV_MENUS menu items.
// If not all favorites can be shown, we also enable "Show all favorites" menu which
// will provide a way to see all favorites.
// Note: not sure if that's the best layout. Maybe we should always use submenu and
// put the submenu for current file as the first one (potentially named as "Current file"
// or some such, to make it stand out from other submenus)
static void AppendFavMenus(HMENU m, const WCHAR *currFilePath)
{
// To minimize mouse movement when navigating current file via favorites
// menu, put favorites for current file first
DisplayState *currFileFav = nullptr;
if (currFilePath) {
currFileFav = gFavorites.GetFavByFilePath(currFilePath);
}
// sort the files with favorites by base file name of file path
Vec<const WCHAR *> filePathsSorted;
if (HasPermission(Perm_DiskAccess)) {
// only show favorites for other files, if we're allowed to open them
GetSortedFilePaths(filePathsSorted, currFileFav);
}
if (currFileFav && currFileFav->favorites->Count() > 0)
filePathsSorted.InsertAt(0, currFileFav->filePath);
if (filePathsSorted.Count() == 0)
return;
AppendMenu(m, MF_SEPARATOR, 0, nullptr);
gFavorites.ResetMenuIds();
UINT menuId = IDM_FAV_FIRST;
size_t menusCount = filePathsSorted.Count();
if (menusCount > MAX_FAV_MENUS)
menusCount = MAX_FAV_MENUS;
for (size_t i = 0; i < menusCount; i++) {
const WCHAR *filePath = filePathsSorted.At(i);
DisplayState *f = gFavorites.GetFavByFilePath(filePath);
CrashIf(!f);
HMENU sub = m;
bool combined = (f->favorites->Count() == 1);
if (!combined)
sub = CreateMenu();
AppendFavMenuItems(sub, f, menuId, combined, f == currFileFav);
if (!combined) {
if (f == currFileFav) {
AppendMenu(m, MF_POPUP | MF_STRING, (UINT_PTR)sub, _TR("Current file"));
} else {
ScopedMem<WCHAR> tmp;
const WCHAR *fileName = win::menu::ToSafeString(path::GetBaseName(filePath), tmp);
AppendMenu(m, MF_POPUP | MF_STRING, (UINT_PTR)sub, fileName);
}
}
}
}
// Called when a user opens "Favorites" top-level menu. We need to construct
// the menu:
// - disable add/remove menu items if no document is opened
// - if a document is opened and the page is already bookmarked,
// disable "add" menu item and enable "remove" menu item
// - if a document is opened and the page is not bookmarked,
// enable "add" menu item and disable "remove" menu item
void RebuildFavMenu(WindowInfo *win, HMENU menu)
{
if (!win->IsDocLoaded()) {
win::menu::SetEnabled(menu, IDM_FAV_ADD, false);
win::menu::SetEnabled(menu, IDM_FAV_DEL, false);
AppendFavMenus(menu, nullptr);
} else {
ScopedMem<WCHAR> label(win->ctrl->GetPageLabel(win->currPageNo));
bool isBookmarked = gFavorites.IsPageInFavorites(win->ctrl->FilePath(), win->currPageNo);
if (isBookmarked) {
win::menu::SetEnabled(menu, IDM_FAV_ADD, false);
ScopedMem<WCHAR> s(str::Format(_TR("Remove page %s from favorites"), label.Get()));
win::menu::SetText(menu, IDM_FAV_DEL, s);
} else {
win::menu::SetEnabled(menu, IDM_FAV_DEL, false);
ScopedMem<WCHAR> s(str::Format(_TR("Add page %s to favorites"), label.Get()));
win::menu::SetText(menu, IDM_FAV_ADD, s);
}
AppendFavMenus(menu, win->ctrl->FilePath());
}
win::menu::SetEnabled(menu, IDM_FAV_TOGGLE, HasFavorites());
}
void ToggleFavorites(WindowInfo *win)
{
if (gGlobalPrefs->showFavorites) {
SetSidebarVisibility(win, win->tocVisible, false);
} else {
SetSidebarVisibility(win, win->tocVisible, true);
SetFocus(win->hwndFavTree);
}
}
static void GoToFavorite(WindowInfo *win, int pageNo) {
if (!WindowInfoStillValid(win))
return;
if (win->IsDocLoaded() && win->ctrl->ValidPageNo(pageNo))
win->ctrl->GoToPage(pageNo, true);
// we might have been invoked by clicking on a tree view
// switch focus so that keyboard navigation works, which enables
// a fluid experience
win->Focus();
}
// Going to a bookmark within current file scrolls to a given page.
// Going to a bookmark in another file, loads the file and scrolls to a page
// (similar to how invoking one of the recently opened files works)
static void GoToFavorite(WindowInfo *win, DisplayState *f, Favorite *fn)
{
assert(f && fn);
if (!f || !fn) return;
WindowInfo *existingWin = FindWindowInfoByFile(f->filePath, true);
if (existingWin) {
int pageNo = fn->pageNo;
uitask::Post([=] { GoToFavorite(existingWin, pageNo); });
return;
}
if (!HasPermission(Perm_DiskAccess))
return;
// When loading a new document, go directly to selected page instead of
// first showing last seen page stored in file history
// A hacky solution because I don't want to add even more parameters to
// LoadDocument() and LoadDocumentInto()
int pageNo = fn->pageNo;
DisplayState *ds = gFileHistory.Find(f->filePath);
if (ds && !ds->useDefaultState && gGlobalPrefs->rememberStatePerDocument) {
ds->pageNo = fn->pageNo;
ds->scrollPos = PointI(-1, -1); // don't scroll the page
pageNo = -1;
}
LoadArgs args(f->filePath, win);
win = LoadDocument(args);
if (win) {
        uitask::Post([=] { GoToFavorite(win, pageNo); });
}
}
void GoToFavoriteByMenuId(WindowInfo *win, int wmId)
{
DisplayState *f;
Favorite *fn = gFavorites.GetByMenuId(wmId, &f);
if (fn)
GoToFavorite(win, f, fn);
}
static void GoToFavForTVItem(WindowInfo* win, HWND hTV, HTREEITEM hItem=nullptr)
{
if (nullptr == hItem)
hItem = TreeView_GetSelection(hTV);
TVITEM item;
item.hItem = hItem;
item.mask = TVIF_PARAM;
TreeView_GetItem(hTV, &item);
Favorite *fn = (Favorite *)item.lParam;
if (!fn) {
// can happen for top-level node which is not associated with a favorite
// but only serves a parent node for favorites for a given file
return;
}
DisplayState *f = gFavorites.GetByFavorite(fn);
GoToFavorite(win, f, fn);
}
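// Inserts one favorite as a child node under the tree node of its file.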
static HTREEITEM InsertFavSecondLevelNode(HWND hwnd, HTREEITEM parent, Favorite *fn)
{
TV_INSERTSTRUCT tvinsert;
tvinsert.hParent = parent;
tvinsert.hInsertAfter = TVI_LAST;
tvinsert.itemex.mask = TVIF_TEXT | TVIF_STATE | TVIF_PARAM;
tvinsert.itemex.state = 0;
tvinsert.itemex.stateMask = TVIS_EXPANDED;
tvinsert.itemex.lParam = (LPARAM)fn;
ScopedMem<WCHAR> s(FavReadableName(fn));
tvinsert.itemex.pszText = s;
return TreeView_InsertItem(hwnd, &tvinsert);
}
static void InsertFavSecondLevelNodes(HWND hwnd, HTREEITEM parent, DisplayState *f)
{
for (size_t i = 0; i < f->favorites->Count(); i++) {
InsertFavSecondLevelNode(hwnd, parent, f->favorites->At(i));
}
}
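// Inserts the top-level tree node for a file; if the file has exactly one
// favorite, it is shown as a single combined leaf instead of a parent node.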
static HTREEITEM InsertFavTopLevelNode(HWND hwnd, DisplayState *fav, bool isExpanded)
{
WCHAR *s = nullptr;
bool collapsed = fav->favorites->Count() == 1;
if (collapsed)
isExpanded = false;
TV_INSERTSTRUCT tvinsert;
tvinsert.hParent = nullptr;
tvinsert.hInsertAfter = TVI_LAST;
tvinsert.itemex.mask = TVIF_TEXT | TVIF_STATE | TVIF_PARAM;
tvinsert.itemex.state = isExpanded ? TVIS_EXPANDED : 0;
tvinsert.itemex.stateMask = TVIS_EXPANDED;
tvinsert.itemex.lParam = 0;
if (collapsed) {
Favorite *fn = fav->favorites->At(0);
tvinsert.itemex.lParam = (LPARAM)fn;
s = FavCompactReadableName(fav, fn);
tvinsert.itemex.pszText = s;
} else {
tvinsert.itemex.pszText = (WCHAR*)path::GetBaseName(fav->filePath);
}
HTREEITEM ret = TreeView_InsertItem(hwnd, &tvinsert);
free(s);
return ret;
}
void PopulateFavTreeIfNeeded(WindowInfo *win)
{
HWND hwndTree = win->hwndFavTree;
if (TreeView_GetCount(hwndTree) > 0)
return;
Vec<const WCHAR *> filePathsSorted;
GetSortedFilePaths(filePathsSorted);
SendMessage(hwndTree, WM_SETREDRAW, FALSE, 0);
for (size_t i = 0; i < filePathsSorted.Count(); i++) {
DisplayState *f = gFavorites.GetFavByFilePath(filePathsSorted.At(i));
bool isExpanded = win->expandedFavorites.Contains(f);
HTREEITEM node = InsertFavTopLevelNode(hwndTree, f, isExpanded);
if (f->favorites->Count() > 1)
InsertFavSecondLevelNodes(hwndTree, node, f);
}
SendMessage(hwndTree, WM_SETREDRAW, TRUE, 0);
UINT fl = RDW_ERASE | RDW_FRAME | RDW_INVALIDATE | RDW_ALLCHILDREN;
RedrawWindow(hwndTree, nullptr, nullptr, fl);
}
void UpdateFavoritesTree(WindowInfo *win)
{
HWND hwndTree = win->hwndFavTree;
if (TreeView_GetCount(hwndTree) > 0) {
// PopulateFavTreeIfNeeded will re-enable WM_SETREDRAW
SendMessage(hwndTree, WM_SETREDRAW, FALSE, 0);
TreeView_DeleteAllItems(hwndTree);
PopulateFavTreeIfNeeded(win);
}
// hide the favorites tree if we've removed the last favorite
if (0 == TreeView_GetCount(hwndTree)) {
SetSidebarVisibility(win, win->tocVisible, false);
}
}
void UpdateFavoritesTreeForAllWindows()
{
for (WindowInfo *win : gWindows) {
UpdateFavoritesTree(win);
}
}
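// Returns the deepest ToC item that starts at or before pageNo; used as the
// default name when adding a favorite for that page.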
static DocTocItem *TocItemForPageNo(DocTocItem *item, int pageNo)
{
DocTocItem *currItem = nullptr;
for (; item; item = item->next) {
if (1 <= item->pageNo && item->pageNo <= pageNo)
currItem = item;
if (item->pageNo >= pageNo)
break;
// find any child item closer to the specified page
DocTocItem *subItem = TocItemForPageNo(item->child, pageNo);
if (subItem)
currItem = subItem;
}
return currItem;
}
void AddFavorite(WindowInfo *win)
{
TabInfo *tab = win->currentTab;
CrashIf(!tab);
int pageNo = win->currPageNo;
ScopedMem<WCHAR> name;
if (tab->ctrl->HasTocTree()) {
// use the current ToC heading as default name
DocTocItem *root = tab->ctrl->GetTocTree();
DocTocItem *item = TocItemForPageNo(root, pageNo);
if (item)
name.Set(str::Dup(item->title));
delete root;
}
ScopedMem<WCHAR> pageLabel(tab->ctrl->GetPageLabel(pageNo));
bool shouldAdd = Dialog_AddFavorite(win->hwndFrame, pageLabel, name);
if (!shouldAdd)
return;
ScopedMem<WCHAR> plainLabel(str::Format(L"%d", pageNo));
bool needsLabel = !str::Eq(plainLabel, pageLabel);
RememberFavTreeExpansionStateForAllWindows();
gFavorites.AddOrReplace(tab->filePath, pageNo, name, needsLabel ? pageLabel.Get() : nullptr);
// expand newly added favorites by default
DisplayState *fav = gFavorites.GetFavByFilePath(tab->filePath);
if (fav && fav->favorites->Count() == 2)
win->expandedFavorites.Append(fav);
UpdateFavoritesTreeForAllWindows();
prefs::Save();
}
void DelFavorite(WindowInfo *win)
{
CrashIf(!win->currentTab);
RememberFavTreeExpansionStateForAllWindows();
gFavorites.Remove(win->currentTab->filePath, win->currPageNo);
UpdateFavoritesTreeForAllWindows();
prefs::Save();
}
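// Records which top-level (per-file) nodes are currently expanded so the
// expansion state can be restored after the tree is rebuilt.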
void RememberFavTreeExpansionState(WindowInfo *win)
{
win->expandedFavorites.Reset();
HTREEITEM treeItem = TreeView_GetRoot(win->hwndFavTree);
while (treeItem) {
TVITEM item;
item.hItem = treeItem;
item.mask = TVIF_PARAM | TVIF_STATE;
item.stateMask = TVIS_EXPANDED;
TreeView_GetItem(win->hwndFavTree, &item);
if ((item.state & TVIS_EXPANDED) != 0) {
item.hItem = TreeView_GetChild(win->hwndFavTree, treeItem);
item.mask = TVIF_PARAM;
TreeView_GetItem(win->hwndFavTree, &item);
Favorite *fn = (Favorite *)item.lParam;
DisplayState *f = gFavorites.GetByFavorite(fn);
win->expandedFavorites.Append(f);
}
treeItem = TreeView_GetNextSibling(win->hwndFavTree, treeItem);
}
}
void RememberFavTreeExpansionStateForAllWindows()
{
for (size_t i = 0; i < gWindows.Count(); i++) {
RememberFavTreeExpansionState(gWindows.At(i));
}
}
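// Handles tree view notifications for the favorites pane: Tab/Ctrl+Tab focus
// handling, and going to a favorite on mouse click or Enter; returns -1 to
// fall back to the default handler.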
static LRESULT OnFavTreeNotify(WindowInfo *win, LPNMTREEVIEW pnmtv)
{
switch (pnmtv->hdr.code)
{
// TVN_SELCHANGED intentionally not implemented (mouse clicks are handled
// in NM_CLICK, and keyboard navigation in NM_RETURN and TVN_KEYDOWN)
case TVN_KEYDOWN: {
TV_KEYDOWN *ptvkd = (TV_KEYDOWN *)pnmtv;
if (VK_TAB == ptvkd->wVKey) {
if (win->tabsVisible && IsCtrlPressed())
TabsOnCtrlTab(win, IsShiftPressed());
else
AdvanceFocus(win);
return 1;
}
break;
}
case NM_CLICK: {
// Determine which item has been clicked (if any)
TVHITTESTINFO ht = { 0 };
DWORD pos = GetMessagePos();
ht.pt.x = GET_X_LPARAM(pos);
ht.pt.y = GET_Y_LPARAM(pos);
MapWindowPoints(HWND_DESKTOP, pnmtv->hdr.hwndFrom, &ht.pt, 1);
TreeView_HitTest(pnmtv->hdr.hwndFrom, &ht);
if ((ht.flags & TVHT_ONITEM))
GoToFavForTVItem(win, pnmtv->hdr.hwndFrom, ht.hItem);
break;
}
case NM_RETURN:
GoToFavForTVItem(win, pnmtv->hdr.hwndFrom);
break;
case NM_CUSTOMDRAW:
return CDRF_DODEFAULT;
}
return -1;
}
static void OnFavTreeContextMenu(WindowInfo *win, PointI pt)
{
TVITEM item;
if (pt.x != -1 || pt.y != -1) {
TVHITTESTINFO ht = { 0 };
ht.pt.x = pt.x;
ht.pt.y = pt.y;
MapWindowPoints(HWND_DESKTOP, win->hwndFavTree, &ht.pt, 1);
TreeView_HitTest(win->hwndFavTree, &ht);
if ((ht.flags & TVHT_ONITEM) == 0)
return; // only display menu if over a node in tree
TreeView_SelectItem(win->hwndFavTree, ht.hItem);
item.hItem = ht.hItem;
}
else {
item.hItem = TreeView_GetSelection(win->hwndFavTree);
if (!item.hItem)
return;
RECT rcItem;
if (TreeView_GetItemRect(win->hwndFavTree, item.hItem, &rcItem, TRUE)) {
MapWindowPoints(win->hwndFavTree, HWND_DESKTOP, (POINT *)&rcItem, 2);
pt.x = rcItem.left;
pt.y = rcItem.bottom;
}
else {
WindowRect rc(win->hwndFavTree);
pt = rc.TL();
}
}
item.mask = TVIF_PARAM;
TreeView_GetItem(win->hwndFavTree, &item);
Favorite *toDelete = (Favorite *)item.lParam;
HMENU popup = BuildMenuFromMenuDef(menuDefFavContext, dimof(menuDefFavContext), CreatePopupMenu());
INT cmd = TrackPopupMenu(popup, TPM_RETURNCMD | TPM_RIGHTBUTTON,
pt.x, pt.y, 0, win->hwndFavTree, nullptr);
DestroyMenu(popup);
if (IDM_FAV_DEL == cmd) {
RememberFavTreeExpansionStateForAllWindows();
if (toDelete) {
DisplayState *f = gFavorites.GetByFavorite(toDelete);
gFavorites.Remove(f->filePath, toDelete->pageNo);
} else {
// toDelete == nullptr => this is a parent node signifying all bookmarks in a file
item.hItem = TreeView_GetChild(win->hwndFavTree, item.hItem);
item.mask = TVIF_PARAM;
TreeView_GetItem(win->hwndFavTree, &item);
toDelete = (Favorite *)item.lParam;
DisplayState *f = gFavorites.GetByFavorite(toDelete);
gFavorites.RemoveAllForFile(f->filePath);
}
UpdateFavoritesTreeForAllWindows();
prefs::Save();
// TODO: it would be nice to have a system for undo-ing things, like in Gmail,
// so that we can do destructive operations without asking for permission via
        // invasive modal dialog boxes but also allow reverting them if they were done
// by mistake
}
}
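// The favorites tree and its container are subclassed so that Esc can close the
// window (when the escToExit preference is set), mouse wheel scrolling is
// forwarded to the canvas, and tree notifications and context menus are routed
// to the handlers above.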
static WNDPROC DefWndProcFavTree = nullptr;
static LRESULT CALLBACK WndProcFavTree(HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
{
WindowInfo *win = FindWindowInfoByHwnd(hwnd);
if (!win)
return CallWindowProc(DefWndProcFavTree, hwnd, msg, wParam, lParam);
switch (msg) {
case WM_ERASEBKGND:
return FALSE;
case WM_CHAR:
if (VK_ESCAPE == wParam && gGlobalPrefs->escToExit && MayCloseWindow(win))
CloseWindow(win, true);
break;
case WM_MOUSEWHEEL:
case WM_MOUSEHWHEEL:
        // scroll the canvas if the cursor isn't over the favorites tree
if (!IsCursorOverWindow(win->hwndFavTree))
return SendMessage(win->hwndCanvas, msg, wParam, lParam);
break;
}
return CallWindowProc(DefWndProcFavTree, hwnd, msg, wParam, lParam);
}
static WNDPROC DefWndProcFavBox = nullptr;
static LRESULT CALLBACK WndProcFavBox(HWND hwnd, UINT message, WPARAM wParam, LPARAM lParam)
{
WindowInfo *win = FindWindowInfoByHwnd(hwnd);
if (!win)
return CallWindowProc(DefWndProcFavBox, hwnd, message, wParam, lParam);
switch (message) {
case WM_SIZE:
LayoutTreeContainer(win->favLabelWithClose, win->hwndFavTree);
break;
case WM_COMMAND:
if (LOWORD(wParam) == IDC_FAV_LABEL_WITH_CLOSE)
ToggleFavorites(win);
break;
case WM_NOTIFY:
if (LOWORD(wParam) == IDC_FAV_TREE) {
LPNMTREEVIEW pnmtv = (LPNMTREEVIEW) lParam;
LRESULT res = OnFavTreeNotify(win, pnmtv);
if (res != -1)
return res;
}
break;
case WM_CONTEXTMENU:
if (win->hwndFavTree == (HWND)wParam) {
OnFavTreeContextMenu(win, PointI(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam)));
return 0;
}
break;
}
return CallWindowProc(DefWndProcFavBox, hwnd, message, wParam, lParam);
}
void CreateFavorites(WindowInfo *win)
{
win->hwndFavBox = CreateWindow(WC_STATIC, L"", WS_CHILD|WS_CLIPCHILDREN,
0, 0, gGlobalPrefs->sidebarDx, 0,
win->hwndFrame, (HMENU)0, GetModuleHandle(nullptr), nullptr);
LabelWithCloseWnd *l = CreateLabelWithCloseWnd(win->hwndFavBox, IDC_FAV_LABEL_WITH_CLOSE);
win->favLabelWithClose = l;
SetPaddingXY(l, 2, 2);
SetFont(l, GetDefaultGuiFont());
// label is set in UpdateToolbarSidebarText()
win->hwndFavTree = CreateWindowEx(WS_EX_STATICEDGE, WC_TREEVIEW, L"Fav",
TVS_HASBUTTONS|TVS_HASLINES|TVS_LINESATROOT|TVS_SHOWSELALWAYS|
TVS_TRACKSELECT|TVS_DISABLEDRAGDROP|TVS_NOHSCROLL|TVS_INFOTIP|
WS_TABSTOP|WS_VISIBLE|WS_CHILD,
0, 0, 0, 0, win->hwndFavBox, (HMENU)IDC_FAV_TREE, GetModuleHandle(nullptr), nullptr);
TreeView_SetUnicodeFormat(win->hwndFavTree, true);
if (nullptr == DefWndProcFavTree)
DefWndProcFavTree = (WNDPROC)GetWindowLongPtr(win->hwndFavTree, GWLP_WNDPROC);
SetWindowLongPtr(win->hwndFavTree, GWLP_WNDPROC, (LONG_PTR)WndProcFavTree);
if (nullptr == DefWndProcFavBox)
DefWndProcFavBox = (WNDPROC)GetWindowLongPtr(win->hwndFavBox, GWLP_WNDPROC);
SetWindowLongPtr(win->hwndFavBox, GWLP_WNDPROC, (LONG_PTR)WndProcFavBox);
}
| the7day/sumatrapdf | src/Favorites.cpp | C++ | gpl-3.0 | 27,609 |
import dataclasses
from typing import ClassVar, Dict, FrozenSet, List, Set, Tuple, Type
from collections import OrderedDict
@dataclasses.dataclass
class A:
a: List[int] = <error descr="Mutable default '[]' is not allowed. Use 'default_factory'">[]</error>
b: List[int] = <error descr="Mutable default 'list()' is not allowed. Use 'default_factory'">list()</error>
c: Set[int] = <error descr="Mutable default '{1}' is not allowed. Use 'default_factory'">{1}</error>
d: Set[int] = <error descr="Mutable default 'set()' is not allowed. Use 'default_factory'">set()</error>
e: Tuple[int, ...] = ()
f: Tuple[int, ...] = tuple()
g: ClassVar[List[int]] = []
h: ClassVar = []
i: Dict[int, int] = <error descr="Mutable default '{1: 2}' is not allowed. Use 'default_factory'">{1: 2}</error>
j: Dict[int, int] = <error descr="Mutable default 'dict()' is not allowed. Use 'default_factory'">dict()</error>
k = []
l = list()
m: Dict[int, int] = <error descr="Mutable default 'OrderedDict()' is not allowed. Use 'default_factory'">OrderedDict()</error>
n: FrozenSet[int] = frozenset()
a2: Type[List[int]] = list
b2: Type[Set[int]] = set
    c2: Type[Tuple[int, ...]] = tuple
| siosio/intellij-community | python/testData/inspections/PyDataclassInspection/defaultFieldValue.py | Python | apache-2.0 | 1,215 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.textstructure.structurefinder;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.xpack.textstructure.structurefinder.GrokPatternCreator.ValueOnlyGrokPatternCandidate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.containsInAnyOrder;
public class GrokPatternCreatorTests extends TextStructureTestCase {
public void testBuildFieldName() {
Map<String, Integer> fieldNameCountStore = new HashMap<>();
assertEquals("field", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field"));
assertEquals("field2", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field"));
assertEquals("field3", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field"));
assertEquals("extra_timestamp", GrokPatternCreator.buildFieldName(fieldNameCountStore, "extra_timestamp"));
assertEquals("field4", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field"));
assertEquals("uri", GrokPatternCreator.buildFieldName(fieldNameCountStore, "uri"));
assertEquals("extra_timestamp2", GrokPatternCreator.buildFieldName(fieldNameCountStore, "extra_timestamp"));
assertEquals("field5", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field"));
}
public void testPopulatePrefacesAndEpiloguesGivenTimestamp() {
Collection<String> matchingStrings = Arrays.asList(
"[2018-01-25T15:33:23] DEBUG ",
"[2018-01-24T12:33:23] ERROR ",
"junk [2018-01-22T07:33:23] INFO ",
"[2018-01-21T03:33:23] DEBUG "
);
ValueOnlyGrokPatternCandidate candidate = new ValueOnlyGrokPatternCandidate("TIMESTAMP_ISO8601", "date", "extra_timestamp");
Map<String, Integer> fieldNameCountStore = new HashMap<>();
Collection<String> prefaces = new ArrayList<>();
Collection<String> epilogues = new ArrayList<>();
candidate.processCaptures(explanation, fieldNameCountStore, matchingStrings, prefaces, epilogues, null, null, NOOP_TIMEOUT_CHECKER);
assertThat(prefaces, containsInAnyOrder("[", "[", "junk [", "["));
assertThat(epilogues, containsInAnyOrder("] DEBUG ", "] ERROR ", "] INFO ", "] DEBUG "));
}
public void testPopulatePrefacesAndEpiloguesGivenEmailAddress() {
Collection<String> matchingStrings = Arrays.asList("before alice@acme.com after", "abc bob@acme.com xyz", "carol@acme.com");
ValueOnlyGrokPatternCandidate candidate = new ValueOnlyGrokPatternCandidate("EMAILADDRESS", "keyword", "email");
Map<String, Integer> fieldNameCountStore = new HashMap<>();
Collection<String> prefaces = new ArrayList<>();
Collection<String> epilogues = new ArrayList<>();
candidate.processCaptures(explanation, fieldNameCountStore, matchingStrings, prefaces, epilogues, null, null, NOOP_TIMEOUT_CHECKER);
assertThat(prefaces, containsInAnyOrder("before ", "abc ", ""));
assertThat(epilogues, containsInAnyOrder(" after", " xyz", ""));
}
public void testAppendBestGrokMatchForStringsGivenTimestampsAndLogLevels() {
Collection<String> snippets = Arrays.asList(
"[2018-01-25T15:33:23] DEBUG ",
"[2018-01-24T12:33:23] ERROR ",
"junk [2018-01-22T07:33:23] INFO ",
"[2018-01-21T03:33:23] DEBUG "
);
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0);
assertEquals(
".*?\\[%{TIMESTAMP_ISO8601:extra_timestamp}\\] %{LOGLEVEL:loglevel} ",
grokPatternCreator.getOverallGrokPatternBuilder().toString()
);
}
public void testAppendBestGrokMatchForStringsGivenNumbersInBrackets() {
Collection<String> snippets = Arrays.asList("(-2)", " (-3)", " (4)", " (-5) ");
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0);
assertEquals(".*?\\(%{INT:field}\\).*?", grokPatternCreator.getOverallGrokPatternBuilder().toString());
}
public void testAppendBestGrokMatchForStringsGivenNegativeNumbersWithoutBreak() {
Collection<String> snippets = Arrays.asList("before-2 ", "prior to-3", "-4");
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0);
        // It seems sensible that we don't detect these suffixes as either base 10 or base 16 numbers
assertEquals(".*?", grokPatternCreator.getOverallGrokPatternBuilder().toString());
}
public void testAppendBestGrokMatchForStringsGivenHexNumbers() {
Collection<String> snippets = Arrays.asList(" abc", " 123", " -123", "1f is hex");
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0);
assertEquals(".*?%{BASE16NUM:field}.*?", grokPatternCreator.getOverallGrokPatternBuilder().toString());
}
public void testAppendBestGrokMatchForStringsGivenHostnamesWithNumbers() {
Collection<String> snippets = Arrays.asList("<host1.1.p2ps:", "<host2.1.p2ps:");
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0);
// We don't want the .1. in the middle to get detected as a hex number
assertEquals("<.*?:", grokPatternCreator.getOverallGrokPatternBuilder().toString());
}
public void testAppendBestGrokMatchForStringsGivenEmailAddresses() {
Collection<String> snippets = Arrays.asList("before alice@acme.com after", "abc bob@acme.com xyz", "carol@acme.com");
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0);
assertEquals(".*?%{EMAILADDRESS:email}.*?", grokPatternCreator.getOverallGrokPatternBuilder().toString());
}
public void testAppendBestGrokMatchForStringsGivenUris() {
Collection<String> snippets = Arrays.asList(
"main site https://www.elastic.co/ with trailing slash",
"https://www.elastic.co/guide/en/x-pack/current/ml-configuring-categories.html#ml-configuring-categories is a section",
"download today from https://www.elastic.co/downloads"
);
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0);
assertEquals(".*?%{URI:uri}.*?", grokPatternCreator.getOverallGrokPatternBuilder().toString());
}
public void testAppendBestGrokMatchForStringsGivenPaths() {
Collection<String> snippets = Arrays.asList("on Mac /Users/dave", "on Windows C:\\Users\\dave", "on Linux /home/dave");
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0);
assertEquals(".*? .*? %{PATH:path}", grokPatternCreator.getOverallGrokPatternBuilder().toString());
}
public void testAppendBestGrokMatchForStringsGivenKvPairs() {
Collection<String> snippets = Arrays.asList(
"foo=1 and bar=a",
"something foo=2 bar=b something else",
"foo=3 bar=c",
" foo=1 bar=a "
);
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0);
assertEquals(".*?\\bfoo=%{USER:foo} .*?\\bbar=%{USER:bar}.*?", grokPatternCreator.getOverallGrokPatternBuilder().toString());
}
public void testCreateGrokPatternFromExamplesGivenNamedLogs() {
Collection<String> sampleMessages = Arrays.asList(
"Sep 8 11:55:06 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'elastic.slack.com/A/IN': 95.110.64.205#53",
"Sep 8 11:55:08 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'slack-imgs.com/A/IN': 95.110.64.205#53",
"Sep 8 11:55:35 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'www.elastic.co/A/IN': 95.110.68.206#53",
"Sep 8 11:55:42 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'b.akamaiedge.net/A/IN': 95.110.64.205#53"
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
assertEquals(
"%{SYSLOGTIMESTAMP:timestamp} .*? .*?\\[%{INT:field}\\]: %{LOGLEVEL:loglevel} \\(.*? .*? .*?\\) .*? "
+ "%{QUOTEDSTRING:field2}: %{IP:ipaddress}#%{INT:field3}",
grokPatternCreator.createGrokPatternFromExamples("SYSLOGTIMESTAMP", TextStructureUtils.DATE_MAPPING_WITHOUT_FORMAT, "timestamp")
);
assertEquals(5, mappings.size());
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("field2"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("ipaddress"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field3"));
}
public void testCreateGrokPatternFromExamplesGivenCatalinaLogs() {
Collection<String> sampleMessages = Arrays.asList(
"Aug 29, 2009 12:03:33 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: "
+ "Invalid chunk ignored.",
"Aug 29, 2009 12:03:40 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: "
+ "Invalid chunk ignored.",
"Aug 29, 2009 12:03:45 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: "
+ "Invalid chunk ignored.",
"Aug 29, 2009 12:03:57 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: "
+ "Invalid chunk ignored."
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
assertEquals(
"%{CATALINA_DATESTAMP:timestamp} .*? .*?\\n%{LOGLEVEL:loglevel}: .*",
grokPatternCreator.createGrokPatternFromExamples(
"CATALINA_DATESTAMP",
TextStructureUtils.DATE_MAPPING_WITHOUT_FORMAT,
"timestamp"
)
);
assertEquals(1, mappings.size());
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
}
public void testCreateGrokPatternFromExamplesGivenMultiTimestampLogs() {
// Two timestamps: one local, one UTC
Collection<String> sampleMessages = Arrays.asList(
"559550912540598297\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986880\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t9049724\tserv02nw03\t10.120.48.147\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986887\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t884343\tserv02tw03\t192.168.121.189\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912603512850\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t8907014\tserv02nw01\t192.168.118.208\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp"
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
assertEquals(
"%{INT:field}\\t%{TIMESTAMP_ISO8601:timestamp}\\t%{TIMESTAMP_ISO8601:extra_timestamp}\\t%{INT:field2}\\t.*?\\t"
+ "%{IP:ipaddress}\\t.*?\\t%{LOGLEVEL:loglevel}\\t.*",
grokPatternCreator.createGrokPatternFromExamples(
"TIMESTAMP_ISO8601",
TextStructureUtils.DATE_MAPPING_WITHOUT_FORMAT,
"timestamp"
)
);
assertEquals(5, mappings.size());
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field"));
Map<String, String> expectedDateMapping = new HashMap<>();
expectedDateMapping.put(TextStructureUtils.MAPPING_TYPE_SETTING, "date");
expectedDateMapping.put(TextStructureUtils.MAPPING_FORMAT_SETTING, "iso8601");
assertEquals(expectedDateMapping, mappings.get("extra_timestamp"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field2"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("ipaddress"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
}
public void testCreateGrokPatternFromExamplesGivenMultiTimestampLogsAndIndeterminateFormat() {
// Two timestamps: one ISO8601, one indeterminate day/month
Collection<String> sampleMessages = Arrays.asList(
"559550912540598297\t2016-04-20T14:06:53\t20/04/2016 21:06:53,123456\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986880\t2016-04-20T14:06:53\t20/04/2016 21:06:53,123456\t9049724\tserv02nw03\t10.120.48.147\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986887\t2016-04-20T14:06:53\t20/04/2016 21:06:53,123456\t884343\tserv02tw03\t192.168.121.189\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912603512850\t2016-04-20T14:06:53\t20/04/2016 21:06:53,123456\t8907014\tserv02nw01\t192.168.118.208\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp"
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
assertEquals(
"%{INT:field}\\t%{TIMESTAMP_ISO8601:timestamp}\\t%{DATESTAMP:extra_timestamp}\\t%{INT:field2}\\t.*?\\t"
+ "%{IP:ipaddress}\\t.*?\\t%{LOGLEVEL:loglevel}\\t.*",
grokPatternCreator.createGrokPatternFromExamples(
"TIMESTAMP_ISO8601",
TextStructureUtils.DATE_MAPPING_WITHOUT_FORMAT,
"timestamp"
)
);
assertEquals(5, mappings.size());
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field"));
Map<String, String> expectedDateMapping = new HashMap<>();
expectedDateMapping.put(TextStructureUtils.MAPPING_TYPE_SETTING, "date_nanos");
expectedDateMapping.put(TextStructureUtils.MAPPING_FORMAT_SETTING, "dd/MM/yyyy HH:mm:ss,SSSSSS");
assertEquals(expectedDateMapping, mappings.get("extra_timestamp"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field2"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("ipaddress"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
}
public void testCreateGrokPatternFromExamplesGivenMultiTimestampLogsAndCustomDefinition() {
// Two timestamps: one custom, one built-in
Collection<String> sampleMessages = Arrays.asList(
"559550912540598297\t4/20/2016 2:06PM\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986880\t4/20/2016 2:06PM\t2016-04-20T21:06:53Z\t9049724\tserv02nw03\t10.120.48.147\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986887\t4/20/2016 2:06PM\t2016-04-20T21:06:53Z\t884343\tserv02tw03\t192.168.121.189\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912603512850\t4/20/2016 2:06PM\t2016-04-20T21:06:53Z\t8907014\tserv02nw01\t192.168.118.208\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp"
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.singletonMap("CUSTOM_TIMESTAMP", "%{MONTHNUM}/%{MONTHDAY}/%{YEAR} %{HOUR}:%{MINUTE}(?:AM|PM)"),
NOOP_TIMEOUT_CHECKER
);
Map<String, String> customMapping = new HashMap<>();
customMapping.put(TextStructureUtils.MAPPING_TYPE_SETTING, "date");
customMapping.put(TextStructureUtils.MAPPING_FORMAT_SETTING, "M/dd/yyyy h:mma");
assertEquals(
"%{INT:field}\\t%{CUSTOM_TIMESTAMP:timestamp}\\t%{TIMESTAMP_ISO8601:extra_timestamp}\\t%{INT:field2}\\t.*?\\t"
+ "%{IP:ipaddress}\\t.*?\\t%{LOGLEVEL:loglevel}\\t.*",
grokPatternCreator.createGrokPatternFromExamples("CUSTOM_TIMESTAMP", customMapping, "timestamp")
);
assertEquals(5, mappings.size());
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field"));
Map<String, String> expectedDateMapping = new HashMap<>();
expectedDateMapping.put(TextStructureUtils.MAPPING_TYPE_SETTING, "date");
expectedDateMapping.put(TextStructureUtils.MAPPING_FORMAT_SETTING, "iso8601");
assertEquals(expectedDateMapping, mappings.get("extra_timestamp"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field2"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("ipaddress"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
}
public void testCreateGrokPatternFromExamplesGivenTimestampAndTimeWithoutDate() {
// Two timestamps: one with date, one without
Collection<String> sampleMessages = Arrays.asList(
"559550912540598297\t2016-04-20T14:06:53\t21:06:53.123456\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986880\t2016-04-20T14:06:53\t21:06:53.123456\t9049724\tserv02nw03\t10.120.48.147\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986887\t2016-04-20T14:06:53\t21:06:53.123456\t884343\tserv02tw03\t192.168.121.189\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912603512850\t2016-04-20T14:06:53\t21:06:53.123456\t8907014\tserv02nw01\t192.168.118.208\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp"
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
assertEquals(
"%{INT:field}\\t%{TIMESTAMP_ISO8601:timestamp}\\t%{TIME:time}\\t%{INT:field2}\\t.*?\\t"
+ "%{IP:ipaddress}\\t.*?\\t%{LOGLEVEL:loglevel}\\t.*",
grokPatternCreator.createGrokPatternFromExamples(
"TIMESTAMP_ISO8601",
TextStructureUtils.DATE_MAPPING_WITHOUT_FORMAT,
"timestamp"
)
);
assertEquals(5, mappings.size());
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("time"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field2"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("ipaddress"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
}
public void testFindFullLineGrokPatternGivenApacheCombinedLogs() {
Collection<String> sampleMessages = Arrays.asList(
"83.149.9.216 - - [19/Jan/2016:08:13:42 +0000] "
+ "\"GET /presentations/logstash-monitorama-2013/images/kibana-search.png HTTP/1.1\" 200 203023 "
+ "\"http://semicomplete.com/presentations/logstash-monitorama-2013/\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) "
+ "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36\"",
"83.149.9.216 - - [19/Jan/2016:08:13:44 +0000] "
+ "\"GET /presentations/logstash-monitorama-2013/plugin/zoom-js/zoom.js HTTP/1.1\" 200 7697 "
+ "\"http://semicomplete.com/presentations/logstash-monitorama-2013/\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) "
+ "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36\"",
"83.149.9.216 - - [19/Jan/2016:08:13:44 +0000] "
+ "\"GET /presentations/logstash-monitorama-2013/plugin/highlight/highlight.js HTTP/1.1\" 200 26185 "
+ "\"http://semicomplete.com/presentations/logstash-monitorama-2013/\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) "
+ "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36\"",
"83.149.9.216 - - [19/Jan/2016:08:13:42 +0000] "
+ "\"GET /presentations/logstash-monitorama-2013/images/sad-medic.png HTTP/1.1\" 200 430406 "
+ "\"http://semicomplete.com/presentations/logstash-monitorama-2013/\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) "
+ "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36\""
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
assertEquals(
new Tuple<>("timestamp", "%{COMBINEDAPACHELOG}"),
grokPatternCreator.findFullLineGrokPattern(randomBoolean() ? "timestamp" : null)
);
assertEquals(10, mappings.size());
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "text"), mappings.get("agent"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("auth"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("bytes"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("clientip"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "double"), mappings.get("httpversion"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("ident"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("referrer"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("request"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("response"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("verb"));
}
public void testAdjustForPunctuationGivenCommonPrefix() {
Collection<String> snippets = """
","lab6.localhost","Route Domain","/Common/0","No-lookup","192.168.33.212","No-lookup","192.168.33.132","80","46721",\
"/Common/Subnet_33","TCP","0","","","","","","","","Staged","/Common/policy1","rule1","Accept","","","",\
"0000000000000000"
","lab6.localhost","Route Domain","/Common/0","No-lookup","192.168.143.244","No-lookup","192.168.33.106","55025","162",\
"/Common/Subnet_33","UDP","0","","","","","","","","Staged","/Common/policy1","rule1","Accept","","","",\
"0000000000000000"
","lab6.localhost","Route Domain","/Common/0","No-lookup","192.168.33.3","No-lookup","224.0.0.102","3222","3222",\
"/Common/Subnet_33","UDP","0","","","","","","","","Staged","/Common/policy1","rule1","Accept","","","",\
"0000000000000000"\
""".lines().toList();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
Collection<String> adjustedSnippets = grokPatternCreator.adjustForPunctuation(snippets);
assertEquals("\",", grokPatternCreator.getOverallGrokPatternBuilder().toString());
assertNotNull(adjustedSnippets);
assertThat(
new ArrayList<>(adjustedSnippets),
containsInAnyOrder(snippets.stream().map(snippet -> snippet.substring(2)).toArray(String[]::new))
);
}
public void testAdjustForPunctuationGivenNoCommonPrefix() {
Collection<String> snippets = Arrays.asList(
"|client (id:2) was removed from servergroup 'Normal'(id:7) by client 'User1'(id:2)",
"|servergroup 'GAME'(id:9) was added by 'User1'(id:2)",
"|permission 'i_group_auto_update_type'(id:146) with values (value:30, negated:0, skipchannel:0) "
+ "was added by 'User1'(id:2) to servergroup 'GAME'(id:9)"
);
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
snippets,
null,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
Collection<String> adjustedSnippets = grokPatternCreator.adjustForPunctuation(snippets);
assertEquals("", grokPatternCreator.getOverallGrokPatternBuilder().toString());
assertSame(snippets, adjustedSnippets);
}
public void testValidateFullLineGrokPatternGivenValid() {
String timestampField = "utc_timestamp";
String grokPattern = "%{INT:serial_no}\\t%{TIMESTAMP_ISO8601:local_timestamp}\\t%{TIMESTAMP_ISO8601:utc_timestamp}\\t"
+ "%{INT:user_id}\\t%{HOSTNAME:host}\\t%{IP:client_ip}\\t%{WORD:method}\\t%{LOGLEVEL:severity}\\t%{PROG:program}\\t"
+ "%{GREEDYDATA:message}";
// Two timestamps: one local, one UTC
Collection<String> sampleMessages = Arrays.asList(
"559550912540598297\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986880\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t9049724\tserv02nw03\t10.120.48.147\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986887\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t884343\tserv02tw03\t192.168.121.189\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912603512850\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t8907014\tserv02nw01\t192.168.118.208\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp"
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.validateFullLineGrokPattern(grokPattern, timestampField);
assertEquals(9, mappings.size());
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("serial_no"));
Map<String, String> expectedDateMapping = new HashMap<>();
expectedDateMapping.put(TextStructureUtils.MAPPING_TYPE_SETTING, "date");
expectedDateMapping.put(TextStructureUtils.MAPPING_FORMAT_SETTING, "iso8601");
assertEquals(expectedDateMapping, mappings.get("local_timestamp"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("user_id"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("host"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("client_ip"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("method"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("severity"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("program"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("message"));
}
public void testValidateFullLineGrokPatternGivenValidAndCustomDefinition() {
String timestampField = "local_timestamp";
String grokPattern = "%{INT:serial_no}\\t%{CUSTOM_TIMESTAMP:local_timestamp}\\t%{TIMESTAMP_ISO8601:utc_timestamp}\\t"
+ "%{INT:user_id}\\t%{HOSTNAME:host}\\t%{IP:client_ip}\\t%{WORD:method}\\t%{LOGLEVEL:severity}\\t%{PROG:program}\\t"
+ "%{GREEDYDATA:message}";
// Two timestamps: one local, one UTC
Collection<String> sampleMessages = Arrays.asList(
"559550912540598297\t4/20/2016 2:06PM\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986880\t4/20/2016 2:06PM\t2016-04-20T21:06:53Z\t9049724\tserv02nw03\t10.120.48.147\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912548986887\t4/20/2016 2:06PM\t2016-04-20T21:06:53Z\t884343\tserv02tw03\t192.168.121.189\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp",
"559550912603512850\t4/20/2016 2:06PM\t2016-04-20T21:06:53Z\t8907014\tserv02nw01\t192.168.118.208\tAuthpriv\t"
+ "Info\tsshd\tsubsystem request for sftp"
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.singletonMap("CUSTOM_TIMESTAMP", "%{MONTHNUM}/%{MONTHDAY}/%{YEAR} %{HOUR}:%{MINUTE}(?:AM|PM)"),
NOOP_TIMEOUT_CHECKER
);
grokPatternCreator.validateFullLineGrokPattern(grokPattern, timestampField);
assertEquals(9, mappings.size());
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("serial_no"));
Map<String, String> expectedDateMapping = new HashMap<>();
expectedDateMapping.put(TextStructureUtils.MAPPING_TYPE_SETTING, "date");
expectedDateMapping.put(TextStructureUtils.MAPPING_FORMAT_SETTING, "iso8601");
assertEquals(expectedDateMapping, mappings.get("utc_timestamp"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("user_id"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("host"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("client_ip"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("method"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("severity"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("program"));
assertEquals(Collections.singletonMap(TextStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("message"));
}
public void testValidateFullLineGrokPatternGivenInvalid() {
String timestampField = "utc_timestamp";
String grokPattern = "%{INT:serial_no}\\t%{TIMESTAMP_ISO8601:local_timestamp}\\t%{TIMESTAMP_ISO8601:utc_timestamp}\\t"
+ "%{INT:user_id}\\t%{HOSTNAME:host}\\t%{IP:client_ip}\\t%{WORD:method}\\t%{LOGLEVEL:severity}\\t%{PROG:program}\\t"
+ "%{GREEDYDATA:message}";
Collection<String> sampleMessages = Arrays.asList(
"Sep 8 11:55:06 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'elastic.slack.com/A/IN': 95.110.64.205#53",
"Sep 8 11:55:08 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'slack-imgs.com/A/IN': 95.110.64.205#53",
"Sep 8 11:55:35 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'www.elastic.co/A/IN': 95.110.68.206#53",
"Sep 8 11:55:42 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'b.akamaiedge.net/A/IN': 95.110.64.205#53"
);
Map<String, Object> mappings = new HashMap<>();
GrokPatternCreator grokPatternCreator = new GrokPatternCreator(
explanation,
sampleMessages,
mappings,
null,
Collections.emptyMap(),
NOOP_TIMEOUT_CHECKER
);
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> grokPatternCreator.validateFullLineGrokPattern(grokPattern, timestampField)
);
assertEquals("Supplied Grok pattern [" + grokPattern + "] does not match sample messages", e.getMessage());
}
}
| GlenRSmith/elasticsearch | x-pack/plugin/text-structure/src/test/java/org/elasticsearch/xpack/textstructure/structurefinder/GrokPatternCreatorTests.java | Java | apache-2.0 | 37,348 |
/*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.models.guided.dtree.shared.model.nodes;
public interface TypeNode extends BoundNode {
String getClassName();
void setClassName( final String className );
}
| ThiagoGarciaAlves/drools | drools-workbench-models/drools-workbench-models-guided-dtree/src/main/java/org/drools/workbench/models/guided/dtree/shared/model/nodes/TypeNode.java | Java | apache-2.0 | 791 |
/*
* Copyright 2011 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit.utils;
import static org.eclipse.jgit.lib.Constants.encode;
import static org.eclipse.jgit.lib.Constants.encodeASCII;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.text.MessageFormat;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.util.RawParseUtils;
import com.gitblit.models.PathModel.PathChangeModel;
import com.gitblit.utils.DiffUtils.DiffStat;
/**
* Generates an html snippet of a diff in Gitblit's style, tracks changed paths,
* and calculates diff stats.
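* <p>
* Illustrative flow (added for clarity, not part of the original description):
* construct the formatter over a ByteArrayOutputStream, call {@code format(entry)}
* for each {@code DiffEntry}, then call {@code getHtml()} to collect the rendered
* rows. Note that {@code getHtml()} casts the constructor's stream back to a
* ByteArrayOutputStream, so only that stream type is safe to pass in.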
*
* @author James Moger
*
*/
public class GitBlitDiffFormatter extends DiffFormatter {
private final OutputStream os;
private final DiffStat diffStat;
private PathChangeModel currentPath;
private int left, right;
public GitBlitDiffFormatter(OutputStream os, String commitId) {
super(os);
this.os = os;
this.diffStat = new DiffStat(commitId);
}
@Override
public void format(DiffEntry ent) throws IOException {
currentPath = diffStat.addPath(ent);
super.format(ent);
}
/**
* Output a hunk header
*
* @param aStartLine
* within first source
* @param aEndLine
* within first source
* @param bStartLine
* within second source
* @param bEndLine
* within second source
* @throws IOException
*/
@Override
protected void writeHunkHeader(int aStartLine, int aEndLine, int bStartLine, int bEndLine)
throws IOException {
os.write("<tr><th>..</th><th>..</th><td class='hunk_header'>".getBytes());
os.write('@');
os.write('@');
writeRange('-', aStartLine + 1, aEndLine - aStartLine);
writeRange('+', bStartLine + 1, bEndLine - bStartLine);
os.write(' ');
os.write('@');
os.write('@');
os.write("</td></tr>\n".getBytes());
left = aStartLine + 1;
right = bStartLine + 1;
}
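// Worked example (illustrative, not in the original file): writeHunkHeader(10, 15, 10, 17)
// emits a table row whose hunk cell reads "@@ -11,5 +11,7 @@", i.e. the 0-based start/end
// line pairs are converted to a 1-based start plus a length for each side.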
protected void writeRange(final char prefix, final int begin, final int cnt) throws IOException {
os.write(' ');
os.write(prefix);
switch (cnt) {
case 0:
// If the range is empty, its beginning number must
// be the
// line just before the range, or 0 if the range is
// at the
// start of the file stream. Here, begin is always 1
// based,
// so an empty file would produce "0,0".
//
os.write(encodeASCII(begin - 1));
os.write(',');
os.write('0');
break;
case 1:
// If the range is exactly one line, produce only
// the number.
//
os.write(encodeASCII(begin));
break;
default:
os.write(encodeASCII(begin));
os.write(',');
os.write(encodeASCII(cnt));
break;
}
}
@Override
protected void writeLine(final char prefix, final RawText text, final int cur)
throws IOException {
// update entry diffstat
currentPath.update(prefix);
// output diff
os.write("<tr>".getBytes());
switch (prefix) {
case '+':
os.write(("<th></th><th>" + (right++) + "</th>").getBytes());
os.write("<td><div class=\"diff add2\">".getBytes());
break;
case '-':
os.write(("<th>" + (left++) + "</th><th></th>").getBytes());
os.write("<td><div class=\"diff remove2\">".getBytes());
break;
default:
os.write(("<th>" + (left++) + "</th><th>" + (right++) + "</th>").getBytes());
os.write("<td>".getBytes());
break;
}
os.write(prefix);
String line = text.getString(cur);
line = StringUtils.escapeForHtml(line, false);
os.write(encode(line));
switch (prefix) {
case '+':
case '-':
os.write("</div>".getBytes());
break;
default:
os.write("</td>".getBytes());
}
os.write("</tr>\n".getBytes());
}
/**
* Workaround function for complex private methods in DiffFormatter. This
* sets the html for the diff headers.
*
* @return
*/
public String getHtml() {
ByteArrayOutputStream bos = (ByteArrayOutputStream) os;
String html = RawParseUtils.decode(bos.toByteArray());
String[] lines = html.split("\n");
StringBuilder sb = new StringBuilder();
boolean inFile = false;
String oldnull = "a/dev/null";
for (String line : lines) {
if (line.startsWith("index")) {
// skip index lines
} else if (line.startsWith("new file")) {
// skip new file lines
} else if (line.startsWith("\\ No newline")) {
// skip no new line
} else if (line.startsWith("---") || line.startsWith("+++")) {
// skip --- +++ lines
} else if (line.startsWith("diff")) {
line = StringUtils.convertOctal(line);
if (line.indexOf(oldnull) > -1) {
// a is null, use b
line = line.substring(("diff --git " + oldnull).length()).trim();
// trim b/
line = line.substring(2).trim();
} else {
// use a
line = line.substring("diff --git ".length()).trim();
line = line.substring(line.startsWith("\"a/") ? 3 : 2);
line = line.substring(0, line.indexOf(" b/") > -1 ? line.indexOf(" b/") : line.indexOf("\"b/")).trim();
}
if (line.charAt(0) == '"') {
line = line.substring(1);
}
if (line.charAt(line.length() - 1) == '"') {
line = line.substring(0, line.length() - 1);
}
if (inFile) {
sb.append("</tbody></table></div>\n");
inFile = false;
}
sb.append(MessageFormat.format("<div class='header'><div class=\"diffHeader\" id=\"{0}\"><i class=\"icon-file\"></i> ", line)).append(line).append("</div></div>");
sb.append("<div class=\"diff\">");
sb.append("<table><tbody>");
inFile = true;
} else {
boolean gitLinkDiff = line.length() > 0 && line.substring(1).startsWith("Subproject commit");
if (gitLinkDiff) {
sb.append("<tr><th></th><th></th>");
if (line.charAt(0) == '+') {
sb.append("<td><div class=\"diff add2\">");
} else {
sb.append("<td><div class=\"diff remove2\">");
}
}
sb.append(line);
if (gitLinkDiff) {
sb.append("</div></td></tr>");
}
}
}
sb.append("</table></div>");
return sb.toString();
}
public DiffStat getDiffStat() {
return diffStat;
}
}
| culmat/gitblit | src/main/java/com/gitblit/utils/GitBlitDiffFormatter.java | Java | apache-2.0 | 6,842 |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.coordinator.rules;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.MinMaxPriorityQueue;
import com.metamx.common.IAE;
import com.metamx.emitter.EmittingLogger;
import io.druid.server.coordinator.BalancerStrategy;
import io.druid.server.coordinator.CoordinatorStats;
import io.druid.server.coordinator.DruidCoordinator;
import io.druid.server.coordinator.DruidCoordinatorRuntimeParams;
import io.druid.server.coordinator.LoadPeonCallback;
import io.druid.server.coordinator.ReplicationThrottler;
import io.druid.server.coordinator.ServerHolder;
import io.druid.timeline.DataSegment;
import org.joda.time.DateTime;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* LoadRules indicate the number of replicants a segment should have in a given tier.
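* <p>
* For illustration (the tier names below are only example values): a rule whose
* tiered replicants map is {"hot": 2, "_default_tier": 1} asks the coordinator to
* keep two replicas of each matching segment in the "hot" tier and one in the
* default tier; {@link #run} assigns replicas until those counts are met and then
* drops any excess.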
*/
public abstract class LoadRule implements Rule
{
private static final EmittingLogger log = new EmittingLogger(LoadRule.class);
private static final String assignedCount = "assignedCount";
private static final String droppedCount = "droppedCount";
@Override
public CoordinatorStats run(DruidCoordinator coordinator, DruidCoordinatorRuntimeParams params, DataSegment segment)
{
final CoordinatorStats stats = new CoordinatorStats();
final Set<DataSegment> availableSegments = params.getAvailableSegments();
final Map<String, Integer> loadStatus = Maps.newHashMap();
int totalReplicantsInCluster = params.getSegmentReplicantLookup().getTotalReplicants(segment.getIdentifier());
for (Map.Entry<String, Integer> entry : getTieredReplicants().entrySet()) {
final String tier = entry.getKey();
final int expectedReplicantsInTier = entry.getValue();
final int totalReplicantsInTier = params.getSegmentReplicantLookup()
.getTotalReplicants(segment.getIdentifier(), tier);
final int loadedReplicantsInTier = params.getSegmentReplicantLookup()
.getLoadedReplicants(segment.getIdentifier(), tier);
final MinMaxPriorityQueue<ServerHolder> serverQueue = params.getDruidCluster().getServersByTier(tier);
if (serverQueue == null) {
log.makeAlert("Tier[%s] has no servers! Check your cluster configuration!", tier).emit();
continue;
}
final List<ServerHolder> serverHolderList = Lists.newArrayList(serverQueue);
final DateTime referenceTimestamp = params.getBalancerReferenceTimestamp();
final BalancerStrategy strategy = params.getBalancerStrategyFactory().createBalancerStrategy(referenceTimestamp);
if (availableSegments.contains(segment)) {
CoordinatorStats assignStats = assign(
params.getReplicationManager(),
tier,
totalReplicantsInCluster,
expectedReplicantsInTier,
totalReplicantsInTier,
strategy,
serverHolderList,
segment
);
stats.accumulate(assignStats);
totalReplicantsInCluster += assignStats.getPerTierStats().get(assignedCount).get(tier).get();
}
loadStatus.put(tier, expectedReplicantsInTier - loadedReplicantsInTier);
}
// Remove over-replication
stats.accumulate(drop(loadStatus, segment, params));
return stats;
}
private CoordinatorStats assign(
final ReplicationThrottler replicationManager,
final String tier,
final int totalReplicantsInCluster,
final int expectedReplicantsInTier,
final int totalReplicantsInTier,
final BalancerStrategy strategy,
final List<ServerHolder> serverHolderList,
final DataSegment segment
)
{
final CoordinatorStats stats = new CoordinatorStats();
stats.addToTieredStat(assignedCount, tier, 0);
int currReplicantsInTier = totalReplicantsInTier;
int currTotalReplicantsInCluster = totalReplicantsInCluster;
while (currReplicantsInTier < expectedReplicantsInTier) {
boolean replicate = currTotalReplicantsInCluster > 0;
if (replicate && !replicationManager.canCreateReplicant(tier)) {
break;
}
final ServerHolder holder = strategy.findNewSegmentHomeReplicator(segment, serverHolderList);
if (holder == null) {
log.warn(
"Not enough [%s] servers or node capacity to assign segment[%s]! Expected Replicants[%d]",
tier,
segment.getIdentifier(),
expectedReplicantsInTier
);
break;
}
if (replicate) {
replicationManager.registerReplicantCreation(
tier, segment.getIdentifier(), holder.getServer().getHost()
);
}
holder.getPeon().loadSegment(
segment,
new LoadPeonCallback()
{
@Override
public void execute()
{
replicationManager.unregisterReplicantCreation(
tier,
segment.getIdentifier(),
holder.getServer().getHost()
);
}
}
);
stats.addToTieredStat(assignedCount, tier, 1);
++currReplicantsInTier;
++currTotalReplicantsInCluster;
}
return stats;
}
private CoordinatorStats drop(
final Map<String, Integer> loadStatus,
final DataSegment segment,
final DruidCoordinatorRuntimeParams params
)
{
CoordinatorStats stats = new CoordinatorStats();
// Make sure we have enough loaded replicants in the correct tiers in the cluster before doing anything
for (Integer leftToLoad : loadStatus.values()) {
if (leftToLoad > 0) {
return stats;
}
}
final ReplicationThrottler replicationManager = params.getReplicationManager();
// Find all instances of this segment across tiers
Map<String, Integer> replicantsByTier = params.getSegmentReplicantLookup().getClusterTiers(segment.getIdentifier());
for (Map.Entry<String, Integer> entry : replicantsByTier.entrySet()) {
final String tier = entry.getKey();
int loadedNumReplicantsForTier = entry.getValue();
int expectedNumReplicantsForTier = getNumReplicants(tier);
stats.addToTieredStat(droppedCount, tier, 0);
MinMaxPriorityQueue<ServerHolder> serverQueue = params.getDruidCluster().get(tier);
if (serverQueue == null) {
log.makeAlert("No holders found for tier[%s]", entry.getKey()).emit();
continue;
}
List<ServerHolder> droppedServers = Lists.newArrayList();
while (loadedNumReplicantsForTier > expectedNumReplicantsForTier) {
final ServerHolder holder = serverQueue.pollLast();
if (holder == null) {
log.warn("Wtf, holder was null? I have no servers serving [%s]?", segment.getIdentifier());
break;
}
if (holder.isServingSegment(segment)) {
if (expectedNumReplicantsForTier > 0) { // don't throttle unless we are removing extra replicants
if (!replicationManager.canDestroyReplicant(tier)) {
serverQueue.add(holder);
break;
}
replicationManager.registerReplicantTermination(
tier,
segment.getIdentifier(),
holder.getServer().getHost()
);
}
holder.getPeon().dropSegment(
segment,
new LoadPeonCallback()
{
@Override
public void execute()
{
replicationManager.unregisterReplicantTermination(
tier,
segment.getIdentifier(),
holder.getServer().getHost()
);
}
}
);
--loadedNumReplicantsForTier;
stats.addToTieredStat(droppedCount, tier, 1);
}
droppedServers.add(holder);
}
serverQueue.addAll(droppedServers);
}
return stats;
}
protected void validateTieredReplicants(Map<String, Integer> tieredReplicants)
{
if (tieredReplicants.size() == 0) {
throw new IAE("A rule with empty tiered replicants is invalid");
}
for (Map.Entry<String, Integer> entry : tieredReplicants.entrySet()) {
if (entry.getValue() == null) {
throw new IAE("Replicant value cannot be empty");
}
if (entry.getValue() < 0) {
throw new IAE("Replicant value [%d] is less than 0, which is not allowed", entry.getValue());
}
}
}
public abstract Map<String, Integer> getTieredReplicants();
public abstract int getNumReplicants(String tier);
}
| tubemogul/druid | server/src/main/java/io/druid/server/coordinator/rules/LoadRule.java | Java | apache-2.0 | 9,453 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.deltaspike.core.util;
import org.apache.deltaspike.core.spi.activation.Deactivatable;
import javax.enterprise.inject.Typed;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ServiceLoader;
import java.util.logging.Logger;
/**
* Allows handling the lookup (with fallbacks) in a central place.
* See DELTASPIKE-97
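* <p>
* Illustrative usage (added for clarity; {@code MyPlugin} is a hypothetical service
* interface registered via {@code META-INF/services}):
* <pre>{@code
* List<MyPlugin> plugins = ServiceUtils.loadServiceImplementations(MyPlugin.class);
* }</pre>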
*/
@Typed()
public abstract class ServiceUtils
{
private static final Logger LOG = Logger.getLogger(ServiceUtils.class.getName());
private ServiceUtils()
{
// prevent instantiation
}
public static <T> List<T> loadServiceImplementations(Class<T> serviceType)
{
return loadServiceImplementations(serviceType, false);
}
public static <T> List<T> loadServiceImplementations(Class<T> serviceType,
boolean ignoreServicesWithMissingDependencies)
{
List<T> result = new ArrayList<T>();
Iterator<T> servicesIterator = ServiceLoader.load(serviceType).iterator();
if (!servicesIterator.hasNext())
{
ClassLoader fallbackClassLoader = ServiceUtils.class.getClassLoader();
servicesIterator = ServiceLoader.load(serviceType, fallbackClassLoader).iterator();
}
while (servicesIterator.hasNext())
{
try
{
T service = servicesIterator.next();
if (service instanceof Deactivatable &&
!ClassDeactivationUtils.isActivated((Class<? extends Deactivatable>) service.getClass()))
{
LOG.info("deactivated service: " + service.getClass().getName());
continue;
}
result.add(service);
}
catch (Throwable t)
{
if (!ignoreServicesWithMissingDependencies)
{
throw ExceptionUtils.throwAsRuntimeException(t);
}
else
{
LOG.info("service filtered - caused by " + t.getMessage());
}
}
}
return result;
}
}
| rdicroce/deltaspike | deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/ServiceUtils.java | Java | apache-2.0 | 3,022 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jena;
import static org.apache.jena.atlas.logging.LogCtl.setCmdLogging;
import java.net.*;
import java.io.*;
import org.apache.jena.rdf.model.* ;
import org.apache.jena.shared.JenaException ;
/** A program which reads an RDF model and copies it to the standard output stream.
*
* <p>This program will read an RDF model, in a variety of languages,
* and copy it to the output stream in a possibly different language.
* Input can be read either from a URL or from a file.
* The program writes its results to the standard output stream and sets
* its exit code to 0 if the program terminates normally, and
* to -1 if it encounters an error.</p>
*
* <p></p>
*
* <pre>java jena.rdfcopy model [inlang [outlang]]
*
* model can be a file name or a URL
* inlang and outlang specify the language of the input and output
* respectively and can be:
* RDF/XML
* N-TRIPLE
* TURTLE
* N3
* The input language defaults to RDF/XML and the output language
* defaults to N-TRIPLE.
* </pre>
*/
public class rdfcopy extends java.lang.Object {
static { setCmdLogging(); }
/**
* @param args the command line arguments
*/
public static void main(String ... args) {
if ( ( args.length < 1 ) || ( "-h".equals(args[0]) ) ) {
usage();
System.exit(-1);
}
String in = args[0];
String inlang = "RDF/XML";
int j;
for (j = 1; j < args.length && args[j].contains( "=" ); j++)
{}
int lastInProp = j;
if (j < args.length) {
inlang = args[j];
}
j++;
String outlang = "N-TRIPLE";
for (; j < args.length && args[j].contains( "=" ); j++)
{}
int lastOutProp = j;
if (j < args.length) {
outlang = args[j];
}
if (j + 1 < args.length) {
// System.err.println(j+"<<"+args.length);
usage();
System.exit(-1);
}
try {
Model m = ModelFactory.createDefaultModel();
String base = in ;
RDFReader rdr = m.getReader(inlang);
for (j = 1; j < lastInProp; j++) {
int eq = args[j].indexOf("=");
rdr.setProperty(
args[j].substring(0, eq),
args[j].substring(eq + 1));
}
try {
rdr.read(m, in);
} catch (JenaException ex)
{
if ( ! ( ex.getCause() instanceof MalformedURLException ) )
throw ex ;
// Tried as a URL. Try as a file name.
// Make absolute
File f = new File(in) ;
base = "file:///"+f.getCanonicalPath().replace('\\','/') ;
rdr.read(m, new FileInputStream(in), base) ;
}
RDFWriter w = m.getWriter(outlang);
j++;
for (; j < lastOutProp; j++) {
int eq = args[j].indexOf("=");
w.setProperty(
args[j].substring(0, eq),
args[j].substring(eq + 1));
}
w.write(m, System.out, null) ;
System.exit(0);
} catch (Exception e) {
System.err.println("Unhandled exception:");
System.err.println(" " + e.toString());
System.exit(-1);
}
}
protected static void usage() {
System.err.println("usage:");
System.err.println(" java jena.rdfcopy in {inprop=inval}* [ inlang {outprop=outval}* outlang]]");
System.err.println();
System.err.println(" in can be a URL or a filename");
System.err.println(" inlang and outlang can take values:");
System.err.println(" RDF/XML");
System.err.println(" RDF/XML-ABBREV");
System.err.println(" N-TRIPLE");
System.err.println(" TURTLE");
System.err.println(" N3");
System.err.println(
" inlang defaults to RDF/XML, outlang to N-TRIPLE");
System.err.println(" The legal values for inprop and outprop depend on inlang and outlang.");
System.err.println(" The legal values for inval and outval depend on inprop and outprop.");
System.err.println();
}
protected static void read(Model model, String in, String lang)
throws java.io.FileNotFoundException {
try {
URL url = new URL(in);
model.read(in, lang);
} catch (java.net.MalformedURLException e) {
model.read(new FileInputStream(in), "", lang);
}
}
}
| kidaa/jena | jena-core/src/main/java/jena/rdfcopy.java | Java | apache-2.0 | 5,015 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
let nativeModules = {}
// for testing
/**
* for testing
*/
export function getModule (moduleName) {
return nativeModules[moduleName]
}
/**
* for testing
*/
export function clearModules () {
nativeModules = {}
}
// for framework
/**
* init modules for an app instance
* the second param determines whether to replace an existing method
*/
export function initModules (modules, ifReplace) {
for (const moduleName in modules) {
// init `modules[moduleName][]`
let methods = nativeModules[moduleName]
if (!methods) {
methods = {}
nativeModules[moduleName] = methods
}
// add each new method unless it already exists (or ifReplace is set)
modules[moduleName].forEach(function (method) {
if (typeof method === 'string') {
method = {
name: method
}
}
if (!methods[method.name] || ifReplace) {
methods[method.name] = method
}
})
}
}
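// Illustrative call (not part of the original module; the module and method names
// below are made up for the example):
//
//   initModules({ event: ['openUrl', { name: 'fireEvent' }] })
//
// registers `event.openUrl` and `event.fireEvent` in nativeModules unless they
// already exist; passing `true` as the second argument would overwrite them.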
/**
* init app methods
*/
export function initMethods (Vm, apis) {
const p = Vm.prototype
for (const apiName in apis) {
if (!p.hasOwnProperty(apiName)) {
p[apiName] = apis[apiName]
}
}
}
/**
* get a module of methods for an app instance
*/
export function requireModule (app, name) {
const methods = nativeModules[name]
const target = {}
for (const methodName in methods) {
Object.defineProperty(target, methodName, {
configurable: true,
enumerable: true,
get: function moduleGetter () {
return (...args) => app.callTasks({
module: name,
method: methodName,
args: args
})
},
set: function moduleSetter (value) {
if (typeof value === 'function') {
return app.callTasks({
module: name,
method: methodName,
args: [value]
})
}
}
})
}
return target
}
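// Illustrative effect (not part of the original module; 'modal' and 'toast' are
// example names only): requireModule(app, 'modal').toast('hi') resolves the getter
// for 'toast' and ends up calling
// app.callTasks({ module: 'modal', method: 'toast', args: ['hi'] }).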
/**
* get a custom component options
*/
export function requireCustomComponent (app, name) {
const { customComponentMap } = app
return customComponentMap[name]
}
/**
* register a custom component options
*/
export function registerCustomComponent (app, name, def) {
const { customComponentMap } = app
if (customComponentMap[name]) {
console.error(`[JS Framework] Cannot define a component (${name}) that already exists`)
return
}
customComponentMap[name] = def
}
| cxfeng1/incubator-weex | runtime/frameworks/legacy/app/register.js | JavaScript | apache-2.0 | 3,146 |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System;
using System.Net;
using System.Net.Http;
using Microsoft.WindowsAzure.Commands.Common;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using Microsoft.Azure.Commands.Common.Authentication;
using Microsoft.Azure.Commands.Common.Authentication.Factories;
namespace Microsoft.WindowsAzure.Commands.Utilities.Websites
{
public abstract class KuduRemoteClientBase
{
/// <summary>
/// Parameterless constructor for mocking
/// </summary>
protected KuduRemoteClientBase()
{
}
protected KuduRemoteClientBase(
string serviceUrl,
ICredentials credentials = null,
HttpMessageHandler handler = null)
{
if (serviceUrl == null)
{
throw new ArgumentNullException("serviceUrl");
}
ServiceUrl = GeneralUtilities.EnsureTrailingSlash(serviceUrl);
Credentials = credentials;
if (credentials != null)
{
Client = AzureSession.Instance.ClientFactory.CreateHttpClient(serviceUrl, ClientFactory.CreateHttpClientHandler(serviceUrl, credentials));
}
else
{
Client = AzureSession.Instance.ClientFactory.CreateHttpClient(serviceUrl, handler);
}
}
public string ServiceUrl { get; private set; }
public ICredentials Credentials { get; private set; }
public HttpClient Client { get; private set; }
}
}
| atpham256/azure-powershell | src/ServiceManagement/Services/Commands.Utilities/Websites/KuduRemoteClientBase.cs | C# | apache-2.0 | 2,292 |
package com.tutsplus.vectordrawables;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
public ApplicationTest() {
super(Application.class);
}
} | biwoodfengs/AndroidDemoProjects | VectorDrawables/app/src/androidTest/java/com/tutsplus/vectordrawables/ApplicationTest.java | Java | apache-2.0 | 359 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.common.collect.EvictingQueue;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramFactory;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;
public class SerialDiffPipelineAggregator extends PipelineAggregator {
private DocValueFormat formatter;
private GapPolicy gapPolicy;
private int lag;
SerialDiffPipelineAggregator(
String name,
String[] bucketsPaths,
@Nullable DocValueFormat formatter,
GapPolicy gapPolicy,
int lag,
Map<String, Object> metadata
) {
super(name, bucketsPaths, metadata);
this.formatter = formatter;
this.gapPolicy = gapPolicy;
this.lag = lag;
}
@Override
public InternalAggregation reduce(InternalAggregation aggregation, AggregationReduceContext reduceContext) {
@SuppressWarnings("rawtypes")
InternalMultiBucketAggregation<
? extends InternalMultiBucketAggregation,
? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation<
? extends InternalMultiBucketAggregation,
? extends InternalMultiBucketAggregation.InternalBucket>) aggregation;
List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = histo.getBuckets();
HistogramFactory factory = (HistogramFactory) histo;
List<Bucket> newBuckets = new ArrayList<>();
EvictingQueue<Double> lagWindow = new EvictingQueue<>(lag);
int counter = 0;
for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy);
Bucket newBucket = bucket;
counter += 1;
// Still under the initial lag period, add nothing and move on
Double lagValue;
if (counter <= lag) {
lagValue = Double.NaN;
} else {
lagValue = lagWindow.peek(); // Peek here, because we rely on add'ing to always move the window
}
// Normalize null's to NaN
if (thisBucketValue == null) {
thisBucketValue = Double.NaN;
}
// Both have values, calculate diff and replace the "empty" bucket
if (Double.isNaN(thisBucketValue) == false && Double.isNaN(lagValue) == false) {
double diff = thisBucketValue - lagValue;
List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false)
.map((p) -> (InternalAggregation) p)
.collect(Collectors.toList());
aggs.add(new InternalSimpleValue(name(), diff, formatter, metadata()));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
}
newBuckets.add(newBucket);
lagWindow.add(thisBucketValue);
}
return factory.createAggregation(newBuckets);
}
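// Worked example (illustrative, not in the original file): with lag == 1 and bucket
// values [10, 15, 12], the first bucket gets no diff (it is still inside the lag
// period), the second gets an InternalSimpleValue of 5, and the third gets -3.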
}
| GlenRSmith/elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java | Java | apache-2.0 | 4,241 |
std_trap = trap("INT") { exit! 130 } # no backtrace thanks
require "pathname"
HOMEBREW_LIBRARY_PATH = Pathname.new(__FILE__).realpath.parent.join("Homebrew")
$:.unshift(HOMEBREW_LIBRARY_PATH.to_s)
require "global"
if ARGV == %w[--version] || ARGV == %w[-v]
puts "Homebrew #{Homebrew.homebrew_version_string}"
puts "Homebrew/homebrew-core #{Homebrew.core_tap_version_string}"
exit 0
end
if OS.mac? && MacOS.version < "10.6"
abort <<-EOABORT.undent
Homebrew requires Snow Leopard or higher. For Tiger and Leopard support, see:
https://github.com/mistydemeo/tigerbrew
EOABORT
end
def require?(path)
require path
rescue LoadError => e
# HACK: we should raise on syntax errors, but not if the file simply
# doesn't exist. TODO: make this robust!
raise unless e.to_s.include? path
end
begin
trap("INT", std_trap) # restore default CTRL-C handler
empty_argv = ARGV.empty?
help_flag_list = %w[-h --help --usage -? help]
help_flag = false
internal_cmd = true
cmd = nil
ARGV.dup.each_with_index do |arg, i|
if help_flag && cmd
break
elsif help_flag_list.include? arg
help_flag = true
elsif !cmd
cmd = ARGV.delete_at(i)
end
end
# Add contributed commands to PATH before checking.
Dir["#{HOMEBREW_LIBRARY}/Taps/*/*/cmd"].each do |tap_cmd_dir|
ENV["PATH"] += "#{File::PATH_SEPARATOR}#{tap_cmd_dir}"
end
# Add SCM wrappers.
ENV["PATH"] += "#{File::PATH_SEPARATOR}#{HOMEBREW_SHIMS_PATH}/scm"
if cmd
internal_cmd = require? HOMEBREW_LIBRARY_PATH.join("cmd", cmd)
if !internal_cmd && ARGV.homebrew_developer?
internal_cmd = require? HOMEBREW_LIBRARY_PATH.join("dev-cmd", cmd)
end
end
# Usage instructions should be displayed if and only if one of:
# - a help flag is passed AND an internal command is matched
# - a help flag is passed AND there is no command specified
# - no arguments are passed
#
# It should never affect external commands so they can handle usage
# arguments themselves.
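# For example (illustrative only): `brew` with no arguments and `brew help install`
# both print the general usage here (see the TODO below about subcommand help),
# while `brew some-external-cmd --help` falls through so that the external
# `brew-some-external-cmd` can handle its own --help flag.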
if empty_argv || (help_flag && (cmd.nil? || internal_cmd))
# TODO: - `brew help cmd` should display subcommand help
require "cmd/help"
if empty_argv
$stderr.puts ARGV.usage
else
puts ARGV.usage
end
exit ARGV.any? ? 0 : 1
end
if internal_cmd
Homebrew.send cmd.to_s.tr("-", "_").downcase
elsif which "brew-#{cmd}"
%w[CACHE LIBRARY_PATH].each do |e|
ENV["HOMEBREW_#{e}"] = Object.const_get("HOMEBREW_#{e}").to_s
end
exec "brew-#{cmd}", *ARGV
elsif (path = which("brew-#{cmd}.rb")) && require?(path)
exit Homebrew.failed? ? 1 : 0
else
require "tap"
possible_tap = case cmd
when "brewdle", "brewdler", "bundle", "bundler"
Tap.fetch("Homebrew", "bundle")
when "cask"
Tap.fetch("caskroom", "cask")
when "services"
Tap.fetch("Homebrew", "services")
end
if possible_tap && !possible_tap.installed?
brew_uid = HOMEBREW_BREW_FILE.stat.uid
tap_commands = []
if Process.uid.zero? && !brew_uid.zero?
tap_commands += %W[/usr/bin/sudo -u ##{brew_uid}]
end
tap_commands += %W[#{HOMEBREW_BREW_FILE} tap #{possible_tap}]
safe_system *tap_commands
exec HOMEBREW_BREW_FILE, cmd, *ARGV
else
onoe "Unknown command: #{cmd}"
exit 1
end
end
rescue FormulaUnspecifiedError
abort "This command requires a formula argument"
rescue KegUnspecifiedError
abort "This command requires a keg argument"
rescue UsageError
onoe "Invalid usage"
abort ARGV.usage
rescue SystemExit => e
onoe "Kernel.exit" if ARGV.verbose? && !e.success?
$stderr.puts e.backtrace if ARGV.debug?
raise
rescue Interrupt => e
$stderr.puts # seemingly a newline is typical
exit 130
rescue BuildError => e
e.dump
exit 1
rescue RuntimeError, SystemCallError => e
raise if e.message.empty?
onoe e
$stderr.puts e.backtrace if ARGV.debug?
exit 1
rescue Exception => e
onoe e
if internal_cmd
$stderr.puts "#{Tty.white}Please report this bug:"
$stderr.puts " #{Tty.em}#{OS::ISSUES_URL}#{Tty.reset}"
end
$stderr.puts e.backtrace
exit 1
else
exit 1 if Homebrew.failed?
end
| joshfriend/homebrew | Library/brew.rb | Ruby | bsd-2-clause | 4,159 |
// Copyright Louis Dionne 2013-2016
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt)
#include <boost/hana/and.hpp>
#include <boost/hana/assert.hpp>
#include <boost/hana/bool.hpp>
namespace hana = boost::hana;
BOOST_HANA_CONSTANT_CHECK(hana::and_(hana::true_c, hana::true_c, hana::true_c, hana::true_c));
static_assert(!hana::and_(hana::true_c, false, hana::true_c, hana::true_c), "");
int main() { }
| bureau14/qdb-benchmark | thirdparty/boost/libs/hana/example/and.cpp | C++ | bsd-2-clause | 499 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class BigJsPageSet(page_set_module.PageSet):
""" Sites which load and run big JavaScript files."""
def __init__(self):
super(BigJsPageSet, self).__init__(
archive_data_file='data/big_js.json',
bucket=page_set_module.PARTNER_BUCKET,
user_agent_type='desktop')
# Page sets with only one page don't work well, since we end up reusing a
# renderer all the time and it keeps its memory caches alive (see
# crbug.com/403735). Add a dummy second page here.
urls_list = [
'http://beta.unity3d.com/jonas/DT2/',
'http://www.foo.com',
]
for url in urls_list:
self.AddUserStory(page_module.Page(url, self))
| markYoungH/chromium.src | tools/perf/page_sets/big_js.py | Python | bsd-3-clause | 935 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "remoting/host/user_authenticator.h"
#include <Security/Security.h>
#include <string>
#include "base/basictypes.h"
#include "base/mac/mac_logging.h"
namespace remoting {
namespace {
class UserAuthenticatorMac : public UserAuthenticator {
public:
UserAuthenticatorMac() {}
virtual ~UserAuthenticatorMac() {}
virtual bool Authenticate(const std::string& username,
const std::string& password);
private:
DISALLOW_COPY_AND_ASSIGN(UserAuthenticatorMac);
};
const char kAuthorizationRightName[] = "system.login.tty";
bool UserAuthenticatorMac::Authenticate(const std::string& username,
const std::string& password) {
// The authorization right being requested. This particular right allows
// testing of a username/password, as if the user were logging on to the
// system locally.
AuthorizationItem right;
right.name = kAuthorizationRightName;
right.valueLength = 0;
right.value = NULL;
right.flags = 0;
AuthorizationRights rights;
rights.count = 1;
rights.items = &right;
// Passing the username/password as an "environment" parameter causes these
// to be submitted to the Security Framework, instead of the interactive
// password prompt appearing on the host system. Valid on OS X 10.4 and
// later versions.
AuthorizationItem environment_items[2];
environment_items[0].name = kAuthorizationEnvironmentUsername;
environment_items[0].valueLength = username.size();
environment_items[0].value = const_cast<char*>(username.data());
environment_items[0].flags = 0;
environment_items[1].name = kAuthorizationEnvironmentPassword;
environment_items[1].valueLength = password.size();
environment_items[1].value = const_cast<char*>(password.data());
environment_items[1].flags = 0;
AuthorizationEnvironment environment;
environment.count = 2;
environment.items = environment_items;
OSStatus status = AuthorizationCreate(&rights, &environment,
kAuthorizationFlagExtendRights,
NULL);
switch (status) {
case errAuthorizationSuccess:
return true;
case errAuthorizationDenied:
return false;
default:
OSSTATUS_LOG(ERROR, status) << "AuthorizationCreate";
return false;
}
}
} // namespace
// static
UserAuthenticator* UserAuthenticator::Create() {
return new UserAuthenticatorMac();
}
} // namespace remoting
| aYukiSekiguchi/ACCESS-Chromium | remoting/host/user_authenticator_mac.cc | C++ | bsd-3-clause | 2,647 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
(function() {
// We are going to kill all of the builtins, so hold onto the ones we need.
var defineGetter = Object.prototype.__defineGetter__;
var defineSetter = Object.prototype.__defineSetter__;
var Error = window.Error;
var forEach = Array.prototype.forEach;
var push = Array.prototype.push;
var hasOwnProperty = Object.prototype.hasOwnProperty;
var getOwnPropertyNames = Object.getOwnPropertyNames;
var stringify = JSON.stringify;
// Kill all of the builtins functions to give us a fairly high confidence that
// the environment our bindings run in can't interfere with our code.
// These are taken from the ECMAScript spec.
var builtinTypes = [
Object, Function, Array, String, Boolean, Number, Math, Date, RegExp, JSON,
];
function clobber(obj, name, qualifiedName) {
// Clobbering constructors would break everything.
// Clobbering toString is annoying.
// Clobbering __proto__ breaks in ways that grep can't find.
// Clobbering function name will break because
// SafeBuiltins does not support getters yet. See crbug.com/463526.
// Clobbering Function.call would make it impossible to implement these tests.
// Clobbering Object.valueOf breaks v8.
// Clobbering %FunctionPrototype%.caller and .arguments will break because
// these properties are poisoned accessors in ES6.
if (name == 'constructor' ||
name == 'toString' ||
name == '__proto__' ||
name == 'name' && typeof obj == 'function' ||
qualifiedName == 'Function.call' ||
(obj !== Function && qualifiedName == 'Function.caller') ||
(obj !== Function && qualifiedName == 'Function.arguments') ||
qualifiedName == 'Object.valueOf') {
return;
}
if (typeof obj[name] == 'function') {
obj[name] = function() {
throw new Error('Clobbered ' + qualifiedName + ' function');
};
} else {
defineGetter.call(obj, name, function() {
throw new Error('Clobbered ' + qualifiedName + ' getter');
});
}
}
forEach.call(builtinTypes, function(builtin) {
var prototype = builtin.prototype;
var typename = '<unknown>';
if (prototype) {
typename = prototype.constructor.name;
forEach.call(getOwnPropertyNames(prototype), function(name) {
clobber(prototype, name, typename + '.' + name);
});
}
forEach.call(getOwnPropertyNames(builtin), function(name) {
clobber(builtin, name, typename + '.' + name);
});
if (builtin.name)
clobber(window, builtin.name, 'window.' + builtin.name);
});
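// Illustrative effect (comment added for clarity): once the loop above has run,
// something as ordinary as [].slice() throws Error('Clobbered Array.slice function')
// and calling window.Array() throws 'Clobbered window.Array function', so any binding
// code that still works afterwards is not relying on the page's builtins.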
// Codes for test results. Must match ExternallyConnectableMessagingTest::Result
// in c/b/extensions/extension_messages_apitest.cc.
var results = {
OK: 0,
NAMESPACE_NOT_DEFINED: 1,
FUNCTION_NOT_DEFINED: 2,
COULD_NOT_ESTABLISH_CONNECTION_ERROR: 3,
OTHER_ERROR: 4,
INCORRECT_RESPONSE_SENDER: 5,
INCORRECT_RESPONSE_MESSAGE: 6,
};
// Make the messages sent vaguely complex, but unambiguously JSON-ifiable.
var kMessage = [{'a': {'b': 10}}, 20, 'c\x10\x11'];
// Our tab's location. Normally this would be our document's location but if
// we're an iframe it will be the location of the parent - in which case,
// expect to be told.
var tabLocationHref = null;
if (parent == window) {
tabLocationHref = document.location.href;
} else {
window.addEventListener('message', function listener(event) {
window.removeEventListener('message', listener);
tabLocationHref = event.data;
});
}
function checkLastError(reply) {
if (!chrome.runtime.lastError)
return true;
var kCouldNotEstablishConnection =
'Could not establish connection. Receiving end does not exist.';
if (chrome.runtime.lastError.message == kCouldNotEstablishConnection)
reply(results.COULD_NOT_ESTABLISH_CONNECTION_ERROR);
else
reply(results.OTHER_ERROR);
return false;
}
function checkResponse(response, reply, expectedMessage, isApp) {
// The response will be an echo of both the original message *and* the
// MessageSender (with the tab field stripped down).
//
// First check the sender was correct.
var incorrectSender = false;
if (!isApp) {
// Only extensions get access to a 'tab' property.
if (!hasOwnProperty.call(response.sender, 'tab')) {
console.warn('Expected a tab, got none');
incorrectSender = true;
}
if (response.sender.tab.url != tabLocationHref) {
console.warn('Expected tab url ' + tabLocationHref + ' got ' +
response.sender.tab.url);
incorrectSender = true;
}
}
if (hasOwnProperty.call(response.sender, 'id')) {
console.warn('Expected no id, got "' + response.sender.id + '"');
incorrectSender = true;
}
if (response.sender.url != document.location.href) {
console.warn('Expected url ' + document.location.href + ' got ' +
response.sender.url);
incorrectSender = true;
}
if (incorrectSender) {
reply(results.INCORRECT_RESPONSE_SENDER);
return false;
}
// Check the correct content was echoed.
var expectedJson = stringify(expectedMessage);
var actualJson = stringify(response.message);
if (actualJson == expectedJson)
return true;
console.warn('Expected message ' + expectedJson + ' got ' + actualJson);
reply(results.INCORRECT_RESPONSE_MESSAGE);
return false;
}
function sendToBrowser(msg) {
domAutomationController.send(msg);
}
function sendToBrowserForTlsChannelId(result) {
// Because the TLS channel ID tests read the TLS either an error code or the
// TLS channel ID string from the same value, they require the result code
// to be sent as a string.
// String() is clobbered, so coerce string creation with +.
sendToBrowser("" + result);
}
function checkRuntime(reply) {
if (!reply)
reply = sendToBrowser;
if (!chrome.runtime) {
reply(results.NAMESPACE_NOT_DEFINED);
return false;
}
if (!chrome.runtime.connect || !chrome.runtime.sendMessage) {
reply(results.FUNCTION_NOT_DEFINED);
return false;
}
return true;
}
function checkRuntimeForTlsChannelId() {
return checkRuntime(sendToBrowserForTlsChannelId);
}
function checkTlsChannelIdResponse(response) {
if (chrome.runtime.lastError) {
if (chrome.runtime.lastError.message == kCouldNotEstablishConnection)
sendToBrowserForTlsChannelId(
results.COULD_NOT_ESTABLISH_CONNECTION_ERROR);
else
sendToBrowserForTlsChannelId(results.OTHER_ERROR);
return;
}
if (response.sender.tlsChannelId !== undefined)
sendToBrowserForTlsChannelId(response.sender.tlsChannelId);
else
sendToBrowserForTlsChannelId('');
}
window.actions = {
appendIframe: function(src) {
var iframe = document.createElement('iframe');
// When iframe has loaded, notify it of our tab location (probably
// document.location) to use in its assertions, then continue.
iframe.addEventListener('load', function listener() {
iframe.removeEventListener('load', listener);
iframe.contentWindow.postMessage(tabLocationHref, '*');
sendToBrowser(true);
});
iframe.src = src;
document.body.appendChild(iframe);
}
};
window.assertions = {
canConnectAndSendMessages: function(extensionId, isApp, message) {
if (!checkRuntime())
return;
if (!message)
message = kMessage;
function canSendMessage(reply) {
chrome.runtime.sendMessage(extensionId, message, function(response) {
if (checkLastError(reply) &&
checkResponse(response, reply, message, isApp)) {
reply(results.OK);
}
});
}
function canConnectAndSendMessages(reply) {
var port = chrome.runtime.connect(extensionId);
port.postMessage(message, function() {
checkLastError(reply);
});
port.postMessage(message, function() {
checkLastError(reply);
});
var pendingResponses = 2;
var ok = true;
port.onMessage.addListener(function(response) {
pendingResponses--;
ok = ok && checkLastError(reply) &&
checkResponse(response, reply, message, isApp);
if (pendingResponses == 0 && ok)
reply(results.OK);
});
}
canSendMessage(function(result) {
if (result != results.OK)
sendToBrowser(result);
else
canConnectAndSendMessages(sendToBrowser);
});
},
trySendMessage: function(extensionId) {
chrome.runtime.sendMessage(extensionId, kMessage, function(response) {
// The result is unimportant. All that matters is the attempt.
});
},
tryIllegalArguments: function() {
// Tests that illegal arguments to messaging functions throw exceptions.
// Regression test for crbug.com/472700, where they crashed the renderer.
function runIllegalFunction(fun) {
try {
fun();
} catch(e) {
return true;
}
console.error('Function did not throw exception: ' + fun);
sendToBrowser(false);
return false;
}
var result =
runIllegalFunction(chrome.runtime.connect) &&
runIllegalFunction(function() {
chrome.runtime.connect('');
}) &&
runIllegalFunction(function() {
chrome.runtime.connect(42);
}) &&
runIllegalFunction(function() {
chrome.runtime.connect('', 42);
}) &&
runIllegalFunction(function() {
chrome.runtime.connect({name: 'noname'});
}) &&
runIllegalFunction(chrome.runtime.sendMessage) &&
runIllegalFunction(function() {
chrome.runtime.sendMessage('');
}) &&
runIllegalFunction(function() {
chrome.runtime.sendMessage(42);
}) &&
runIllegalFunction(function() {
chrome.runtime.sendMessage('', 42);
}) &&
sendToBrowser(true);
},
areAnyRuntimePropertiesDefined: function(names) {
var result = false;
if (chrome.runtime) {
forEach.call(names, function(name) {
if (chrome.runtime[name]) {
console.log('runtime.' + name + ' is defined');
result = true;
}
});
}
sendToBrowser(result);
},
getTlsChannelIdFromPortConnect: function(extensionId, includeTlsChannelId,
message) {
if (!checkRuntimeForTlsChannelId())
return;
if (!message)
message = kMessage;
var port = chrome.runtime.connect(extensionId,
{'includeTlsChannelId': includeTlsChannelId});
port.onMessage.addListener(checkTlsChannelIdResponse);
port.postMessage(message);
},
getTlsChannelIdFromSendMessage: function(extensionId, includeTlsChannelId,
message) {
if (!checkRuntimeForTlsChannelId())
return;
if (!message)
message = kMessage;
chrome.runtime.sendMessage(extensionId, message,
{'includeTlsChannelId': includeTlsChannelId},
checkTlsChannelIdResponse);
}
};
}());
| Chilledheart/chromium | chrome/test/data/extensions/api_test/messaging/externally_connectable/sites/assertions.js | JavaScript | bsd-3-clause | 11,033 |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.support.appnavigation.app;
import com.example.android.support.appnavigation.R;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
public class OutsideTaskActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.outside_task);
}
public void onViewContent(View v) {
Intent intent = new Intent(Intent.ACTION_VIEW)
.setType("application/x-example")
.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
startActivity(intent);
}
}
| madhavanks26/com.vliesaputra.deviceinformation | src/com/vliesaputra/cordova/plugins/android/support/samples/SupportAppNavigation/src/com/example/android/support/appnavigation/app/OutsideTaskActivity.java | Java | mit | 1,318 |
// No generics for getters and setters
({ set foo<T>(newFoo) {} })
| facebook/flow | src/parser/test/flow/invalid_syntax/migrated_0012.js | JavaScript | mit | 67 |
export const tinyNDArray: any;
| markogresak/DefinitelyTyped | types/poisson-disk-sampling/src/tiny-ndarray.d.ts | TypeScript | mit | 31 |
//------------------------------------------------------------------------------
// <copyright file="ValueOfAction.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <owner current="true" primary="true">[....]</owner>
//------------------------------------------------------------------------------
namespace System.Xml.Xsl.XsltOld {
using Res = System.Xml.Utils.Res;
using System;
using System.Diagnostics;
using System.Xml;
using System.Xml.XPath;
internal class ValueOfAction : CompiledAction {
private const int ResultStored = 2;
private int selectKey = Compiler.InvalidQueryKey;
private bool disableOutputEscaping;
private static Action s_BuiltInRule = new BuiltInRuleTextAction();
internal static Action BuiltInRule() {
Debug.Assert(s_BuiltInRule != null);
return s_BuiltInRule;
}
internal override void Compile(Compiler compiler) {
CompileAttributes(compiler);
CheckRequiredAttribute(compiler, selectKey != Compiler.InvalidQueryKey, "select");
CheckEmpty(compiler);
}
internal override bool CompileAttribute(Compiler compiler) {
string name = compiler.Input.LocalName;
string value = compiler.Input.Value;
if (Ref.Equal(name, compiler.Atoms.Select)) {
this.selectKey = compiler.AddQuery(value);
}
else if (Ref.Equal(name, compiler.Atoms.DisableOutputEscaping)) {
this.disableOutputEscaping = compiler.GetYesNo(value);
}
else {
return false;
}
return true;
}
internal override void Execute(Processor processor, ActionFrame frame) {
Debug.Assert(processor != null && frame != null);
switch (frame.State) {
case Initialized:
Debug.Assert(frame != null);
Debug.Assert(frame.NodeSet != null);
string value = processor.ValueOf(frame, this.selectKey);
if (processor.TextEvent(value, disableOutputEscaping)) {
frame.Finished();
}
else {
frame.StoredOutput = value;
frame.State = ResultStored;
}
break;
case ResultStored:
Debug.Assert(frame.StoredOutput != null);
processor.TextEvent(frame.StoredOutput);
frame.Finished();
break;
default:
Debug.Fail("Invalid ValueOfAction execution state");
break;
}
}
}
internal class BuiltInRuleTextAction : Action {
private const int ResultStored = 2;
internal override void Execute(Processor processor, ActionFrame frame) {
Debug.Assert(processor != null && frame != null);
switch (frame.State) {
case Initialized:
Debug.Assert(frame != null);
Debug.Assert(frame.NodeSet != null);
string value = processor.ValueOf(frame.NodeSet.Current);
if (processor.TextEvent(value, /*disableOutputEscaping:*/false)) {
frame.Finished();
}
else {
frame.StoredOutput = value;
frame.State = ResultStored;
}
break;
case ResultStored:
Debug.Assert(frame.StoredOutput != null);
processor.TextEvent(frame.StoredOutput);
frame.Finished();
break;
default:
Debug.Fail("Invalid BuiltInRuleTextAction execution state");
break;
}
}
}
}
| sekcheong/referencesource | System.Data.SqlXml/System/Xml/Xsl/XsltOld/ValueOfAction.cs | C# | mit | 3,999 |
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Component, Directive, ElementRef, Injector, Input, NgModule, destroyPlatform} from '@angular/core';
import {async} from '@angular/core/testing';
import {BrowserModule} from '@angular/platform-browser';
import {platformBrowserDynamic} from '@angular/platform-browser-dynamic';
import * as angular from '@angular/upgrade/src/angular_js';
import {UpgradeComponent, UpgradeModule, downgradeComponent} from '@angular/upgrade/static';
import {bootstrap, html, multiTrim} from '../test_helpers';
export function main() {
describe('examples', () => {
beforeEach(() => destroyPlatform());
afterEach(() => destroyPlatform());
it('should have angular 1 loaded', () => expect(angular.version.major).toBe(1));
it('should verify UpgradeAdapter example', async(() => {
// This is wrapping (upgrading) an Angular 1 component to be used in an Angular 2
// component
@Directive({selector: 'ng1'})
class Ng1Component extends UpgradeComponent {
@Input() title: string;
constructor(elementRef: ElementRef, injector: Injector) {
super('ng1', elementRef, injector);
}
}
// This is an Angular 2 component that will be downgraded
@Component({
selector: 'ng2',
template: 'ng2[<ng1 [title]="nameProp">transclude</ng1>](<ng-content></ng-content>)'
})
class Ng2Component {
@Input('name') nameProp: string;
}
// This module represents the Angular 2 pieces of the application
@NgModule({
declarations: [Ng1Component, Ng2Component],
entryComponents: [Ng2Component],
imports: [BrowserModule, UpgradeModule]
})
class Ng2Module {
ngDoBootstrap() { /* this is a placeholder to stop the bootstrapper from complaining */
}
}
// This module represents the Angular 1 pieces of the application
const ng1Module =
angular
.module('myExample', [])
// This is an Angular 1 component that will be upgraded
.directive(
'ng1',
() => {
return {
scope: {title: '='},
transclude: true,
template: 'ng1[Hello {{title}}!](<span ng-transclude></span>)'
};
})
// This is wrapping (downgrading) an Angular 2 component to be used in Angular 1
.directive(
'ng2',
downgradeComponent({component: Ng2Component, inputs: ['nameProp: name']}));
// This is the (Angular 1) application bootstrap element
// Notice that it is actually a downgraded Angular 2 component
const element = html('<ng2 name="World">project</ng2>');
// Let's use a helper function to make this simpler
bootstrap(platformBrowserDynamic(), Ng2Module, element, ng1Module).then(upgrade => {
expect(multiTrim(element.textContent))
.toBe('ng2[ng1[Hello World!](transclude)](project)');
});
}));
});
}
| awerlang/angular | modules/@angular/upgrade/test/aot/integration/examples_spec.ts | TypeScript | mit | 3,443 |
/*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.nashorn.internal.objects.annotations;
import static jdk.nashorn.internal.objects.annotations.Attribute.DEFAULT_ATTRIBUTES;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation to specify the getter method for a JavaScript "data" property.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Getter {
/**
* Name of the property. If empty, the name is inferred.
*/
public String name() default "";
/**
     * Attribute flags for this getter.
*/
public int attributes() default DEFAULT_ATTRIBUTES;
/**
     * Specifies where this getter lives.
*/
public Where where() default Where.INSTANCE;
}
| rokn/Count_Words_2015 | testing/openjdk2/nashorn/src/jdk/nashorn/internal/objects/annotations/Getter.java | Java | mit | 2,016 |
<?php
/**
* Hoa
*
*
* @license
*
* New BSD License
*
* Copyright © 2007-2015, Hoa community. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Hoa nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
namespace Hoa\Iterator;
/**
* Class \Hoa\Iterator\Glob.
*
* Extending the SPL GlobIterator class.
*
* @copyright Copyright © 2007-2015 Hoa community
* @license New BSD License
*/
class Glob extends \GlobIterator
{
}
| Halleck45/PhpMetricsZendServer | zray/vendor/hoa/iterator/Glob.php | PHP | mit | 1,879 |
// Type definitions for bootstrap4-toggle 3.6
// Project: https://github.com/gitbrent/bootstrap4-toggle, https://gitbrent.github.io/bootstrap4-toggle/
// Definitions by: Mitchell Grice <https://github.com/gricey432>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.7
/// <reference types="jquery"/>
interface BootstrapToggleOptions {
on?: string | undefined;
off?: string | undefined;
size?: string | undefined;
onstyle?: string | undefined;
offstyle?: string | undefined;
style?: string | undefined;
width?: number | string | null | undefined;
height?: number | string | null | undefined;
}
interface JQuery {
bootstrapToggle(options?: BootstrapToggleOptions): JQuery;
bootstrapToggle(command: "destroy" | "on" | "off" | "toggle" | "enable" | "disable"): JQuery;
}
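// Illustrative usage only (element id and option values assumed, not part of these typings):
//   $('#my-toggle').bootstrapToggle({ on: 'Enabled', off: 'Disabled', size: 'sm' });
//   $('#my-toggle').bootstrapToggle('off');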
| markogresak/DefinitelyTyped | types/bootstrap4-toggle/index.d.ts | TypeScript | mit | 852 |
var express = require('express');
var path = require('path');
var favicon = require('serve-favicon');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var fs = require('fs');
var morgan = require('morgan');
var routes = require('./routes/index');
var number = require('./routes/number');
var array = require('./routes/array');
var bool = require('./routes/bool');
var integer = require('./routes/int');
var string = require('./routes/string');
var byte = require('./routes/byte');
var date = require('./routes/date');
var datetime = require('./routes/datetime');
var datetimeRfc1123 = require('./routes/datetime-rfc1123');
var duration = require('./routes/duration');
var complex = require('./routes/complex');
var report = require('./routes/report');
var dictionary = require('./routes/dictionary');
var paths = require('./routes/paths');
var queries = require('./routes/queries');
var pathitem = require('./routes/pathitem');
var header = require('./routes/header');
var reqopt = require('./routes/reqopt');
var httpResponses = require('./routes/httpResponses');
var files = require('./routes/files');
var formData = require('./routes/formData');
var lros = require('./routes/lros');
var paging = require('./routes/paging');
var modelFlatten = require('./routes/model-flatten');
var azureUrl = require('./routes/azureUrl');
var azureSpecial = require('./routes/azureSpecials');
var parameterGrouping = require('./routes/azureParameterGrouping.js');
var validation = require('./routes/validation.js');
var customUri = require('./routes/customUri.js');
var xml = require('./routes/xml.js'); // XML serialization
var util = require('util');
var app = express();
//set up server log
var now = new Date();
var logFileName = 'AccTestServer-' + now.getHours() +
now.getMinutes() + now.getSeconds() + '.log';
var testResultDir = path.join(__dirname, '../../../../TestResults');
if (!fs.existsSync(testResultDir)) {
fs.mkdirSync(testResultDir);
}
var logfile = fs.createWriteStream(path.join(testResultDir, logFileName), {flags: 'a'});
app.use(morgan('combined', {stream: logfile}));
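// Per-scenario hit counters. The route modules below are constructed with these objects and
// increment the matching entry when a scenario is exercised; the report route is wired up with both maps.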
var azurecoverage = {};
var optionalCoverage = {};
var coverage = {
"getArrayNull": 0,
"getArrayEmpty": 0,
"putArrayEmpty": 0,
"getArrayInvalid": 0,
"getArrayBooleanValid": 0,
"putArrayBooleanValid": 0,
"getArrayBooleanWithNull": 0,
"getArrayBooleanWithString": 0,
"getArrayIntegerValid": 0,
"putArrayIntegerValid": 0,
"getArrayIntegerWithNull": 0,
"getArrayIntegerWithString": 0,
"getArrayLongValid": 0,
"putArrayLongValid": 0,
"getArrayLongWithNull": 0,
"getArrayLongWithString": 0,
"getArrayFloatValid": 0,
"putArrayFloatValid": 0,
"getArrayFloatWithNull": 0,
"getArrayFloatWithString": 0,
"getArrayDoubleValid": 0,
"putArrayDoubleValid": 0,
"getArrayDoubleWithNull": 0,
"getArrayDoubleWithString": 0,
"getArrayStringValid": 0,
"putArrayStringValid": 0,
"getArrayStringWithNull": 0,
"getArrayStringWithNumber": 0,
"getArrayDateValid": 0,
"putArrayDateValid": 0,
"getArrayDateWithNull": 0,
"getArrayDateWithInvalidChars": 0,
"getArrayDateTimeValid": 0,
"putArrayDateTimeValid": 0,
"getArrayDateTimeWithNull": 0,
"getArrayDateTimeWithInvalidChars": 0,
"getArrayDateTimeRfc1123Valid": 0,
"putArrayDateTimeRfc1123Valid": 0,
"getArrayDurationValid": 0,
"putArrayDurationValid": 0,
"getArrayUuidValid": 0,
"getArrayUuidWithInvalidChars": 0,
"putArrayUuidValid": 0,
"getArrayByteValid": 0,
"putArrayByteValid": 0,
"getArrayByteWithNull": 0,
"getArrayArrayNull": 0,
"getArrayArrayEmpty": 0,
"getArrayArrayItemNull": 0,
"getArrayArrayItemEmpty": 0,
"getArrayArrayValid": 0,
"putArrayArrayValid": 0,
"getArrayComplexNull": 0,
"getArrayComplexEmpty": 0,
"getArrayComplexItemNull": 0,
"getArrayComplexItemEmpty": 0,
"getArrayComplexValid": 0,
"putArrayComplexValid": 0,
"getArrayDictionaryNull": 0,
"getArrayDictionaryEmpty": 0,
"getArrayDictionaryItemNull": 0,
"getArrayDictionaryItemEmpty": 0,
"getArrayDictionaryValid": 0,
"putArrayDictionaryValid": 0,
"getBoolTrue" : 0,
"putBoolTrue" : 0,
"getBoolFalse" : 0,
"putBoolFalse" : 0,
"getBoolInvalid" : 0,
"getBoolNull" : 0,
"getByteNull": 0,
"getByteEmpty": 0,
"getByteNonAscii": 0,
"putByteNonAscii": 0,
"getByteInvalid": 0,
"getDateNull": 0,
"getDateInvalid": 0,
"getDateOverflow": 0,
"getDateUnderflow": 0,
"getDateMax": 0,
"putDateMax": 0,
"getDateMin": 0,
"putDateMin": 0,
"getDateTimeNull": 0,
"getDateTimeInvalid": 0,
"getDateTimeOverflow": 0,
"getDateTimeUnderflow": 0,
"putDateTimeMaxUtc": 0,
"getDateTimeMaxUtcLowercase": 0,
"getDateTimeMaxUtcUppercase": 0,
"getDateTimeMaxLocalPositiveOffsetLowercase": 0,
"getDateTimeMaxLocalPositiveOffsetUppercase": 0,
"getDateTimeMaxLocalNegativeOffsetLowercase": 0,
"getDateTimeMaxLocalNegativeOffsetUppercase": 0,
"getDateTimeMinUtc": 0,
"putDateTimeMinUtc": 0,
"getDateTimeMinLocalPositiveOffset": 0,
"getDateTimeMinLocalNegativeOffset": 0,
"getDateTimeRfc1123Null": 0,
"getDateTimeRfc1123Invalid": 0,
"getDateTimeRfc1123Overflow": 0,
"getDateTimeRfc1123Underflow": 0,
"getDateTimeRfc1123MinUtc": 0,
"putDateTimeRfc1123Max": 0,
"putDateTimeRfc1123Min": 0,
"getDateTimeRfc1123MaxUtcLowercase": 0,
"getDateTimeRfc1123MaxUtcUppercase": 0,
"getIntegerNull": 0,
"getIntegerInvalid": 0,
"getIntegerOverflow" : 0,
"getIntegerUnderflow": 0,
"getLongOverflow": 0,
"getLongUnderflow": 0,
"putIntegerMax": 0,
"putLongMax": 0,
"putIntegerMin": 0,
"putLongMin": 0,
"getNumberNull": 0,
"getFloatInvalid": 0,
"getDoubleInvalid": 0,
"getFloatBigScientificNotation": 0,
"putFloatBigScientificNotation": 0,
"getDoubleBigScientificNotation": 0,
"putDoubleBigScientificNotation": 0,
"getDoubleBigPositiveDecimal" : 0,
"putDoubleBigPositiveDecimal" : 0,
"getDoubleBigNegativeDecimal" : 0,
"putDoubleBigNegativeDecimal" : 0,
"getFloatSmallScientificNotation" : 0,
"putFloatSmallScientificNotation" : 0,
"getDoubleSmallScientificNotation" : 0,
"putDoubleSmallScientificNotation" : 0,
"getStringNull": 0,
"putStringNull": 0,
"getStringEmpty": 0,
"putStringEmpty": 0,
"getStringMultiByteCharacters": 0,
"putStringMultiByteCharacters": 0,
"getStringWithLeadingAndTrailingWhitespace" : 0,
"putStringWithLeadingAndTrailingWhitespace" : 0,
"getStringNotProvided": 0,
"getEnumNotExpandable": 0,
"putEnumNotExpandable":0,
"putComplexBasicValid": 0,
"getComplexBasicValid": 0,
"getComplexBasicEmpty": 0,
"getComplexBasicNotProvided": 0,
"getComplexBasicNull": 0,
"getComplexBasicInvalid": 0,
"putComplexPrimitiveInteger": 0,
"putComplexPrimitiveLong": 0,
"putComplexPrimitiveFloat": 0,
"putComplexPrimitiveDouble": 0,
"putComplexPrimitiveBool": 0,
"putComplexPrimitiveString": 0,
"putComplexPrimitiveDate": 0,
"putComplexPrimitiveDateTime": 0,
"putComplexPrimitiveDateTimeRfc1123": 0,
"putComplexPrimitiveDuration": 0,
"putComplexPrimitiveByte": 0,
"getComplexPrimitiveInteger": 0,
"getComplexPrimitiveLong": 0,
"getComplexPrimitiveFloat": 0,
"getComplexPrimitiveDouble": 0,
"getComplexPrimitiveBool": 0,
"getComplexPrimitiveString": 0,
"getComplexPrimitiveDate": 0,
"getComplexPrimitiveDateTime": 0,
"getComplexPrimitiveDateTimeRfc1123": 0,
"getComplexPrimitiveDuration": 0,
"getComplexPrimitiveByte": 0,
"putComplexArrayValid": 0,
"putComplexArrayEmpty": 0,
"getComplexArrayValid": 0,
"getComplexArrayEmpty": 0,
"getComplexArrayNotProvided": 0,
"putComplexDictionaryValid": 0,
"putComplexDictionaryEmpty": 0,
"getComplexDictionaryValid": 0,
"getComplexDictionaryEmpty": 0,
"getComplexDictionaryNull": 0,
"getComplexDictionaryNotProvided": 0,
"putComplexInheritanceValid": 0,
"getComplexInheritanceValid": 0,
"putComplexPolymorphismValid": 0,
"getComplexPolymorphismValid": 0,
"putComplexPolymorphicRecursiveValid": 0,
"getComplexPolymorphicRecursiveValid": 0,
"putComplexReadOnlyPropertyValid": 0,
"UrlPathsBoolFalse": 0,
"UrlPathsBoolTrue": 0,
"UrlPathsIntPositive": 0,
"UrlPathsIntNegative": 0,
"UrlPathsLongPositive": 0,
"UrlPathsLongNegative": 0,
"UrlPathsFloatPositive": 0,
"UrlPathsFloatNegative": 0,
"UrlPathsDoublePositive": 0,
"UrlPathsDoubleNegative": 0,
"UrlPathsStringUrlEncoded": 0,
"UrlPathsStringEmpty": 0,
"UrlPathsEnumValid":0,
"UrlPathsByteMultiByte": 0,
"UrlPathsByteEmpty": 0,
"UrlPathsDateValid": 0,
"UrlPathsDateTimeValid": 0,
"UrlQueriesBoolFalse": 0,
"UrlQueriesBoolTrue": 0,
"UrlQueriesBoolNull": 0,
"UrlQueriesIntPositive": 0,
"UrlQueriesIntNegative": 0,
"UrlQueriesIntNull": 0,
"UrlQueriesLongPositive": 0,
"UrlQueriesLongNegative": 0,
"UrlQueriesLongNull": 0,
"UrlQueriesFloatPositive": 0,
"UrlQueriesFloatNegative": 0,
"UrlQueriesFloatNull": 0,
"UrlQueriesDoublePositive": 0,
"UrlQueriesDoubleNegative": 0,
"UrlQueriesDoubleNull": 0,
"UrlQueriesStringUrlEncoded": 0,
"UrlQueriesStringEmpty": 0,
"UrlQueriesStringNull": 0,
"UrlQueriesEnumValid": 0,
"UrlQueriesEnumNull": 0,
"UrlQueriesByteMultiByte": 0,
"UrlQueriesByteEmpty": 0,
"UrlQueriesByteNull": 0,
"UrlQueriesDateValid": 0,
"UrlQueriesDateNull": 0,
"UrlQueriesDateTimeValid": 0,
"UrlQueriesDateTimeNull": 0,
"UrlQueriesArrayCsvNull": 0,
"UrlQueriesArrayCsvEmpty": 0,
"UrlQueriesArrayCsvValid": 0,
//Once all the languages implement this test, the scenario counter should be reset to zero. It is currently implemented in C# and Python
"UrlQueriesArrayMultiNull": 1,
"UrlQueriesArrayMultiEmpty": 1,
"UrlQueriesArrayMultiValid": 1,
"UrlQueriesArraySsvValid": 0,
"UrlQueriesArrayPipesValid": 0,
"UrlQueriesArrayTsvValid": 0,
"UrlPathItemGetAll": 0,
"UrlPathItemGetGlobalNull": 0,
"UrlPathItemGetGlobalAndLocalNull": 0,
"UrlPathItemGetPathItemAndLocalNull": 0,
"putDictionaryEmpty": 0,
"getDictionaryNull": 0,
"getDictionaryEmpty": 0,
"getDictionaryInvalid": 0,
"getDictionaryNullValue": 0,
"getDictionaryNullkey": 0,
"getDictionaryKeyEmptyString": 0,
"getDictionaryBooleanValid": 0,
"getDictionaryBooleanWithNull": 0,
"getDictionaryBooleanWithString": 0,
"getDictionaryIntegerValid": 0,
"getDictionaryIntegerWithNull": 0,
"getDictionaryIntegerWithString": 0,
"getDictionaryLongValid": 0,
"getDictionaryLongWithNull": 0,
"getDictionaryLongWithString": 0,
"getDictionaryFloatValid": 0,
"getDictionaryFloatWithNull": 0,
"getDictionaryFloatWithString": 0,
"getDictionaryDoubleValid": 0,
"getDictionaryDoubleWithNull": 0,
"getDictionaryDoubleWithString": 0,
"getDictionaryStringValid": 0,
"getDictionaryStringWithNull": 0,
"getDictionaryStringWithNumber": 0,
"getDictionaryDateValid": 0,
"getDictionaryDateWithNull": 0,
"getDictionaryDateWithInvalidChars": 0,
"getDictionaryDateTimeValid": 0,
"getDictionaryDateTimeWithNull": 0,
"getDictionaryDateTimeWithInvalidChars": 0,
"getDictionaryDateTimeRfc1123Valid": 0,
"getDictionaryDurationValid": 0,
"getDictionaryByteValid": 0,
"getDictionaryByteWithNull": 0,
"putDictionaryBooleanValid": 0,
"putDictionaryIntegerValid": 0,
"putDictionaryLongValid": 0,
"putDictionaryFloatValid": 0,
"putDictionaryDoubleValid": 0,
"putDictionaryStringValid": 0,
"putDictionaryDateValid": 0,
"putDictionaryDateTimeValid": 0,
"putDictionaryDateTimeRfc1123Valid": 0,
"putDictionaryDurationValid": 0,
"putDictionaryByteValid": 0,
"getDictionaryComplexNull": 0,
"getDictionaryComplexEmpty": 0,
"getDictionaryComplexItemNull": 0,
"getDictionaryComplexItemEmpty": 0,
"getDictionaryComplexValid": 0,
"putDictionaryComplexValid": 0,
"getDictionaryArrayNull": 0,
"getDictionaryArrayEmpty": 0,
"getDictionaryArrayItemNull": 0,
"getDictionaryArrayItemEmpty": 0,
"getDictionaryArrayValid": 0,
"putDictionaryArrayValid": 0,
"getDictionaryDictionaryNull": 0,
"getDictionaryDictionaryEmpty": 0,
"getDictionaryDictionaryItemNull": 0,
"getDictionaryDictionaryItemEmpty": 0,
"getDictionaryDictionaryValid": 0,
"putDictionaryDictionaryValid": 0,
"putDurationPositive": 0,
"getDurationNull": 0,
"getDurationInvalid": 0,
"getDurationPositive": 0,
"HeaderParameterExistingKey": 0,
"HeaderResponseExistingKey": 0,
"HeaderResponseProtectedKey": 0,
"HeaderParameterIntegerPositive": 0,
"HeaderParameterIntegerNegative": 0,
"HeaderParameterLongPositive": 0,
"HeaderParameterLongNegative": 0,
"HeaderParameterFloatPositive": 0,
"HeaderParameterFloatNegative": 0,
"HeaderParameterDoublePositive": 0,
"HeaderParameterDoubleNegative": 0,
"HeaderParameterBoolTrue": 0,
"HeaderParameterBoolFalse": 0,
"HeaderParameterStringValid": 0,
"HeaderParameterStringNull": 0,
"HeaderParameterStringEmpty": 0,
"HeaderParameterDateValid": 0,
"HeaderParameterDateMin": 0,
"HeaderParameterDateTimeValid": 0,
"HeaderParameterDateTimeMin": 0,
"HeaderParameterDateTimeRfc1123Valid": 0,
"HeaderParameterDateTimeRfc1123Min": 0,
"HeaderParameterBytesValid": 0,
"HeaderParameterDurationValid": 0,
"HeaderResponseIntegerPositive": 0,
"HeaderResponseIntegerNegative": 0,
"HeaderResponseLongPositive": 0,
"HeaderResponseLongNegative": 0,
"HeaderResponseFloatPositive": 0,
"HeaderResponseFloatNegative": 0,
"HeaderResponseDoublePositive": 0,
"HeaderResponseDoubleNegative": 0,
"HeaderResponseBoolTrue": 0,
"HeaderResponseBoolFalse": 0,
"HeaderResponseStringValid": 0,
"HeaderResponseStringNull": 0,
"HeaderResponseStringEmpty": 0,
"HeaderParameterEnumValid": 0,
"HeaderParameterEnumNull": 0,
"HeaderResponseEnumValid": 0,
"HeaderResponseEnumNull": 0,
"HeaderResponseDateValid": 0,
"HeaderResponseDateMin": 0,
"HeaderResponseDateTimeValid": 0,
"HeaderResponseDateTimeMin": 0,
"HeaderResponseDateTimeRfc1123Valid": 0,
"HeaderResponseDateTimeRfc1123Min": 0,
"HeaderResponseBytesValid": 0,
"HeaderResponseDurationValid": 0,
"FormdataStreamUploadFile": 0,
"StreamUploadFile": 0,
"ConstantsInPath": 0,
"ConstantsInBody": 0,
"CustomBaseUri": 0,
//Once all the languages implement this test, the scenario counter should be reset to zero. It is currently implemented in C#, Python and node.js
"CustomBaseUriMoreOptions": 1,
'getModelFlattenArray': 0,
'putModelFlattenArray': 0,
'getModelFlattenDictionary': 0,
'putModelFlattenDictionary': 0,
'getModelFlattenResourceCollection': 0,
'putModelFlattenResourceCollection': 0,
'putModelFlattenCustomBase': 0,
'postModelFlattenCustomParameter': 0,
'putModelFlattenCustomGroupedParameter': 0,
/* TODO: only C#, Python and node.js support the base64url format currently. Exclude these tests from code coverage until it is implemented in other languages */
"getStringBase64Encoded": 1,
"getStringBase64UrlEncoded": 1,
"putStringBase64UrlEncoded": 1,
"getStringNullBase64UrlEncoding": 1,
"getArrayBase64Url": 1,
"getDictionaryBase64Url": 1,
"UrlPathsStringBase64Url": 1,
"UrlPathsArrayCSVInPath": 1,
/* TODO: only C# and Python support the unixtime format currently. Exclude these tests from code coverage until it is implemented in other languages */
"getUnixTime": 1,
"getInvalidUnixTime": 1,
"getNullUnixTime": 1,
"putUnixTime": 1,
"UrlPathsIntUnixTime": 1,
/* TODO: Once all the languages implement these tests, the scenario counters should be reset to zero. It is currently implemented in Python */
"getDecimalInvalid": 1,
"getDecimalBig": 1,
"getDecimalSmall": 1,
"getDecimalBigPositiveDecimal" : 1,
"getDecimalBigNegativeDecimal" : 1,
"putDecimalBig": 1,
"putDecimalSmall": 1,
"putDecimalBigPositiveDecimal" : 1,
"getEnumReferenced" : 1,
"putEnumReferenced" : 1,
"getEnumReferencedConstant" : 1,
"putEnumReferencedConstant" : 1
};
// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');
// uncomment after placing your favicon in /public
//app.use(favicon(__dirname + '/public/favicon.ico'));
app.use(logger('dev'));
app.use(bodyParser.json({strict: false}));
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
app.use('/', routes);
app.use('/bool', new bool(coverage).router);
app.use('/int', new integer(coverage).router);
app.use('/number', new number(coverage).router);
app.use('/string', new string(coverage).router);
app.use('/byte', new byte(coverage).router);
app.use('/date', new date(coverage).router);
app.use('/datetime', new datetime(coverage, optionalCoverage).router);
app.use('/datetimeRfc1123', new datetimeRfc1123(coverage).router);
app.use('/duration', new duration(coverage, optionalCoverage).router);
app.use('/array', new array(coverage).router);
app.use('/complex', new complex(coverage).router);
app.use('/dictionary', new dictionary(coverage).router);
app.use('/paths', new paths(coverage).router);
app.use('/queries', new queries(coverage).router);
app.use('/pathitem', new pathitem(coverage).router);
app.use('/header', new header(coverage, optionalCoverage).router);
app.use('/reqopt', new reqopt(coverage).router);
app.use('/files', new files(coverage).router);
app.use('/formdata', new formData(coverage).router);
app.use('/http', new httpResponses(coverage, optionalCoverage).router);
app.use('/model-flatten', new modelFlatten(coverage).router);
app.use('/lro', new lros(azurecoverage).router);
app.use('/paging', new paging(azurecoverage).router);
app.use('/azurespecials', new azureSpecial(azurecoverage).router);
app.use('/report', new report(coverage, azurecoverage).router);
app.use('/subscriptions', new azureUrl(azurecoverage).router);
app.use('/parameterGrouping', new parameterGrouping(azurecoverage).router);
app.use('/validation', new validation(coverage).router);
app.use('/customUri', new customUri(coverage).router);
app.use('/xml', new xml().router);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
var err = new Error('Not Found');
err.status = 404;
next(err);
});
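// error handler: reply with the error serialized as JSON, using its status code or 500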
app.use(function(err, req, res, next) {
res.status(err.status || 500);
res.end(JSON.stringify(err));
});
module.exports = app;
| lmazuel/autorest | src/dev/TestServer/server/app.js | JavaScript | mit | 18,237 |
/**
* Copyright (c) 2010-2019 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.binding.miio.internal.basic;
/**
* Various types of parameters to be send
*
* @author Marcel Verpaalen - Initial contribution
*/
public enum CommandParameterType {
NONE("none"),
EMPTY("empty"),
ONOFF("onoff"),
ONOFFPARA("onoffpara"),
STRING("string"),
CUSTOMSTRING("customstring"),
NUMBER("number"),
COLOR("color"),
UNKNOWN("unknown");
private String text;
CommandParameterType(String text) {
this.text = text;
}
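    /**
     * Resolves a parameter type from its textual value (case-insensitive), falling back to UNKNOWN.
     */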
public static CommandParameterType fromString(String text) {
for (CommandParameterType param : CommandParameterType.values()) {
if (param.text.equalsIgnoreCase(text)) {
return param;
}
}
return UNKNOWN;
}
}
| theoweiss/openhab2 | bundles/org.openhab.binding.miio/src/main/java/org/openhab/binding/miio/internal/basic/CommandParameterType.java | Java | epl-1.0 | 1,157 |
<?php
/**
* This code was generated by
* \ / _ _ _| _ _
* | (_)\/(_)(_|\/| |(/_ v1.0.0
* / /
*/
namespace Twilio\Rest\Notify\V1\Service;
use Twilio\Options;
use Twilio\Values;
/**
* PLEASE NOTE that this class contains beta products that are subject to change. Use them with caution.
*/
abstract class BindingOptions {
/**
* @param string $tag The tag
* @param string $notificationProtocolVersion The notification_protocol_version
* @param string $credentialSid The credential_sid
* @param string $endpoint The endpoint
* @return CreateBindingOptions Options builder
*/
public static function create($tag = Values::NONE, $notificationProtocolVersion = Values::NONE, $credentialSid = Values::NONE, $endpoint = Values::NONE) {
return new CreateBindingOptions($tag, $notificationProtocolVersion, $credentialSid, $endpoint);
}
/**
* @param \DateTime $startDate The start_date
* @param \DateTime $endDate The end_date
* @param string $identity The identity
* @param string $tag The tag
* @return ReadBindingOptions Options builder
*/
public static function read($startDate = Values::NONE, $endDate = Values::NONE, $identity = Values::NONE, $tag = Values::NONE) {
return new ReadBindingOptions($startDate, $endDate, $identity, $tag);
}
}
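// Illustrative only (values assumed): options are normally built through the factories above, e.g.
// $options = BindingOptions::create('tagA', '3', $credentialSid, $endpoint);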
class CreateBindingOptions extends Options {
/**
* @param string $tag The tag
* @param string $notificationProtocolVersion The notification_protocol_version
* @param string $credentialSid The credential_sid
* @param string $endpoint The endpoint
*/
public function __construct($tag = Values::NONE, $notificationProtocolVersion = Values::NONE, $credentialSid = Values::NONE, $endpoint = Values::NONE) {
$this->options['tag'] = $tag;
$this->options['notificationProtocolVersion'] = $notificationProtocolVersion;
$this->options['credentialSid'] = $credentialSid;
$this->options['endpoint'] = $endpoint;
}
/**
* The tag
*
* @param string $tag The tag
* @return $this Fluent Builder
*/
public function setTag($tag) {
$this->options['tag'] = $tag;
return $this;
}
/**
* The notification_protocol_version
*
* @param string $notificationProtocolVersion The notification_protocol_version
* @return $this Fluent Builder
*/
public function setNotificationProtocolVersion($notificationProtocolVersion) {
$this->options['notificationProtocolVersion'] = $notificationProtocolVersion;
return $this;
}
/**
* The credential_sid
*
* @param string $credentialSid The credential_sid
* @return $this Fluent Builder
*/
public function setCredentialSid($credentialSid) {
$this->options['credentialSid'] = $credentialSid;
return $this;
}
/**
* The endpoint
*
* @param string $endpoint The endpoint
* @return $this Fluent Builder
*/
public function setEndpoint($endpoint) {
$this->options['endpoint'] = $endpoint;
return $this;
}
/**
* Provide a friendly representation
*
* @return string Machine friendly representation
*/
public function __toString() {
$options = array();
foreach ($this->options as $key => $value) {
if ($value != Values::NONE) {
$options[] = "$key=$value";
}
}
return '[Twilio.Notify.V1.CreateBindingOptions ' . implode(' ', $options) . ']';
}
}
class ReadBindingOptions extends Options {
/**
* @param \DateTime $startDate The start_date
* @param \DateTime $endDate The end_date
* @param string $identity The identity
* @param string $tag The tag
*/
public function __construct($startDate = Values::NONE, $endDate = Values::NONE, $identity = Values::NONE, $tag = Values::NONE) {
$this->options['startDate'] = $startDate;
$this->options['endDate'] = $endDate;
$this->options['identity'] = $identity;
$this->options['tag'] = $tag;
}
/**
* The start_date
*
* @param \DateTime $startDate The start_date
* @return $this Fluent Builder
*/
public function setStartDate($startDate) {
$this->options['startDate'] = $startDate;
return $this;
}
/**
* The end_date
*
* @param \DateTime $endDate The end_date
* @return $this Fluent Builder
*/
public function setEndDate($endDate) {
$this->options['endDate'] = $endDate;
return $this;
}
/**
* The identity
*
* @param string $identity The identity
* @return $this Fluent Builder
*/
public function setIdentity($identity) {
$this->options['identity'] = $identity;
return $this;
}
/**
* The tag
*
* @param string $tag The tag
* @return $this Fluent Builder
*/
public function setTag($tag) {
$this->options['tag'] = $tag;
return $this;
}
/**
* Provide a friendly representation
*
* @return string Machine friendly representation
*/
public function __toString() {
$options = array();
foreach ($this->options as $key => $value) {
if ($value != Values::NONE) {
$options[] = "$key=$value";
}
}
return '[Twilio.Notify.V1.ReadBindingOptions ' . implode(' ', $options) . ']';
}
} | Haynie-Research-and-Development/jarvis | web/webapi/sms/vendor/twilio/sdk/Twilio/Rest/Notify/V1/Service/BindingOptions.php | PHP | gpl-2.0 | 5,550 |
<?php
namespace Neos\Neos\Controller\Module;
/*
* This file is part of the Neos.Neos package.
*
* (c) Contributors of the Neos Project - www.neos.io
*
* This package is Open Source Software. For the full copyright and license
* information, please view the LICENSE file which was distributed with this
* source code.
*/
use Neos\Flow\Annotations as Flow;
/**
* @Flow\Scope("singleton")
*/
class UserController extends AbstractModuleController
{
}
| aertmann/neos-development-collection | Neos.Neos/Classes/Controller/Module/UserController.php | PHP | gpl-3.0 | 460 |
namespace MixERP.Net.WebControls.Flag
{
public partial class FlagControl
{
private bool disposed;
public override void Dispose()
{
if (!this.disposed)
{
this.Dispose(true);
base.Dispose();
}
}
private void Dispose(bool disposing)
{
if (!disposing)
{
return;
}
if (this.container != null)
{
this.container.Dispose();
this.container = null;
}
if (this.flagDropDownlist != null)
{
this.flagDropDownlist.Dispose();
this.flagDropDownlist = null;
}
if (this.updateButton != null)
{
this.updateButton.Dispose();
this.updateButton = null;
}
this.disposed = true;
}
}
} | gguruss/mixerp | src/Libraries/Server Controls/Project/MixERP.Net.WebControls.Flag/FlagControl/IDisposable.cs | C# | gpl-3.0 | 972 |
/*
* Copyright (C) 2014 Arpit Khurana <arpitkh96@gmail.com>
*
* This file is part of Amaze File Manager.
*
* Amaze File Manager is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.amaze.filemanager.utils;
import com.stericson.RootTools.RootTools;
import com.stericson.RootTools.execution.Command;
import java.io.File;
import java.util.ArrayList;
public class RootHelper
{
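    /**
     * Runs a single shell command, optionally through the root shell, and blocks until it
     * finishes; returns null if the shell could not be obtained or the command never completed.
     */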
public static String runAndWait(String cmd,boolean root)
{
Command c=new Command(0,cmd) {
@Override
public void commandOutput(int i, String s) {
}
@Override
public void commandTerminated(int i, String s) {
}
@Override
public void commandCompleted(int i, int i2) {
}
};
        try {
            RootTools.getShell(root).add(c);
        } catch (Exception e) {
            // Logger.errorST("Exception when trying to run shell command", e);
            return null;
        }
if (!waitForCommand(c))
{
return null;
}
return c.toString();
}
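    /**
     * Runs a shell command, optionally through the root shell, collecting every output line;
     * returns null if the shell could not be obtained or the command never completed.
     */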
public static ArrayList<String> runAndWait1(String cmd, final boolean root)
{
final ArrayList<String> output=new ArrayList<String>();
Command cc=new Command(1,cmd) {
@Override
public void commandOutput(int i, String s) {
output.add(s);
// System.out.println("output "+root+s);
}
@Override
public void commandTerminated(int i, String s) {
System.out.println("error"+root+s);
}
@Override
public void commandCompleted(int i, int i2) {
}
};
try {
RootTools.getShell(root).add(cc);
}
catch (Exception e)
{
// Logger.errorST("Exception when trying to run shell command", e);
e.printStackTrace();
return null;
}
if (!waitForCommand(cc))
{
return null;
}
return output;
}
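    /**
     * Polls the given command until it reports completion, waiting in two-second slices;
     * returns false if the command stopped executing before finishing.
     */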
private static boolean waitForCommand(Command cmd)
{
while (!cmd.isFinished())
{
synchronized (cmd)
{
try
{
if (!cmd.isFinished())
{
cmd.wait(2000);
}
}
catch (InterruptedException e)
{
e.printStackTrace();
}
}
if (!cmd.isExecuting() && !cmd.isFinished())
{
// Logger.errorST("Error: Command is not executing and is not finished!");
return false;
}
}
//Logger.debug("Command Finished!");
return true;
}
    public static String getCommandLineString(String input) {
        return input.replaceAll(UNIX_ESCAPE_EXPRESSION, "\\\\$1");
    }
    private static final String UNIX_ESCAPE_EXPRESSION = "(\\(|\\)|\\[|\\]|\\s|\'|\"|`|\\{|\\}|&|\\\\|\\?)";
    static Futils futils = new Futils();
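    /**
     * Lists a directory via plain java.io, producing one String[] row per entry
     * (path, permissions, a "-1" marker for directories, last-modified time and optional size).
     */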
    public static ArrayList<String[]> getFilesList(boolean showSize, String path, boolean showHidden) {
        File f = new File(path);
        ArrayList<String[]> files = new ArrayList<String[]>();
        try {
            if (f.exists() && f.isDirectory()) {
                for (File x : f.listFiles()) {
                    String k = "", size = "";
                    if (x.isDirectory()) {
                        k = "-1";
                        if (showSize) size = "" + getCount(x);
                    } else if (showSize) {
                        size = "" + x.length();
                    }
                    if (showHidden) {
                        files.add(new String[]{x.getPath(), "", parseFilePermission(x), k, x.lastModified() + "", size});
                    } else if (!x.isHidden()) {
                        files.add(new String[]{x.getPath(), "", parseFilePermission(x), k, x.lastModified() + "", size});
                    }
                }
            }
        } catch (Exception e) {
        }
        return files;
    }
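    /**
     * Builds the same String[] row as getFilesList(boolean, String, boolean) for a single file,
     * or returns null when the file is hidden and hidden entries are excluded.
     */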
    public static String[] addFile(File x, boolean showSize, boolean showHidden) {
        String k = "", size = "";
        if (x.isDirectory()) {
            k = "-1";
            if (showSize) size = "" + getCount(x);
        } else if (showSize) {
            size = "" + x.length();
        }
        if (showHidden) {
            return new String[]{x.getPath(), "", parseFilePermission(x), k, x.lastModified() + "", size};
        } else if (!x.isHidden()) {
            return new String[]{x.getPath(), "", parseFilePermission(x), k, x.lastModified() + "", size};
        }
        return null;
    }
    public static String parseFilePermission(File f) {
        String per = "";
        if (f.canRead()) per = per + "r";
        if (f.canWrite()) per = per + "w";
        if (f.canExecute()) per = per + "x";
        return per;
    }
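    /**
     * Lists files at the given path, using "ls -l" through the (root) shell for paths outside
     * /storage and falling back to a plain java.io listing whenever the path is directly readable.
     */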
    public static ArrayList<String[]> getFilesList(String path, boolean root, boolean showHidden, boolean showSize) {
        String p = " ";
        if (showHidden) p = "a ";
        Futils futils = new Futils();
        ArrayList<String[]> a = new ArrayList<String[]>();
        ArrayList<String> ls = new ArrayList<String>();
        if (root) {
            if (!path.startsWith("/storage")) {
                String cpath = getCommandLineString(path);
                ls = runAndWait1("ls -l" + p + cpath, root);
                if (ls != null) {
                    for (String file : ls) {
                        if (!file.contains("Permission denied")) {
                            try {
                                String[] array = futils.parseName(file);
                                array[0] = path + "/" + array[0];
                                a.add(array);
                            } catch (Exception e) {
                                System.out.println(file);
                                e.printStackTrace();
                            }
                        }
                    }
                }
            } else if (futils.canListFiles(new File(path))) {
                a = getFilesList(showSize, path, showHidden);
            } else {
                a = new ArrayList<String[]>();
            }
        } else if (futils.canListFiles(new File(path))) {
            a = getFilesList(showSize, path, showHidden);
        } else {
            a = new ArrayList<String[]>();
        }
        if (a.size() == 0 && futils.canListFiles(new File(path))) {
            a = getFilesList(showSize, path, showHidden);
        }
        return a;
    }
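    /**
     * Returns the number of entries in a readable directory, 0 if listing fails,
     * or null when the file is not a readable directory.
     */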
    public static Integer getCount(File f) {
        if (f.exists() && f.canRead() && f.isDirectory()) {
            try {
                return f.listFiles().length;
            } catch (Exception e) {
                return 0;
            }
        }
        return null;
    }
}
/*
* eXist Open Source Native XML Database
* Copyright (C) 2009-2011 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id$
*/
package org.exist.debuggee.dbgp.packets;
import org.apache.mina.core.session.IoSession;
/**
* @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
*
*/
public class StepInto extends AbstractCommandContinuation {
public StepInto(IoSession session, String args) {
super(session, args);
}
/* (non-Javadoc)
* @see org.exist.debuggee.dgbp.packets.Command#exec()
*/
@Override
public synchronized void exec() {
getJoint().continuation(this);
}
public synchronized byte[] responseBytes() {
        String response = xml_declaration +
"<response " +
namespaces +
"command=\"step_into\" " +
"status=\""+getStatus()+"\" " +
"reason=\"ok\" " +
"transaction_id=\""+transactionID+"\"/>";
        return response.getBytes();
}
public byte[] commandBytes() {
String command = "step_into -i "+transactionID;
return command.getBytes();
}
public int getType() {
return STEP_INTO;
}
public boolean is(int type) {
return (type == STEP_INTO);
}
public String toString() {
return "step_into ["+transactionID+"]";
}
}
| MjAbuz/exist | extensions/debuggee/src/org/exist/debuggee/dbgp/packets/StepInto.java | Java | lgpl-2.1 | 1,942 |
package bootstrappolicy
import (
rbacrest "k8s.io/kubernetes/pkg/registry/rbac/rest"
)
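// Policy returns the bootstrap RBAC policy data: the default cluster roles and bindings
// plus the per-namespace roles and role bindings.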
func Policy() *rbacrest.PolicyData {
return &rbacrest.PolicyData{
ClusterRoles: GetBootstrapClusterRoles(),
ClusterRoleBindings: GetBootstrapClusterRoleBindings(),
Roles: GetBootstrapNamespaceRoles(),
RoleBindings: GetBootstrapNamespaceRoleBindings(),
}
}
| kedgeproject/kedge | vendor/github.com/openshift/origin/pkg/cmd/server/bootstrappolicy/all.go | GO | apache-2.0 | 384 |
package com.codahale.metrics.health.jvm;
import com.codahale.metrics.health.HealthCheck;
import com.codahale.metrics.jvm.ThreadDeadlockDetector;
import java.util.Set;
/**
* A health check which returns healthy if no threads are deadlocked.
*/
public class ThreadDeadlockHealthCheck extends HealthCheck {
private final ThreadDeadlockDetector detector;
/**
* Creates a new health check.
*/
public ThreadDeadlockHealthCheck() {
this(new ThreadDeadlockDetector());
}
/**
* Creates a new health check with the given detector.
*
* @param detector a thread deadlock detector
*/
public ThreadDeadlockHealthCheck(ThreadDeadlockDetector detector) {
this.detector = detector;
}
@Override
protected Result check() throws Exception {
final Set<String> threads = detector.getDeadlockedThreads();
if (threads.isEmpty()) {
return Result.healthy();
}
return Result.unhealthy(threads.toString());
}
}
| gburton1/metrics | metrics-healthchecks/src/main/java/com/codahale/metrics/health/jvm/ThreadDeadlockHealthCheck.java | Java | apache-2.0 | 1,021 |
package io.dropwizard.jdbi.args;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.Argument;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.Instant;
import java.util.Calendar;
import java.util.Optional;
/**
* An {@link Argument} for {@link Instant} objects.
*/
public class InstantArgument implements Argument {
private final Instant instant;
private final Optional<Calendar> calendar;
protected InstantArgument(final Instant instant, final Optional<Calendar> calendar) {
this.instant = instant;
this.calendar = calendar;
}
@Override
public void apply(int position, PreparedStatement statement, StatementContext ctx) throws SQLException {
if (instant != null) {
if (calendar.isPresent()) {
// We need to make a clone, because Calendar is not thread-safe
// and some JDBC drivers mutate it during time calculations
final Calendar calendarClone = (Calendar) calendar.get().clone();
statement.setTimestamp(position, Timestamp.from(instant), calendarClone);
} else {
statement.setTimestamp(position, Timestamp.from(instant));
}
} else {
statement.setNull(position, Types.TIMESTAMP);
}
}
}
| patrox/dropwizard | dropwizard-jdbi/src/main/java/io/dropwizard/jdbi/args/InstantArgument.java | Java | apache-2.0 | 1,397 |
package cache
import (
"time"
kapi "k8s.io/kubernetes/pkg/api"
errors "k8s.io/kubernetes/pkg/api/errors"
"k8s.io/kubernetes/pkg/client/cache"
"k8s.io/kubernetes/pkg/fields"
"k8s.io/kubernetes/pkg/labels"
"k8s.io/kubernetes/pkg/runtime"
"k8s.io/kubernetes/pkg/watch"
authorizationapi "github.com/openshift/origin/pkg/authorization/api"
"github.com/openshift/origin/pkg/authorization/client"
clusterbindingregistry "github.com/openshift/origin/pkg/authorization/registry/clusterpolicybinding"
)
type readOnlyClusterPolicyBindingCache struct {
registry clusterbindingregistry.WatchingRegistry
indexer cache.Indexer
reflector *cache.Reflector
keyFunc cache.KeyFunc
}
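// NewReadOnlyClusterPolicyBindingCache returns a cache of cluster policy bindings kept in sync
// by a reflector that lists and watches through the supplied registry.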
func NewReadOnlyClusterPolicyBindingCache(registry clusterbindingregistry.WatchingRegistry) *readOnlyClusterPolicyBindingCache {
ctx := kapi.WithNamespace(kapi.NewContext(), kapi.NamespaceAll)
indexer := cache.NewIndexer(cache.MetaNamespaceKeyFunc, cache.Indexers{"namespace": cache.MetaNamespaceIndexFunc})
reflector := cache.NewReflector(
&cache.ListWatch{
ListFunc: func() (runtime.Object, error) {
return registry.ListClusterPolicyBindings(ctx, labels.Everything(), fields.Everything())
},
WatchFunc: func(resourceVersion string) (watch.Interface, error) {
return registry.WatchClusterPolicyBindings(ctx, labels.Everything(), fields.Everything(), resourceVersion)
},
},
&authorizationapi.ClusterPolicyBinding{},
indexer,
2*time.Minute,
)
return &readOnlyClusterPolicyBindingCache{
registry: registry,
indexer: indexer,
reflector: reflector,
keyFunc: cache.MetaNamespaceKeyFunc,
}
}
// Run begins watching and synchronizing the cache
func (c *readOnlyClusterPolicyBindingCache) Run() {
c.reflector.Run()
}
// RunUntil starts a watch and handles watch events. Will restart the watch if it is closed.
// RunUntil starts a goroutine and returns immediately. It will exit when stopCh is closed.
func (c *readOnlyClusterPolicyBindingCache) RunUntil(stopChannel <-chan struct{}) {
c.reflector.RunUntil(stopChannel)
}
// LastSyncResourceVersion exposes the LastSyncResourceVersion of the internal reflector
func (c *readOnlyClusterPolicyBindingCache) LastSyncResourceVersion() string {
return c.reflector.LastSyncResourceVersion()
}
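// List returns the cached cluster policy bindings matching the given label and field selectors.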
func (c *readOnlyClusterPolicyBindingCache) List(label labels.Selector, field fields.Selector) (*authorizationapi.ClusterPolicyBindingList, error) {
clusterPolicyBindingList := &authorizationapi.ClusterPolicyBindingList{}
returnedList := c.indexer.List()
for i := range returnedList {
clusterPolicyBinding, castOK := returnedList[i].(*authorizationapi.ClusterPolicyBinding)
if !castOK {
return clusterPolicyBindingList, errors.NewInvalid("ClusterPolicyBinding", "clusterPolicyBinding", []error{})
}
if label.Matches(labels.Set(clusterPolicyBinding.Labels)) && field.Matches(authorizationapi.ClusterPolicyBindingToSelectableFields(clusterPolicyBinding)) {
clusterPolicyBindingList.Items = append(clusterPolicyBindingList.Items, *clusterPolicyBinding)
}
}
return clusterPolicyBindingList, nil
}
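// Get returns the cached cluster policy binding with the given name, or a NotFound error.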
func (c *readOnlyClusterPolicyBindingCache) Get(name string) (*authorizationapi.ClusterPolicyBinding, error) {
keyObj := &authorizationapi.ClusterPolicyBinding{ObjectMeta: kapi.ObjectMeta{Name: name}}
key, _ := c.keyFunc(keyObj)
item, exists, getErr := c.indexer.GetByKey(key)
if getErr != nil {
return &authorizationapi.ClusterPolicyBinding{}, getErr
}
if !exists {
existsErr := errors.NewNotFound("ClusterPolicyBinding", name)
return &authorizationapi.ClusterPolicyBinding{}, existsErr
}
clusterPolicyBinding, castOK := item.(*authorizationapi.ClusterPolicyBinding)
if !castOK {
castErr := errors.NewInvalid("ClusterPolicyBinding", name, []error{})
return &authorizationapi.ClusterPolicyBinding{}, castErr
}
return clusterPolicyBinding, nil
}
func newReadOnlyClusterPolicyBindings(cache readOnlyAuthorizationCache) client.ReadOnlyClusterPolicyBindingInterface {
return cache.readOnlyClusterPolicyBindingCache
}
| yepengxj/df_st_origin1 | pkg/authorization/cache/clusterpolicybinding.go | GO | apache-2.0 | 4,026 |
//// [tests/cases/compiler/pathMappingBasedModuleResolution5_classic.ts] ////
//// [file1.ts]
import {x} from "folder2/file1"
import {y} from "folder3/file2"
import {z} from "components/file3"
import {z1} from "file4"
declare function use(a: any): void;
use(x.toExponential());
use(y.toExponential());
use(z.toExponential());
use(z1.toExponential());
//// [file1.ts]
export var x = 1;
//// [file2.ts]
export var y = 1;
//// [file3.ts]
export var z = 1;
//// [file4.ts]
export var z1 = 1;
//// [file1.js]
define(["require", "exports"], function (require, exports) {
"use strict";
exports.__esModule = true;
exports.x = 1;
});
//// [file2.js]
define(["require", "exports"], function (require, exports) {
"use strict";
exports.__esModule = true;
exports.y = 1;
});
//// [file3.js]
define(["require", "exports"], function (require, exports) {
"use strict";
exports.__esModule = true;
exports.z = 1;
});
//// [file4.js]
define(["require", "exports"], function (require, exports) {
"use strict";
exports.__esModule = true;
exports.z1 = 1;
});
//// [file1.js]
define(["require", "exports", "folder2/file1", "folder3/file2", "components/file3", "file4"], function (require, exports, file1_1, file2_1, file3_1, file4_1) {
"use strict";
exports.__esModule = true;
use(file1_1.x.toExponential());
use(file2_1.y.toExponential());
use(file3_1.z.toExponential());
use(file4_1.z1.toExponential());
});
| weswigham/TypeScript | tests/baselines/reference/pathMappingBasedModuleResolution5_classic.js | JavaScript | apache-2.0 | 1,513 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.aws.ec2.features;
import static org.jclouds.aws.reference.FormParameters.ACTION;
import java.util.Set;
import javax.inject.Named;
import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import org.jclouds.Fallbacks.EmptySetOnNotFoundOr404;
import org.jclouds.aws.ec2.domain.AWSRunningInstance;
import org.jclouds.aws.ec2.xml.AWSDescribeInstancesResponseHandler;
import org.jclouds.aws.ec2.xml.AWSRunInstancesResponseHandler;
import org.jclouds.aws.filters.FormSigner;
import org.jclouds.ec2.binders.BindFiltersToIndexedFormParams;
import org.jclouds.ec2.binders.BindInstanceIdsToIndexedFormParams;
import org.jclouds.ec2.binders.IfNotNullBindAvailabilityZoneToFormParam;
import org.jclouds.ec2.domain.Reservation;
import org.jclouds.ec2.features.InstanceApi;
import org.jclouds.ec2.options.RunInstancesOptions;
import org.jclouds.javax.annotation.Nullable;
import org.jclouds.location.functions.RegionToEndpointOrProviderIfNull;
import org.jclouds.rest.annotations.BinderParam;
import org.jclouds.rest.annotations.EndpointParam;
import org.jclouds.rest.annotations.Fallback;
import org.jclouds.rest.annotations.FormParams;
import org.jclouds.rest.annotations.RequestFilters;
import org.jclouds.rest.annotations.VirtualHost;
import org.jclouds.rest.annotations.XMLResponseParser;
import com.google.common.collect.Multimap;
/**
* Provides access to EC2 Instance Services via their REST API.
* <p/>
*/
@RequestFilters(FormSigner.class)
@VirtualHost
public interface AWSInstanceApi extends InstanceApi {
@Named("DescribeInstances")
@Override
@POST
@Path("/")
@FormParams(keys = ACTION, values = "DescribeInstances")
@XMLResponseParser(AWSDescribeInstancesResponseHandler.class)
@Fallback(EmptySetOnNotFoundOr404.class)
Set<? extends Reservation<? extends AWSRunningInstance>> describeInstancesInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@BinderParam(BindInstanceIdsToIndexedFormParams.class) String... instanceIds);
@Named("DescribeInstances")
@POST
@Path("/")
@FormParams(keys = ACTION, values = "DescribeInstances")
@XMLResponseParser(AWSDescribeInstancesResponseHandler.class)
@Fallback(EmptySetOnNotFoundOr404.class)
Set<? extends Reservation<? extends AWSRunningInstance>> describeInstancesInRegionWithFilter(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@BinderParam(BindFiltersToIndexedFormParams.class) Multimap<String, String> filter);
@Named("RunInstances")
@Override
@POST
@Path("/")
@FormParams(keys = ACTION, values = "RunInstances")
@XMLResponseParser(AWSRunInstancesResponseHandler.class)
Reservation<? extends AWSRunningInstance> runInstancesInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@Nullable @BinderParam(IfNotNullBindAvailabilityZoneToFormParam.class) String nullableAvailabilityZone,
@FormParam("ImageId") String imageId, @FormParam("MinCount") int minCount,
@FormParam("MaxCount") int maxCount, RunInstancesOptions... options);
}
| yanzhijun/jclouds-aliyun | providers/aws-ec2/src/main/java/org/jclouds/aws/ec2/features/AWSInstanceApi.java | Java | apache-2.0 | 4,022 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.direct;
import java.nio.ByteBuffer;
import java.util.BitSet;
import java.util.Collection;
import java.util.Map;
import java.util.UUID;
import org.apache.ignite.internal.direct.state.DirectMessageState;
import org.apache.ignite.internal.direct.state.DirectMessageStateItem;
import org.apache.ignite.internal.direct.stream.DirectByteBufferStream;
import org.apache.ignite.internal.direct.stream.v1.DirectByteBufferStreamImplV1;
import org.apache.ignite.internal.direct.stream.v2.DirectByteBufferStreamImplV2;
import org.apache.ignite.internal.direct.stream.v3.DirectByteBufferStreamImplV3;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteOutClosure;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.jetbrains.annotations.Nullable;
/**
* Message writer implementation.
*/
public class DirectMessageWriter implements MessageWriter {
/** State. */
@GridToStringInclude
private final DirectMessageState<StateItem> state;
/** Protocol version. */
@GridToStringInclude
private final byte protoVer;
/**
* @param protoVer Protocol version.
*/
public DirectMessageWriter(final byte protoVer) {
state = new DirectMessageState<>(StateItem.class, new IgniteOutClosure<StateItem>() {
@Override public StateItem apply() {
return new StateItem(protoVer);
}
});
this.protoVer = protoVer;
}
/** {@inheritDoc} */
@Override public void setBuffer(ByteBuffer buf) {
state.item().stream.setBuffer(buf);
}
/** {@inheritDoc} */
@Override public void setCurrentWriteClass(Class<? extends Message> msgCls) {
// No-op.
}
/** {@inheritDoc} */
@Override public boolean writeHeader(short type, byte fieldCnt) {
DirectByteBufferStream stream = state.item().stream;
stream.writeShort(type);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeByte(String name, byte val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeByte(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeShort(String name, short val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeShort(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeInt(String name, int val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeInt(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeLong(String name, long val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeLong(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeFloat(String name, float val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeFloat(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeDouble(String name, double val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeDouble(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeChar(String name, char val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeChar(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeBoolean(String name, boolean val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeBoolean(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeByteArray(String name, @Nullable byte[] val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeByteArray(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeByteArray(String name, byte[] val, long off, int len) {
DirectByteBufferStream stream = state.item().stream;
stream.writeByteArray(val, off, len);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeShortArray(String name, @Nullable short[] val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeShortArray(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeIntArray(String name, @Nullable int[] val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeIntArray(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeLongArray(String name, @Nullable long[] val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeLongArray(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeLongArray(String name, long[] val, int len) {
DirectByteBufferStream stream = state.item().stream;
stream.writeLongArray(val, len);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeFloatArray(String name, @Nullable float[] val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeFloatArray(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeDoubleArray(String name, @Nullable double[] val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeDoubleArray(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeCharArray(String name, @Nullable char[] val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeCharArray(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeBooleanArray(String name, @Nullable boolean[] val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeBooleanArray(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeString(String name, String val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeString(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeBitSet(String name, BitSet val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeBitSet(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeUuid(String name, UUID val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeUuid(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeIgniteUuid(String name, IgniteUuid val) {
DirectByteBufferStream stream = state.item().stream;
stream.writeIgniteUuid(val);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean writeAffinityTopologyVersion(String name, AffinityTopologyVersion val) {
if (protoVer >= 3) {
DirectByteBufferStream stream = state.item().stream;
stream.writeAffinityTopologyVersion(val);
return stream.lastFinished();
}
return writeMessage(name, val);
}
/** {@inheritDoc} */
@Override public boolean writeMessage(String name, @Nullable Message msg) {
DirectByteBufferStream stream = state.item().stream;
stream.writeMessage(msg, this);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public <T> boolean writeObjectArray(String name, T[] arr, MessageCollectionItemType itemType) {
DirectByteBufferStream stream = state.item().stream;
stream.writeObjectArray(arr, itemType, this);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public <T> boolean writeCollection(String name, Collection<T> col, MessageCollectionItemType itemType) {
DirectByteBufferStream stream = state.item().stream;
stream.writeCollection(col, itemType, this);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public <K, V> boolean writeMap(String name, Map<K, V> map, MessageCollectionItemType keyType,
MessageCollectionItemType valType) {
DirectByteBufferStream stream = state.item().stream;
stream.writeMap(map, keyType, valType, this);
return stream.lastFinished();
}
/** {@inheritDoc} */
@Override public boolean isHeaderWritten() {
return state.item().hdrWritten;
}
/** {@inheritDoc} */
@Override public void onHeaderWritten() {
state.item().hdrWritten = true;
}
/** {@inheritDoc} */
@Override public int state() {
return state.item().state;
}
/** {@inheritDoc} */
@Override public void incrementState() {
state.item().state++;
}
/** {@inheritDoc} */
@Override public void beforeInnerMessageWrite() {
state.forward();
}
/** {@inheritDoc} */
@Override public void afterInnerMessageWrite(boolean finished) {
state.backward(finished);
}
/** {@inheritDoc} */
@Override public void reset() {
state.reset();
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(DirectMessageWriter.class, this);
}
    /**
     * Per-message writer state item: holds the byte buffer stream, the current
     * state index and the header-written flag.
     */
private static class StateItem implements DirectMessageStateItem {
/** */
private final DirectByteBufferStream stream;
/** */
private int state;
/** */
private boolean hdrWritten;
/**
* @param protoVer Protocol version.
*/
public StateItem(byte protoVer) {
switch (protoVer) {
case 1:
stream = new DirectByteBufferStreamImplV1(null);
break;
case 2:
stream = new DirectByteBufferStreamImplV2(null);
break;
case 3:
stream = new DirectByteBufferStreamImplV3(null);
break;
default:
throw new IllegalStateException("Invalid protocol version: " + protoVer);
}
}
/** {@inheritDoc} */
@Override public void reset() {
state = 0;
hdrWritten = false;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(StateItem.class, this);
}
}
}
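// Illustrative usage sketch (not part of the original file): message classes
// typically drive this writer from a state-machine style writeTo() method,
// calling one write*() method per field and bailing out when the buffer is
// full. The field names and types below are assumed example values.
//
//     switch (writer.state()) {
//         case 0:
//             if (!writer.writeUuid("nodeId", nodeId))
//                 return false;          // buffer full, resume here next time
//             writer.incrementState();
//
//         case 1:
//             if (!writer.writeMessage("payload", payload))
//                 return false;
//             writer.incrementState();
//     }
//     return true;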
| BiryukovVA/ignite | modules/core/src/main/java/org/apache/ignite/internal/direct/DirectMessageWriter.java | Java | apache-2.0 | 12,148 |
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory() :
typeof define === 'function' && define.amd ? define(factory) :
(factory());
}(this, (function () { 'use strict';
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
Zone.__load_patch('bluebird', function (global, Zone) {
        // TODO: @JiaLiPassion, we could automatically patch bluebird
        // when global.Promise is Bluebird, but sometimes in Node.js
        // global.Promise is not Bluebird and Bluebird is only used by
        // other libraries such as sequelize, so it is safer to just
        // expose a method to patch Bluebird explicitly.
var BLUEBIRD = 'bluebird';
Zone[Zone.__symbol__(BLUEBIRD)] = function patchBluebird(Bluebird) {
Bluebird.setScheduler(function (fn) {
Zone.current.scheduleMicroTask(BLUEBIRD, fn);
});
};
});
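    // Illustrative usage (not part of the original file): after this patch is
    // loaded, application code passes its Bluebird constructor to the exposed
    // hook so that Bluebird callbacks are scheduled as Zone microtasks. The
    // require() call below is an assumed example:
    //
    //     var Bluebird = require('bluebird');
    //     Zone[Zone.__symbol__('bluebird')](Bluebird);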
})));
| Geovanny0401/bookStore | bookstore-front/node_modules/zone.js/dist/zone-bluebird.js | JavaScript | apache-2.0 | 1,237 |
<?php namespace SleepingOwl\Admin;
use SleepingOwl\Html\FormBuilder;
use SleepingOwl\Html\HtmlBuilder;
use SleepingOwl\Admin\Menu\MenuItem;
use SleepingOwl\Admin\Models\ModelItem;
use SleepingOwl\Admin\Models\Models;
use Illuminate\Config\Repository;
use Illuminate\Filesystem\Filesystem;
use Illuminate\Routing\Router as IlluminateRouter;
use Symfony\Component\Finder\Finder;
use Illuminate\Routing\UrlGenerator;
/**
* Class Admin
*
* @package SleepingOwl\Admin
*/
class Admin
{
/**
* Bootstrap filename
*/
const BOOTSRAP_FILE = 'bootstrap.php';
/**
* @var Admin
*/
public static $instance;
/**
* @var string
*/
public $title;
/**
* @var Router
*/
public $router;
/**
* @var MenuItem
*/
public $menu;
/**
* @var Models
*/
public $models;
/**
* @var HtmlBuilder
*/
public $htmlBuilder;
/**
* @var FormBuilder
*/
public $formBuilder;
/**
* @var Finder
*/
protected $finder;
/**
* @var string
*/
protected $bootstrapDirectory;
/**
* @var Filesystem
*/
protected $filesystem;
/**
* @param HtmlBuilder $htmlBuilder
* @param FormBuilder $formBuilder
* @param Finder $finder
* @param Repository $config
* @param IlluminateRouter $illuminateRouter
* @param UrlGenerator $urlGenerator
* @param Filesystem $filesystem
*/
function __construct(HtmlBuilder $htmlBuilder, FormBuilder $formBuilder, Finder $finder, Repository $config,
IlluminateRouter $illuminateRouter, UrlGenerator $urlGenerator, Filesystem $filesystem)
{
static::$instance = $this;
$this->htmlBuilder = $htmlBuilder;
$this->formBuilder = $formBuilder;
$this->finder = $finder;
$this->filesystem = $filesystem;
$this->title = $config->get('admin.title');
$this->bootstrapDirectory = $config->get('admin.bootstrapDirectory');
$this->router = new Router($illuminateRouter, $config, $urlGenerator, $config->get('admin.prefix'));
$this->menu = new MenuItem;
$this->models = new Models;
$this->requireBootstrap();
}
/**
* @return Admin
*/
public static function instance()
{
if (is_null(static::$instance))
{
app('\SleepingOwl\Admin\Admin');
}
return static::$instance;
}
	/**
	 * Requires every bootstrap file found in the bootstrap directory,
	 * loading the main bootstrap.php file first.
	 */
protected function requireBootstrap()
{
if (! $this->filesystem->isDirectory($this->bootstrapDirectory)) return;
$files = $this->finder->create()->files()->name('/^[^_].+\.php$/')->in($this->bootstrapDirectory);
$files->sort(function ($a)
{
return $a->getFilename() !== static::BOOTSRAP_FILE;
});
foreach ($files as $file)
{
$this->filesystem->requireOnce($file);
}
}
/**
* @param $class
* @return ModelItem
*/
public static function model($class)
{
$modelItem = new ModelItem($class);
return $modelItem;
}
/**
* @param null $model
* @return MenuItem
*/
public static function menu($model = null)
{
return new MenuItem($model);
}
/**
* @param string $content
* @param $title
* @return string
*/
public static function view($content, $title = null)
{
$controller = \App::make('SleepingOwl\Admin\Controllers\AdminController', ['disableFilters' => true]);
return $controller->renderCustomContent($title, $content);
}
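	/*
	 * Illustrative bootstrap usage (sketch, not part of the original class);
	 * the model class name below is an assumed example:
	 *
	 *   Admin::model('App\News');  // returns a ModelItem for the given model class
	 *   Admin::menu('App\News');   // returns a MenuItem for that model
	 */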
} | dungbq89/lnews | vendor/sleeping-owl/admin/src/SleepingOwl/Admin/Admin.php | PHP | apache-2.0 | 3,169 |
Puppet::Type.newtype(:ec2_scalingpolicy) do
@doc = 'Type representing an EC2 scaling policy.'
ensurable
newparam(:name, namevar: true) do
desc 'The name of the scaling policy.'
validate do |value|
fail 'Scaling policies must have a name' if value == ''
fail 'name should be a String' unless value.is_a?(String)
end
end
newproperty(:scaling_adjustment) do
desc 'The amount to adjust the size of the group by.'
validate do |value|
fail 'scaling adjustment cannot be blank' if value == ''
end
munge do |value|
value.to_i
end
end
newproperty(:region) do
desc 'The region in which to launch the policy.'
validate do |value|
fail 'region should not contain spaces' if value =~ /\s/
fail 'region should not be blank' if value == ''
fail 'region should be a String' unless value.is_a?(String)
end
end
newproperty(:adjustment_type) do
    desc 'How the scaling adjustment is interpreted, e.g. ChangeInCapacity, ExactCapacity or PercentChangeInCapacity.'
validate do |value|
fail 'adjustment_type should not contain spaces' if value =~ /\s/
fail 'adjustment_type should not be blank' if value == ''
fail 'adjustment_type should be a String' unless value.is_a?(String)
end
end
newproperty(:auto_scaling_group) do
desc 'The auto scaling group to attach the policy to.'
validate do |value|
fail 'auto_scaling_group cannot be blank' if value == ''
fail 'auto_scaling_group should be a String' unless value.is_a?(String)
end
end
autorequire(:ec2_autoscalinggroup) do
self[:auto_scaling_group]
end
end
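# Illustrative manifest usage (sketch, not part of the original type); the
# resource title and attribute values below are assumed examples:
#
#   ec2_scalingpolicy { 'scaleout':
#     ensure             => present,
#     scaling_adjustment => 30,
#     adjustment_type    => 'PercentChangeInCapacity',
#     region             => 'sa-east-1',
#     auto_scaling_group => 'test-asg',
#   }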
| MarsuperMammal/pw_gce | lib/puppet/type/ec2_scalingpolicy.rb | Ruby | apache-2.0 | 1,573 |
/**
* Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.core.realtime;
public class RealtimeIntegrationTest {
  public void endToEndTest() {
    // start zk server
    // setup cluster
    // setup realtime resource
    // start controller
    // start participants
  }
}
| pinotlytics/pinot | pinot-core/src/test/java/com/linkedin/pinot/core/realtime/RealtimeIntegrationTest.java | Java | apache-2.0 | 877 |
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.event;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EventObject;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.logging.Logger;
import com.vaadin.server.ErrorEvent;
import com.vaadin.server.ErrorHandler;
/**
* <code>EventRouter</code> class implementing the inheritable event listening
* model. For more information on the event model see the
* {@link com.vaadin.event package documentation}.
*
* @author Vaadin Ltd.
* @since 3.0
*/
@SuppressWarnings("serial")
public class EventRouter implements MethodEventSource {
/**
* List of registered listeners.
*/
private LinkedHashSet<ListenerMethod> listenerList = null;
/*
     * Registers a new listener with the specified activation method to listen to
* events generated by this component. Don't add a JavaDoc comment here, we
* use the default documentation from implemented interface.
*/
@Override
public void addListener(Class<?> eventType, Object object, Method method) {
if (listenerList == null) {
listenerList = new LinkedHashSet<ListenerMethod>();
}
listenerList.add(new ListenerMethod(eventType, object, method));
}
/*
* Registers a new listener with the specified named activation method to
     * listen to events generated by this component. Don't add a JavaDoc comment
* here, we use the default documentation from implemented interface.
*/
@Override
public void addListener(Class<?> eventType, Object object, String methodName) {
if (listenerList == null) {
listenerList = new LinkedHashSet<ListenerMethod>();
}
listenerList.add(new ListenerMethod(eventType, object, methodName));
}
/*
* Removes all registered listeners matching the given parameters. Don't add
* a JavaDoc comment here, we use the default documentation from implemented
* interface.
*/
@Override
public void removeListener(Class<?> eventType, Object target) {
if (listenerList != null) {
final Iterator<ListenerMethod> i = listenerList.iterator();
while (i.hasNext()) {
final ListenerMethod lm = i.next();
if (lm.matches(eventType, target)) {
i.remove();
return;
}
}
}
}
/*
     * Removes the event listener methods matching the given parameters.
* Don't add a JavaDoc comment here, we use the default documentation from
* implemented interface.
*/
@Override
public void removeListener(Class<?> eventType, Object target, Method method) {
if (listenerList != null) {
final Iterator<ListenerMethod> i = listenerList.iterator();
while (i.hasNext()) {
final ListenerMethod lm = i.next();
if (lm.matches(eventType, target, method)) {
i.remove();
return;
}
}
}
}
/*
     * Removes the event listener method matching the given parameters.
* Don't add a JavaDoc comment here, we use the default documentation from
* implemented interface.
*/
@Override
public void removeListener(Class<?> eventType, Object target,
String methodName) {
// Find the correct method
final Method[] methods = target.getClass().getMethods();
Method method = null;
for (int i = 0; i < methods.length; i++) {
if (methods[i].getName().equals(methodName)) {
method = methods[i];
}
}
if (method == null) {
throw new IllegalArgumentException();
}
// Remove the listeners
if (listenerList != null) {
final Iterator<ListenerMethod> i = listenerList.iterator();
while (i.hasNext()) {
final ListenerMethod lm = i.next();
if (lm.matches(eventType, target, method)) {
i.remove();
return;
}
}
}
}
/**
* Removes all listeners from event router.
*/
public void removeAllListeners() {
listenerList = null;
}
/**
* Sends an event to all registered listeners. The listeners will decide if
* the activation method should be called or not.
*
* @param event
* the Event to be sent to all listeners.
*/
public void fireEvent(EventObject event) {
fireEvent(event, null);
}
/**
* Sends an event to all registered listeners. The listeners will decide if
* the activation method should be called or not.
* <p>
* If an error handler is set, the processing of other listeners will
* continue after the error handler method call unless the error handler
* itself throws an exception.
*
* @param event
* the Event to be sent to all listeners.
* @param errorHandler
* error handler to use to handle any exceptions thrown by
* listeners or null to let the exception propagate to the
* caller, preventing further listener calls
*/
public void fireEvent(EventObject event, ErrorHandler errorHandler) {
// It is not necessary to send any events if there are no listeners
if (listenerList != null) {
// Make a copy of the listener list to allow listeners to be added
// inside listener methods. Fixes #3605.
// Send the event to all listeners. The listeners themselves
// will filter out unwanted events.
final Object[] listeners = listenerList.toArray();
for (int i = 0; i < listeners.length; i++) {
ListenerMethod listenerMethod = (ListenerMethod) listeners[i];
if (null != errorHandler) {
try {
listenerMethod.receiveEvent(event);
} catch (Exception e) {
errorHandler.error(new ErrorEvent(e));
}
} else {
listenerMethod.receiveEvent(event);
}
}
}
}
/**
* Checks if the given Event type is listened by a listener registered to
* this router.
*
* @param eventType
* the event type to be checked
* @return true if a listener is registered for the given event type
*/
public boolean hasListeners(Class<?> eventType) {
if (listenerList != null) {
for (ListenerMethod lm : listenerList) {
if (lm.isType(eventType)) {
return true;
}
}
}
return false;
}
/**
* Returns all listeners that match or extend the given event type.
*
* @param eventType
* The type of event to return listeners for.
* @return A collection with all registered listeners. Empty if no listeners
* are found.
*/
public Collection<?> getListeners(Class<?> eventType) {
List<Object> listeners = new ArrayList<Object>();
if (listenerList != null) {
for (ListenerMethod lm : listenerList) {
if (lm.isOrExtendsType(eventType)) {
listeners.add(lm.getTarget());
}
}
}
return listeners;
}
private Logger getLogger() {
return Logger.getLogger(EventRouter.class.getName());
}
}
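// Illustrative usage sketch (not part of the original file): a component that
// owns an EventRouter registers listener methods and then dispatches events
// through it. MyEvent and myListener below are assumed application-side names.
//
//     EventRouter router = new EventRouter();
//     router.addListener(MyEvent.class, myListener, "onMyEvent");
//     ...
//     router.fireEvent(new MyEvent(source));   // invokes myListener.onMyEvent(event) if the type matches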
| shahrzadmn/vaadin | server/src/com/vaadin/event/EventRouter.java | Java | apache-2.0 | 8,333 |
/*
* FCKeditor - The text editor for Internet - http://www.fckeditor.net Copyright
* (C) 2003-2008 Frederico Caldeira Knabben == BEGIN LICENSE ==
*
* Licensed under the terms of any of the following licenses at your choice: -
* GNU General Public License Version 2 or later (the "GPL")
* http://www.gnu.org/licenses/gpl.html - GNU Lesser General Public License
* Version 2.1 or later (the "LGPL") http://www.gnu.org/licenses/lgpl.html -
* Mozilla Public License Version 1.1 or later (the "MPL")
* http://www.mozilla.org/MPL/MPL-1.1.html == END LICENSE ==
*
* Useful functions used by almost all dialog window pages. Dialogs should link
* to this file as the very first script on the page.
*/
// Automatically detect the correct document.domain (#123).
(function() {
var d = document.domain;
while (true) {
// Test if we can access a parent property.
try {
var test = window.parent.document.domain;
break;
} catch (e) {
}
// Remove a domain part: www.mytest.example.com => mytest.example.com =>
// example.com ...
d = d.replace(/.*?(?:\.|$)/, '');
if (d.length == 0)
break; // It was not able to detect the domain.
try {
document.domain = d;
} catch (e) {
break;
}
}
})();
// Attention: FCKConfig must be available in the page.
function GetCommonDialogCss(prefix) {
// CSS minified by http://iceyboard.no-ip.org/projects/css_compressor (see
// _dev/css_compression.txt).
return FCKConfig.BasePath
+ 'dialog/common/'
+ '|.ImagePreviewArea{border:#000 1px solid;overflow:auto;width:100%;height:170px;background-color:#fff}.FlashPreviewArea{border:#000 1px solid;padding:5px;overflow:auto;width:100%;height:170px;background-color:#fff}.BtnReset{float:left;background-position:center center;background-image:url(images/reset.gif);width:16px;height:16px;background-repeat:no-repeat;border:1px none;font-size:1px}.BtnLocked,.BtnUnlocked{float:left;background-position:center center;background-image:url(images/locked.gif);width:16px;height:16px;background-repeat:no-repeat;border:none 1px;font-size:1px}.BtnUnlocked{background-image:url(images/unlocked.gif)}.BtnOver{border:outset 1px;cursor:pointer;cursor:hand}';
}
// Gets an element by its Id. Used for shorter coding.
function GetE(elementId) {
return document.getElementById(elementId);
}
function ShowE(element, isVisible) {
if (typeof(element) == 'string')
element = GetE(element);
element.style.display = isVisible ? '' : 'none';
}
function SetAttribute(element, attName, attValue) {
if (attValue == null || attValue.length == 0)
element.removeAttribute(attName, 0); // 0 : Case Insensitive
else
element.setAttribute(attName, attValue, 0); // 0 : Case Insensitive
}
function GetAttribute(element, attName, valueIfNull) {
var oAtt = element.attributes[attName];
if (oAtt == null || !oAtt.specified)
return valueIfNull ? valueIfNull : '';
var oValue = element.getAttribute(attName, 2);
if (oValue == null)
oValue = oAtt.nodeValue;
return (oValue == null ? valueIfNull : oValue);
}
function SelectField(elementId) {
var element = GetE(elementId);
element.focus();
// element.select may not be available for some fields (like <select>).
if (element.select)
element.select();
}
// Functions used by text fields to accept numbers only.
var IsDigit = (function() {
var KeyIdentifierMap = {
End : 35,
Home : 36,
Left : 37,
Right : 39,
'U+00007F' : 46
// Delete
};
return function(e) {
if (!e)
e = event;
var iCode = (e.keyCode || e.charCode);
if (!iCode && e.keyIdentifier && (e.keyIdentifier in KeyIdentifierMap))
iCode = KeyIdentifierMap[e.keyIdentifier];
return ((iCode >= 48 && iCode <= 57) // Numbers
|| (iCode >= 35 && iCode <= 40) // Arrows, Home, End
|| iCode == 8 // Backspace
|| iCode == 46 // Delete
|| iCode == 9 // Tab
);
}
})();
String.prototype.Trim = function() {
return this.replace(/(^\s*)|(\s*$)/g, '');
}
String.prototype.StartsWith = function(value) {
return (this.substr(0, value.length) == value);
}
String.prototype.Remove = function(start, length) {
var s = '';
if (start > 0)
s = this.substring(0, start);
if (start + length < this.length)
s += this.substring(start + length, this.length);
return s;
}
String.prototype.ReplaceAll = function(searchArray, replaceArray) {
var replaced = this;
for (var i = 0; i < searchArray.length; i++) {
replaced = replaced.replace(searchArray[i], replaceArray[i]);
}
return replaced;
}
function OpenFileBrowser(url, width, height) {
// oEditor must be defined.
var iLeft = (oEditor.FCKConfig.ScreenWidth - width) / 2;
var iTop = (oEditor.FCKConfig.ScreenHeight - height) / 2;
var sOptions = "toolbar=no,status=no,resizable=yes,dependent=yes,scrollbars=yes";
sOptions += ",width=" + width;
sOptions += ",height=" + height;
sOptions += ",left=" + iLeft;
sOptions += ",top=" + iTop;
window.open(url, 'FCKBrowseWindow', sOptions);
}
/**
 * Utility function to create/update an element with a name attribute in IE, so
 * it behaves properly when moved around. It also allows changing the name or
 * other special attributes of an existing node.
 *
 *   oEditor     : instance of FCKeditor where the element will be created
 *   oOriginal   : current element being edited, or null if it has to be created
 *   nodeName    : string with the name of the element to create
 *   oAttributes : hash object with the attributes that must be set at creation
 *                 time in IE. Those attributes will also be set after the
 *                 element has been created, for any other browser, to avoid
 *                 redundant code.
 */
function CreateNamedElement(oEditor, oOriginal, nodeName, oAttributes) {
var oNewNode;
// IE doesn't allow easily to change properties of an existing object,
// so remove the old and force the creation of a new one.
var oldNode = null;
if (oOriginal && oEditor.FCKBrowserInfo.IsIE) {
// Force the creation only if some of the special attributes have
// changed:
var bChanged = false;
for (var attName in oAttributes)
bChanged |= (oOriginal.getAttribute(attName, 2) != oAttributes[attName]);
if (bChanged) {
oldNode = oOriginal;
oOriginal = null;
}
}
// If the node existed (and it's not IE), then we just have to update its
// attributes
if (oOriginal) {
oNewNode = oOriginal;
} else {
// #676, IE doesn't play nice with the name or type attribute
if (oEditor.FCKBrowserInfo.IsIE) {
var sbHTML = [];
sbHTML.push('<' + nodeName);
for (var prop in oAttributes) {
sbHTML.push(' ' + prop + '="' + oAttributes[prop] + '"');
}
sbHTML.push('>');
if (!oEditor.FCKListsLib.EmptyElements[nodeName.toLowerCase()])
sbHTML.push('</' + nodeName + '>');
oNewNode = oEditor.FCK.EditorDocument
.createElement(sbHTML.join(''));
// Check if we are just changing the properties of an existing node:
// copy its properties
if (oldNode) {
CopyAttributes(oldNode, oNewNode, oAttributes);
oEditor.FCKDomTools.MoveChildren(oldNode, oNewNode);
oldNode.parentNode.removeChild(oldNode);
oldNode = null;
if (oEditor.FCK.Selection.SelectionData) {
// Trick to refresh the selection object and avoid error in
// fckdialog.html Selection.EnsureSelection
var oSel = oEditor.FCK.EditorDocument.selection;
					// Now oSel.type will be 'None', reflecting the real situation.
					oEditor.FCK.Selection.SelectionData = oSel.createRange();
}
}
oNewNode = oEditor.FCK.InsertElement(oNewNode);
// FCK.Selection.SelectionData is broken by now since we've
// deleted the previously selected element. So we need to reassign
// it.
if (oEditor.FCK.Selection.SelectionData) {
var range = oEditor.FCK.EditorDocument.body
.createControlRange();
range.add(oNewNode);
oEditor.FCK.Selection.SelectionData = range;
}
} else {
oNewNode = oEditor.FCK.InsertElement(nodeName);
}
}
// Set the basic attributes
for (var attName in oAttributes)
oNewNode.setAttribute(attName, oAttributes[attName], 0); // 0 : Case
// Insensitive
return oNewNode;
}
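// Illustrative call (not part of the original file): a dialog page would
// typically rebuild its target form field like this, where oEditor, oActiveEl
// and sName are assumed to be provided by the dialog:
//
//     var oInput = CreateNamedElement( oEditor, oActiveEl, 'INPUT', { name : sName, type : 'text' } ) ;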
// Copies all the attributes from one node to the other, much like a clone,
// but oSkipAttributes is an object with the attributes that must NOT be copied.
function CopyAttributes(oSource, oDest, oSkipAttributes) {
var aAttributes = oSource.attributes;
for (var n = 0; n < aAttributes.length; n++) {
var oAttribute = aAttributes[n];
if (oAttribute.specified) {
var sAttName = oAttribute.nodeName;
// We can set the type only once, so do it with the proper value,
// not copying it.
if (sAttName in oSkipAttributes)
continue;
var sAttValue = oSource.getAttribute(sAttName, 2);
if (sAttValue == null)
sAttValue = oAttribute.nodeValue;
oDest.setAttribute(sAttName, sAttValue, 0); // 0 : Case Insensitive
}
}
// The style:
oDest.style.cssText = oSource.style.cssText;
}
| zhangjunfang/eclipse-dir | nsp/src/main/webapp/scripts/lib/fckeditor/editor/dialog/common/fck_dialog_common.js | JavaScript | bsd-2-clause | 8,894 |
/*
* Copyright (c) 2004-2006 The Regents of The University of Michigan
* Copyright (c) 2010 The University of Edinburgh
* Copyright (c) 2012 Mark D. Hill and David A. Wood
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Kevin Lim
* Timothy M. Jones
*/
#include "cpu/pred/2bit_local.hh"
#include "cpu/pred/bi_mode.hh"
#include "cpu/pred/bpred_unit_impl.hh"
#include "cpu/pred/tournament.hh"
BPredUnit *
BranchPredictorParams::create()
{
// Setup the selected predictor.
if (predType == "local") {
return new LocalBP(this);
} else if (predType == "tournament") {
return new TournamentBP(this);
} else if (predType == "bi-mode") {
return new BiModeBP(this);
} else {
fatal("Invalid BP selected!");
}
}
| hoangt/tpzsimul.gem5 | src/cpu/pred/bpred_unit.cc | C++ | bsd-3-clause | 2,246 |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.minidump_uploader.util;
import java.net.HttpURLConnection;
/**
* A factory class for creating a HttpURLConnection.
*/
public interface HttpURLConnectionFactory {
/**
* @param url the url to communicate with
* @return a HttpURLConnection to communicate with |url|
*/
HttpURLConnection createHttpURLConnection(String url);
}
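// Illustrative implementation sketch (not part of the original interface); a
// trivial factory would simply open a connection for the given URL, e.g.
// (imports of java.net.URL and java.io.IOException assumed):
//
//     class SimpleHttpURLConnectionFactory implements HttpURLConnectionFactory {
//         @Override
//         public HttpURLConnection createHttpURLConnection(String url) {
//             try {
//                 return (HttpURLConnection) new URL(url).openConnection();
//             } catch (IOException e) {
//                 return null;
//             }
//         }
//     }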
| scheib/chromium | components/minidump_uploader/android/java/src/org/chromium/components/minidump_uploader/util/HttpURLConnectionFactory.java | Java | bsd-3-clause | 548 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2010 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
from datetime import datetime
import doctest
from gettext import NullTranslations
import unittest
from genshi.core import Attrs
from genshi.template import MarkupTemplate, Context
from genshi.filters.i18n import Translator, extract
from genshi.input import HTML
from genshi.compat import IS_PYTHON2, StringIO
class DummyTranslations(NullTranslations):
_domains = {}
def __init__(self, catalog=()):
NullTranslations.__init__(self)
self._catalog = catalog or {}
self.plural = lambda n: n != 1
def add_domain(self, domain, catalog):
translation = DummyTranslations(catalog)
translation.add_fallback(self)
self._domains[domain] = translation
def _domain_call(self, func, domain, *args, **kwargs):
return getattr(self._domains.get(domain, self), func)(*args, **kwargs)
if IS_PYTHON2:
def ugettext(self, message):
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
if self._fallback:
return self._fallback.ugettext(message)
return unicode(message)
return tmsg
else:
def gettext(self, message):
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
if self._fallback:
return self._fallback.gettext(message)
return unicode(message)
return tmsg
if IS_PYTHON2:
def dugettext(self, domain, message):
return self._domain_call('ugettext', domain, message)
else:
def dgettext(self, domain, message):
return self._domain_call('gettext', domain, message)
def ungettext(self, msgid1, msgid2, n):
try:
return self._catalog[(msgid1, self.plural(n))]
except KeyError:
if self._fallback:
return self._fallback.ngettext(msgid1, msgid2, n)
if n == 1:
return msgid1
else:
return msgid2
if not IS_PYTHON2:
ngettext = ungettext
del ungettext
if IS_PYTHON2:
def dungettext(self, domain, singular, plural, numeral):
return self._domain_call('ungettext', domain, singular, plural, numeral)
else:
def dngettext(self, domain, singular, plural, numeral):
return self._domain_call('ngettext', domain, singular, plural, numeral)
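# Typical wiring exercised by the test cases below (shown here as a sketch; the
# catalog entries are example data): a Translator wraps a translations object
# and is installed on a template via setup(), after which generate()/render()
# produce translated output.
#
#     translations = DummyTranslations({'Foo': 'Voh'})
#     translator = Translator(translations)
#     translator.setup(tmpl)          # adds the filter and i18n directives to tmpl
#     output = tmpl.generate().render()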
class TranslatorTestCase(unittest.TestCase):
def test_translate_included_attribute_text(self):
"""
Verify that translated attributes end up in a proper `Attrs` instance.
"""
html = HTML(u"""<html>
<span title="Foo"></span>
</html>""")
translator = Translator(lambda s: u"Voh")
stream = list(html.filter(translator))
kind, data, pos = stream[2]
assert isinstance(data[1], Attrs)
def test_extract_without_text(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<p title="Bar">Foo</p>
${ngettext("Singular", "Plural", num)}
</html>""")
translator = Translator(extract_text=False)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, 'ngettext', ('Singular', 'Plural', None), []),
messages[0])
def test_extract_plural_form(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
${ngettext("Singular", "Plural", num)}
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((2, 'ngettext', ('Singular', 'Plural', None), []),
messages[0])
def test_extract_funky_plural_form(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
${ngettext(len(items), *widget.display_names)}
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((2, 'ngettext', (None, None), []), messages[0])
def test_extract_gettext_with_unicode_string(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
${gettext("Grüße")}
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((2, 'gettext', u'Gr\xfc\xdfe', []), messages[0])
def test_extract_included_attribute_text(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<span title="Foo"></span>
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((2, None, 'Foo', []), messages[0])
def test_extract_attribute_expr(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<input type="submit" value="${_('Save')}" />
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((2, '_', 'Save', []), messages[0])
def test_extract_non_included_attribute_interpolated(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<a href="#anchor_${num}">Foo</a>
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((2, None, 'Foo', []), messages[0])
def test_extract_text_from_sub(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<py:if test="foo">Foo</py:if>
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((2, None, 'Foo', []), messages[0])
def test_ignore_tag_with_fixed_xml_lang(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<p xml:lang="en">(c) 2007 Edgewall Software</p>
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(0, len(messages))
def test_extract_tag_with_variable_xml_lang(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<p xml:lang="${lang}">(c) 2007 Edgewall Software</p>
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((2, None, '(c) 2007 Edgewall Software', []),
messages[0])
def test_ignore_attribute_with_expression(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<input type="submit" value="Reply" title="Reply to comment $num" />
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(0, len(messages))
def test_translate_with_translations_object(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" i18n:comment="As in foo bar">Foo</p>
</html>""")
translator = Translator(DummyTranslations({'Foo': 'Voh'}))
translator.setup(tmpl)
self.assertEqual("""<html>
<p>Voh</p>
</html>""", tmpl.generate().render())
class MsgDirectiveTestCase(unittest.TestCase):
def test_extract_i18n_msg(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html">Help</a> for details.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:Help] for details.', messages[0][2])
def test_translate_i18n_msg(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html">Help</a> for details.
</p>
</html>""")
gettext = lambda s: u"Für Details siehe bitte [1:Hilfe]."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Für Details siehe bitte <a href="help.html">Hilfe</a>.</p>
</html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8'))
def test_extract_i18n_msg_nonewline(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Please see <a href="help.html">Help</a></p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:Help]', messages[0][2])
def test_translate_i18n_msg_nonewline(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Please see <a href="help.html">Help</a></p>
</html>""")
gettext = lambda s: u"Für Details siehe bitte [1:Hilfe]"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Für Details siehe bitte <a href="help.html">Hilfe</a></p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_elt_nonewline(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg>Please see <a href="help.html">Help</a></i18n:msg>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:Help]', messages[0][2])
def test_translate_i18n_msg_elt_nonewline(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg>Please see <a href="help.html">Help</a></i18n:msg>
</html>""")
gettext = lambda s: u"Für Details siehe bitte [1:Hilfe]"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
Für Details siehe bitte <a href="help.html">Hilfe</a>
</html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8'))
def test_extract_i18n_msg_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="A helpful paragraph">
Please see <a href="help.html" title="Click for help">Help</a>
</p>
</html>""")
translator = Translator()
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(3, len(messages))
self.assertEqual('A helpful paragraph', messages[0][2])
self.assertEqual(3, messages[0][0])
self.assertEqual('Click for help', messages[1][2])
self.assertEqual(4, messages[1][0])
self.assertEqual('Please see [1:Help]', messages[2][2])
self.assertEqual(3, messages[2][0])
def test_translate_i18n_msg_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="A helpful paragraph">
Please see <a href="help.html" title="Click for help">Help</a>
</p>
</html>""")
translator = Translator(lambda msgid: {
'A helpful paragraph': 'Ein hilfreicher Absatz',
'Click for help': u'Klicken für Hilfe',
'Please see [1:Help]': u'Siehe bitte [1:Hilfe]'
}[msgid])
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p title="Ein hilfreicher Absatz">Siehe bitte <a href="help.html" title="Klicken für Hilfe">Hilfe</a></p>
</html>""", tmpl.generate().render(encoding=None))
def test_extract_i18n_msg_with_dynamic_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="${_('A helpful paragraph')}">
Please see <a href="help.html" title="${_('Click for help')}">Help</a>
</p>
</html>""")
translator = Translator()
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(3, len(messages))
self.assertEqual('A helpful paragraph', messages[0][2])
self.assertEqual(3, messages[0][0])
self.assertEqual('Click for help', messages[1][2])
self.assertEqual(4, messages[1][0])
self.assertEqual('Please see [1:Help]', messages[2][2])
self.assertEqual(3, messages[2][0])
def test_translate_i18n_msg_with_dynamic_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="${_('A helpful paragraph')}">
Please see <a href="help.html" title="${_('Click for help')}">Help</a>
</p>
</html>""")
translator = Translator(lambda msgid: {
'A helpful paragraph': 'Ein hilfreicher Absatz',
'Click for help': u'Klicken für Hilfe',
'Please see [1:Help]': u'Siehe bitte [1:Hilfe]'
}[msgid])
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p title="Ein hilfreicher Absatz">Siehe bitte <a href="help.html" title="Klicken für Hilfe">Hilfe</a></p>
</html>""", tmpl.generate(_=translator.translate).render(encoding=None))
def test_extract_i18n_msg_as_element_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg params="">
Please see <a href="help.html" title="Click for help">Help</a>
</i18n:msg>
</html>""")
translator = Translator()
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual('Click for help', messages[0][2])
self.assertEqual(4, messages[0][0])
self.assertEqual('Please see [1:Help]', messages[1][2])
self.assertEqual(3, messages[1][0])
def test_translate_i18n_msg_as_element_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg params="">
Please see <a href="help.html" title="Click for help">Help</a>
</i18n:msg>
</html>""")
translator = Translator(lambda msgid: {
'Click for help': u'Klicken für Hilfe',
'Please see [1:Help]': u'Siehe bitte [1:Hilfe]'
}[msgid])
translator.setup(tmpl)
self.assertEqual(u"""<html>
Siehe bitte <a href="help.html" title="Klicken für Hilfe">Hilfe</a>
</html>""", tmpl.generate().render(encoding=None))
def test_extract_i18n_msg_nested(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html"><em>Help</em> page</a> for details.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:[2:Help] page] for details.',
messages[0][2])
def test_translate_i18n_msg_nested(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html"><em>Help</em> page</a> for details.
</p>
</html>""")
gettext = lambda s: u"Für Details siehe bitte [1:[2:Hilfeseite]]."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Für Details siehe bitte <a href="help.html"><em>Hilfeseite</em></a>.</p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_label_with_nested_input(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:msg="">
<label><input type="text" size="3" name="daysback" value="30" /> days back</label>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('[1:[2:] days back]',
messages[0][2])
def test_translate_i18n_msg_label_with_nested_input(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:msg="">
<label><input type="text" size="3" name="daysback" value="30" /> foo bar</label>
</div>
</html>""")
gettext = lambda s: "[1:[2:] foo bar]"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual("""<html>
<div><label><input type="text" size="3" name="daysback" value="30"/> foo bar</label></div>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_empty(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" /> entries per page.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Show me [1:] entries per page.', messages[0][2])
def test_translate_i18n_msg_empty(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" /> entries per page.
</p>
</html>""")
gettext = lambda s: u"[1:] Einträge pro Seite anzeigen."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p><input type="text" name="num"/> Einträge pro Seite anzeigen.</p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_multiple(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html">Help</a> for <em>details</em>.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:Help] for [2:details].', messages[0][2])
def test_translate_i18n_msg_multiple(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html">Help</a> for <em>details</em>.
</p>
</html>""")
gettext = lambda s: u"Für [2:Details] siehe bitte [1:Hilfe]."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Für <em>Details</em> siehe bitte <a href="help.html">Hilfe</a>.</p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_multiple_empty(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" /> entries per page, starting at page <input type="text" name="num" />.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Show me [1:] entries per page, starting at page [2:].',
messages[0][2])
def test_translate_i18n_msg_multiple_empty(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" /> entries per page, starting at page <input type="text" name="num" />.
</p>
</html>""", encoding='utf-8')
gettext = lambda s: u"[1:] Einträge pro Seite, beginnend auf Seite [2:]."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p><input type="text" name="num"/> Eintr\u00E4ge pro Seite, beginnend auf Seite <input type="text" name="num"/>.</p>
</html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8'))
def test_extract_i18n_msg_with_param(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="name">
Hello, ${user.name}!
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Hello, %(name)s!', messages[0][2])
def test_translate_i18n_msg_with_param(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="name">
Hello, ${user.name}!
</p>
</html>""")
gettext = lambda s: u"Hallo, %(name)s!"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>Hallo, Jim!</p>
</html>""", tmpl.generate(user=dict(name='Jim')).render())
def test_translate_i18n_msg_with_param_reordered(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="name">
Hello, ${user.name}!
</p>
</html>""")
gettext = lambda s: u"%(name)s, sei gegrüßt!"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Jim, sei gegrüßt!</p>
</html>""", tmpl.generate(user=dict(name='Jim')).render())
def test_translate_i18n_msg_with_attribute_param(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Hello, <a href="#${anchor}">dude</a>!
</p>
</html>""")
gettext = lambda s: u"Sei gegrüßt, [1:Alter]!"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Sei gegrüßt, <a href="#42">Alter</a>!</p>
</html>""", tmpl.generate(anchor='42').render())
def test_extract_i18n_msg_with_two_params(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="name, time">
Posted by ${post.author} at ${entry.time.strftime('%H:%m')}
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Posted by %(name)s at %(time)s', messages[0][2])
def test_translate_i18n_msg_with_two_params(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="name, time">
Written by ${entry.author} at ${entry.time.strftime('%H:%M')}
</p>
</html>""")
gettext = lambda s: u"%(name)s schrieb dies um %(time)s"
translator = Translator(gettext)
translator.setup(tmpl)
entry = {
'author': 'Jim',
'time': datetime(2008, 4, 1, 14, 30)
}
self.assertEqual("""<html>
<p>Jim schrieb dies um 14:30</p>
</html>""", tmpl.generate(entry=entry).render())
def test_extract_i18n_msg_with_directive(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" py:attrs="{'value': x}" /> entries per page.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Show me [1:] entries per page.', messages[0][2])
def test_translate_i18n_msg_with_directive(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" py:attrs="{'value': 'x'}" /> entries per page.
</p>
</html>""")
gettext = lambda s: u"[1:] Einträge pro Seite anzeigen."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p><input type="text" name="num" value="x"/> Einträge pro Seite anzeigen.</p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_with_comment(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:comment="As in foo bar" i18n:msg="">Foo</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, None, 'Foo', ['As in foo bar']), messages[0])
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" i18n:comment="As in foo bar">Foo</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, None, 'Foo', ['As in foo bar']), messages[0])
def test_translate_i18n_msg_with_comment(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" i18n:comment="As in foo bar">Foo</p>
</html>""")
gettext = lambda s: u"Voh"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>Voh</p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_with_attr(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="Foo bar">Foo</p>
</html>""")
translator = Translator()
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual((3, None, 'Foo bar', []), messages[0])
self.assertEqual((3, None, 'Foo', []), messages[1])
def test_translate_i18n_msg_with_attr(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="Foo bar">Foo</p>
</html>""")
gettext = lambda s: u"Voh"
translator = Translator(DummyTranslations({
'Foo': 'Voh',
'Foo bar': u'Voh bär'
}))
tmpl.filters.insert(0, translator)
tmpl.add_directives(Translator.NAMESPACE, translator)
self.assertEqual(u"""<html>
<p title="Voh bär">Voh</p>
</html>""", tmpl.generate().render())
def test_translate_i18n_msg_and_py_strip_directives(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" py:strip="">Foo</p>
<p py:strip="" i18n:msg="">Foo</p>
</html>""")
translator = Translator(DummyTranslations({'Foo': 'Voh'}))
translator.setup(tmpl)
self.assertEqual("""<html>
Voh
Voh
</html>""", tmpl.generate().render())
def test_i18n_msg_ticket_300_extract(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg params="date, author">
Changed ${ '10/12/2008' } ago by ${ 'me, the author' }
</i18n:msg>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual(
(3, None, 'Changed %(date)s ago by %(author)s', []), messages[0]
)
def test_i18n_msg_ticket_300_translate(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg params="date, author">
Changed ${ date } ago by ${ author }
</i18n:msg>
</html>""")
translations = DummyTranslations({
'Changed %(date)s ago by %(author)s': u'Modificado à %(date)s por %(author)s'
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual(u"""<html>
Modificado à um dia por Pedro
</html>""".encode('utf-8'), tmpl.generate(date='um dia', author="Pedro").render(encoding='utf-8'))
def test_i18n_msg_ticket_251_extract(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg=""><tt><b>Translation[ 0 ]</b>: <em>One coin</em></tt></p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual(
(3, None, u'[1:[2:Translation\\[\xa00\xa0\\]]: [3:One coin]]', []), messages[0]
)
def test_i18n_msg_ticket_251_translate(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg=""><tt><b>Translation[ 0 ]</b>: <em>One coin</em></tt></p>
</html>""")
translations = DummyTranslations({
u'[1:[2:Translation\\[\xa00\xa0\\]]: [3:One coin]]':
u'[1:[2:Trandução\\[\xa00\xa0\\]]: [3:Uma moeda]]'
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p><tt><b>Trandução[ 0 ]</b>: <em>Uma moeda</em></tt></p>
</html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8'))
def test_extract_i18n_msg_with_other_directives_nested(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" py:with="q = quote_plus(message[:80])">Before you do that, though, please first try
<strong><a href="${trac.homepage}search?ticket=yes&noquickjump=1&q=$q">searching</a>
for similar issues</strong>, as it is quite likely that this problem
has been reported before. For questions about installation
and configuration of Trac, please try the
<a href="${trac.homepage}wiki/MailingList">mailing list</a>
instead of filing a ticket.
</p>
</html>""")
translator = Translator()
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual(
'Before you do that, though, please first try\n '
'[1:[2:searching]\n for similar issues], as it is '
'quite likely that this problem\n has been reported '
'before. For questions about installation\n and '
'configuration of Trac, please try the\n '
'[3:mailing list]\n instead of filing a ticket.',
messages[0][2]
)
def test_translate_i18n_msg_with_other_directives_nested(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Before you do that, though, please first try
<strong><a href="${trac.homepage}search?ticket=yes&noquickjump=1&q=q">searching</a>
for similar issues</strong>, as it is quite likely that this problem
has been reported before. For questions about installation
and configuration of Trac, please try the
<a href="${trac.homepage}wiki/MailingList">mailing list</a>
instead of filing a ticket.
</p>
</html>""")
translations = DummyTranslations({
'Before you do that, though, please first try\n '
'[1:[2:searching]\n for similar issues], as it is '
'quite likely that this problem\n has been reported '
'before. For questions about installation\n and '
'configuration of Trac, please try the\n '
'[3:mailing list]\n instead of filing a ticket.':
u'Antes de o fazer, porém,\n '
u'[1:por favor tente [2:procurar]\n por problemas semelhantes], uma vez que '
u'é muito provável que este problema\n já tenha sido reportado '
u'anteriormente. Para questões relativas à instalação\n e '
u'configuração do Trac, por favor tente a\n '
u'[3:mailing list]\n em vez de criar um assunto.'
})
translator = Translator(translations)
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
ctx = Context()
ctx.push({'trac': {'homepage': 'http://trac.edgewall.org/'}})
self.assertEqual(u"""<html>
<p>Antes de o fazer, porém,
<strong>por favor tente <a href="http://trac.edgewall.org/search?ticket=yes&noquickjump=1&q=q">procurar</a>
por problemas semelhantes</strong>, uma vez que é muito provável que este problema
já tenha sido reportado anteriormente. Para questões relativas à instalação
e configuração do Trac, por favor tente a
<a href="http://trac.edgewall.org/wiki/MailingList">mailing list</a>
em vez de criar um assunto.</p>
</html>""", tmpl.generate(ctx).render())
def test_i18n_msg_with_other_nested_directives_with_reordered_content(self):
# See: http://genshi.edgewall.org/ticket/300#comment:10
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p py:if="not editable" class="hint" i18n:msg="">
<strong>Note:</strong> This repository is defined in
<code><a href="${ 'href.wiki(TracIni)' }">trac.ini</a></code>
and cannot be edited on this page.
</p>
</html>""")
translations = DummyTranslations({
'[1:Note:] This repository is defined in\n '
'[2:[3:trac.ini]]\n and cannot be edited on this page.':
u'[1:Nota:] Este repositório está definido em \n '
u'[2:[3:trac.ini]]\n e não pode ser editado nesta página.',
})
translator = Translator(translations)
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual(
'[1:Note:] This repository is defined in\n '
'[2:[3:trac.ini]]\n and cannot be edited on this page.',
messages[0][2]
)
self.assertEqual(u"""<html>
<p class="hint"><strong>Nota:</strong> Este repositório está definido em
<code><a href="href.wiki(TracIni)">trac.ini</a></code>
e não pode ser editado nesta página.</p>
</html>""".encode('utf-8'), tmpl.generate(editable=False).render(encoding='utf-8'))
def test_extract_i18n_msg_with_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" py:strip="">
Please see <a href="help.html">Help</a> for details.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, None, 'Please see [1:Help] for details.', []),
messages[0])
def test_extract_i18n_msg_with_py_strip_and_comment(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" py:strip="" i18n:comment="Foo">
Please see <a href="help.html">Help</a> for details.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, None, 'Please see [1:Help] for details.',
['Foo']), messages[0])
def test_translate_i18n_msg_and_comment_with_py_strip_directives(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" i18n:comment="As in foo bar" py:strip="">Foo</p>
<p py:strip="" i18n:msg="" i18n:comment="As in foo bar">Foo</p>
</html>""")
translator = Translator(DummyTranslations({'Foo': 'Voh'}))
translator.setup(tmpl)
self.assertEqual("""<html>
Voh
Voh
</html>""", tmpl.generate().render())
def test_translate_i18n_msg_ticket_404(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="first,second">
$first <span>$second</span> KEPT <span>Inside a tag</span> tail
</p></html>""")
translator = Translator(DummyTranslations())
translator.setup(tmpl)
self.assertEqual("""<html>
<p>FIRST <span>SECOND</span> KEPT <span>Inside a tag</span> tail"""
"""</p></html>""",
tmpl.generate(first="FIRST", second="SECOND").render())
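# Tests for the i18n:choose directive and its nested i18n:singular and
# i18n:plural variants, which select a message form based on a numeral
# and are extracted as ngettext calls.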
class ChooseDirectiveTestCase(unittest.TestCase):
def test_translate_i18n_choose_as_attribute(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="one">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</div>
<div i18n:choose="two">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</div>
</html>""")
translations = DummyTranslations()
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
<p>FooBar</p>
</div>
<div>
<p>FooBars</p>
</div>
</html>""", tmpl.generate(one=1, two=2).render())
def test_translate_i18n_choose_as_directive(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="two">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</i18n:choose>
<i18n:choose numeral="one">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</i18n:choose>
</html>""")
translations = DummyTranslations()
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>FooBars</p>
<p>FooBar</p>
</html>""", tmpl.generate(one=1, two=2).render())
def test_translate_i18n_choose_as_directive_singular_and_plural_with_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="two">
<p i18n:singular="" py:strip="">FooBar Singular with Strip</p>
<p i18n:plural="">FooBars Plural without Strip</p>
</i18n:choose>
<i18n:choose numeral="two">
<p i18n:singular="">FooBar singular without strip</p>
<p i18n:plural="" py:strip="">FooBars plural with strip</p>
</i18n:choose>
<i18n:choose numeral="one">
<p i18n:singular="">FooBar singular without strip</p>
<p i18n:plural="" py:strip="">FooBars plural with strip</p>
</i18n:choose>
<i18n:choose numeral="one">
<p i18n:singular="" py:strip="">FooBar singular with strip</p>
<p i18n:plural="">FooBars plural without strip</p>
</i18n:choose>
</html>""")
translations = DummyTranslations()
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>FooBars Plural without Strip</p>
FooBars plural with strip
<p>FooBar singular without strip</p>
FooBar singular with strip
</html>""", tmpl.generate(one=1, two=2).render())
def test_translate_i18n_choose_plural_singular_as_directive(self):
# Ticket 371
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="two">
<i18n:singular>FooBar</i18n:singular>
<i18n:plural>FooBars</i18n:plural>
</i18n:choose>
<i18n:choose numeral="one">
<i18n:singular>FooBar</i18n:singular>
<i18n:plural>FooBars</i18n:plural>
</i18n:choose>
</html>""")
translations = DummyTranslations({
('FooBar', 0): 'FuBar',
('FooBars', 1): 'FuBars',
'FooBar': 'FuBar',
'FooBars': 'FuBars',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
FuBars
FuBar
</html>""", tmpl.generate(one=1, two=2).render())
def test_translate_i18n_choose_as_attribute_with_params(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="two; fname, lname">
<p i18n:singular="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translations = DummyTranslations({
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
<p>Vohs John Doe</p>
</div>
</html>""", tmpl.generate(two=2, fname='John', lname='Doe').render())
def test_translate_i18n_choose_as_attribute_with_params_and_domain_as_param(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n"
i18n:domain="foo">
<div i18n:choose="two; fname, lname">
<p i18n:singular="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translations = DummyTranslations()
translations.add_domain('foo', {
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
<p>Vohs John Doe</p>
</div>
</html>""", tmpl.generate(two=2, fname='John', lname='Doe').render())
def test_translate_i18n_choose_as_directive_with_params(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="two" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
<i18n:choose numeral="one" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
</html>""")
translations = DummyTranslations({
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>Vohs John Doe</p>
<p>Voh John Doe</p>
</html>""", tmpl.generate(one=1, two=2,
fname='John', lname='Doe').render())
def test_translate_i18n_choose_as_directive_with_params_and_domain_as_directive(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:domain name="foo">
<i18n:choose numeral="two" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
</i18n:domain>
<i18n:choose numeral="one" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
</html>""")
translations = DummyTranslations()
translations.add_domain('foo', {
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>Vohs John Doe</p>
<p>Foo John Doe</p>
</html>""", tmpl.generate(one=1, two=2,
fname='John', lname='Doe').render())
def test_extract_i18n_choose_as_attribute(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="one">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</div>
<div i18n:choose="two">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual((3, 'ngettext', ('FooBar', 'FooBars'), []), messages[0])
self.assertEqual((7, 'ngettext', ('FooBar', 'FooBars'), []), messages[1])
def test_extract_i18n_choose_as_directive(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="two">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</i18n:choose>
<i18n:choose numeral="one">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</i18n:choose>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual((3, 'ngettext', ('FooBar', 'FooBars'), []), messages[0])
self.assertEqual((7, 'ngettext', ('FooBar', 'FooBars'), []), messages[1])
def test_extract_i18n_choose_as_attribute_with_params(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="two; fname, lname">
<p i18n:singular="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'), []),
messages[0])
def test_extract_i18n_choose_as_attribute_with_params_and_domain_as_param(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n"
i18n:domain="foo">
<div i18n:choose="two; fname, lname">
<p i18n:singular="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((4, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'), []),
messages[0])
def test_extract_i18n_choose_as_directive_with_params(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="two" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
<i18n:choose numeral="one" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'), []),
messages[0])
self.assertEqual((7, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'), []),
messages[1])
def test_extract_i18n_choose_as_directive_with_params_and_domain_as_directive(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:domain name="foo">
<i18n:choose numeral="two" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
</i18n:domain>
<i18n:choose numeral="one" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual((4, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'), []),
messages[0])
self.assertEqual((9, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'), []),
messages[1])
def test_extract_i18n_choose_as_attribute_with_params_and_comment(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="two; fname, lname" i18n:comment="As in Foo Bar">
<p i18n:singular="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'),
['As in Foo Bar']),
messages[0])
def test_extract_i18n_choose_as_directive_with_params_and_comment(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="two" params="fname, lname" i18n:comment="As in Foo Bar">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'),
['As in Foo Bar']),
messages[0])
def test_extract_i18n_choose_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:choose="num; num" title="Things">
<i18n:singular>
There is <a href="$link" title="View thing">${num} thing</a>.
</i18n:singular>
<i18n:plural>
There are <a href="$link" title="View things">${num} things</a>.
</i18n:plural>
</p>
</html>""")
translator = Translator()
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(4, len(messages))
self.assertEqual((3, None, 'Things', []), messages[0])
self.assertEqual((5, None, 'View thing', []), messages[1])
self.assertEqual((8, None, 'View things', []), messages[2])
self.assertEqual(
(3, 'ngettext', ('There is [1:%(num)s thing].',
'There are [1:%(num)s things].'), []),
messages[3])
def test_translate_i18n_choose_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:choose="num; num" title="Things">
<i18n:singular>
There is <a href="$link" title="View thing">${num} thing</a>.
</i18n:singular>
<i18n:plural>
There are <a href="$link" title="View things">${num} things</a>.
</i18n:plural>
</p>
</html>""")
translations = DummyTranslations({
'Things': 'Sachen',
'View thing': 'Sache betrachten',
'View things': 'Sachen betrachten',
('There is [1:%(num)s thing].', 0): 'Da ist [1:%(num)s Sache].',
('There is [1:%(num)s thing].', 1): 'Da sind [1:%(num)s Sachen].'
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p title="Sachen">
Da ist <a href="/things" title="Sache betrachten">1 Sache</a>.
</p>
</html>""", tmpl.generate(link="/things", num=1).render(encoding=None))
self.assertEqual(u"""<html>
<p title="Sachen">
Da sind <a href="/things" title="Sachen betrachten">3 Sachen</a>.
</p>
</html>""", tmpl.generate(link="/things", num=3).render(encoding=None))
def test_extract_i18n_choose_as_element_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="num" params="num">
<p i18n:singular="" title="Things">
There is <a href="$link" title="View thing">${num} thing</a>.
</p>
<p i18n:plural="" title="Things">
There are <a href="$link" title="View things">${num} things</a>.
</p>
</i18n:choose>
</html>""")
translator = Translator()
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(5, len(messages))
self.assertEqual((4, None, 'Things', []), messages[0])
self.assertEqual((5, None, 'View thing', []), messages[1])
self.assertEqual((7, None, 'Things', []), messages[2])
self.assertEqual((8, None, 'View things', []), messages[3])
self.assertEqual(
(3, 'ngettext', ('There is [1:%(num)s thing].',
'There are [1:%(num)s things].'), []),
messages[4])
def test_translate_i18n_choose_as_element_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="num" params="num">
<p i18n:singular="" title="Things">
There is <a href="$link" title="View thing">${num} thing</a>.
</p>
<p i18n:plural="" title="Things">
There are <a href="$link" title="View things">${num} things</a>.
</p>
</i18n:choose>
</html>""")
translations = DummyTranslations({
'Things': 'Sachen',
'View thing': 'Sache betrachten',
'View things': 'Sachen betrachten',
('There is [1:%(num)s thing].', 0): 'Da ist [1:%(num)s Sache].',
('There is [1:%(num)s thing].', 1): 'Da sind [1:%(num)s Sachen].'
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p title="Sachen">Da ist <a href="/things" title="Sache betrachten">1 Sache</a>.</p>
</html>""", tmpl.generate(link="/things", num=1).render(encoding=None))
self.assertEqual(u"""<html>
<p title="Sachen">Da sind <a href="/things" title="Sachen betrachten">3 Sachen</a>.</p>
</html>""", tmpl.generate(link="/things", num=3).render(encoding=None))
def test_translate_i18n_choose_and_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="two; fname, lname">
<p i18n:singular="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translations = DummyTranslations({
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
<p>Vohs John Doe</p>
</div>
</html>""", tmpl.generate(two=2, fname='John', lname='Doe').render())
def test_translate_i18n_choose_and_domain_and_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n"
i18n:domain="foo">
<div i18n:choose="two; fname, lname">
<p i18n:singular="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translations = DummyTranslations()
translations.add_domain('foo', {
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
<p>Vohs John Doe</p>
</div>
</html>""", tmpl.generate(two=2, fname='John', lname='Doe').render())
def test_translate_i18n_choose_and_singular_with_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="two; fname, lname">
<p i18n:singular="" py:strip="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
<div i18n:choose="one; fname, lname">
<p i18n:singular="" py:strip="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translations = DummyTranslations({
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
<p>Vohs John Doe</p>
</div>
<div>
Voh John Doe
</div>
</html>""", tmpl.generate(
one=1, two=2, fname='John', lname='Doe').render())
def test_translate_i18n_choose_and_plural_with_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="two; fname, lname">
<p i18n:singular="" py:strip="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translations = DummyTranslations({
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
Voh John Doe
</div>
</html>""", tmpl.generate(two=1, fname='John', lname='Doe').render())
def test_extract_i18n_choose_as_attribute_and_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="one" py:strip="">
<p i18n:singular="" py:strip="">FooBar</p>
<p i18n:plural="" py:strip="">FooBars</p>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, 'ngettext', ('FooBar', 'FooBars'), []), messages[0])
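# Tests for the i18n:domain directive, which routes translation of the
# enclosed markup through a named gettext domain (including nested and
# inlined domains, and domains spanning XInclude'd templates).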
class DomainDirectiveTestCase(unittest.TestCase):
def test_translate_i18n_domain_with_msg_directives(self):
#"""translate with i18n:domain and nested i18n:msg directives """
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:domain="foo">
<p i18n:msg="">FooBar</p>
<p i18n:msg="">Bar</p>
</div>
</html>""")
translations = DummyTranslations({'Bar': 'Voh'})
translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'PT_Foo'})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
<p>BarFoo</p>
<p>PT_Foo</p>
</div>
</html>""", tmpl.generate().render())
def test_translate_i18n_domain_with_inline_directives(self):
#"""translate with inlined i18n:domain and i18n:msg directives"""
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" i18n:domain="foo">FooBar</p>
</html>""")
translations = DummyTranslations({'Bar': 'Voh'})
translations.add_domain('foo', {'FooBar': 'BarFoo'})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>BarFoo</p>
</html>""", tmpl.generate().render())
def test_translate_i18n_domain_without_msg_directives(self):
#"""translate domain call without i18n:msg directives still uses current domain"""
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Bar</p>
<div i18n:domain="foo">
<p i18n:msg="">FooBar</p>
<p i18n:msg="">Bar</p>
<p>Bar</p>
</div>
<p>Bar</p>
</html>""")
translations = DummyTranslations({'Bar': 'Voh'})
translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'PT_Foo'})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>Voh</p>
<div>
<p>BarFoo</p>
<p>PT_Foo</p>
<p>PT_Foo</p>
</div>
<p>Voh</p>
</html>""", tmpl.generate().render())
def test_translate_i18n_domain_as_directive_not_attribute(self):
#"""translate with domain as directive"""
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:domain name="foo">
<p i18n:msg="">FooBar</p>
<p i18n:msg="">Bar</p>
<p>Bar</p>
</i18n:domain>
<p>Bar</p>
</html>""")
translations = DummyTranslations({'Bar': 'Voh'})
translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'PT_Foo'})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>BarFoo</p>
<p>PT_Foo</p>
<p>PT_Foo</p>
<p>Voh</p>
</html>""", tmpl.generate().render())
def test_translate_i18n_domain_nested_directives(self):
#"""translate with nested i18n:domain directives"""
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Bar</p>
<div i18n:domain="foo">
<p i18n:msg="">FooBar</p>
<p i18n:domain="bar" i18n:msg="">Bar</p>
<p>Bar</p>
</div>
<p>Bar</p>
</html>""")
translations = DummyTranslations({'Bar': 'Voh'})
translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'foo_Bar'})
translations.add_domain('bar', {'Bar': 'bar_Bar'})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>Voh</p>
<div>
<p>BarFoo</p>
<p>bar_Bar</p>
<p>foo_Bar</p>
</div>
<p>Voh</p>
</html>""", tmpl.generate().render())
def test_translate_i18n_domain_with_empty_nested_domain_directive(self):
#"""translate with empty nested i18n:domain directive does not use dngettext"""
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Bar</p>
<div i18n:domain="foo">
<p i18n:msg="">FooBar</p>
<p i18n:domain="" i18n:msg="">Bar</p>
<p>Bar</p>
</div>
<p>Bar</p>
</html>""")
translations = DummyTranslations({'Bar': 'Voh'})
translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'foo_Bar'})
translations.add_domain('bar', {'Bar': 'bar_Bar'})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>Voh</p>
<div>
<p>BarFoo</p>
<p>Voh</p>
<p>foo_Bar</p>
</div>
<p>Voh</p>
</html>""", tmpl.generate().render())
def test_translate_i18n_domain_with_inline_directive_on_START_NS(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="foo">
<p i18n:msg="">FooBar</p>
</html>""")
translations = DummyTranslations({'Bar': 'Voh'})
translations.add_domain('foo', {'FooBar': 'BarFoo'})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>BarFoo</p>
</html>""", tmpl.generate().render())
def test_translate_i18n_domain_with_inline_directive_on_START_NS_with_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n"
i18n:domain="foo" py:strip="">
<p i18n:msg="">FooBar</p>
</html>""")
translations = DummyTranslations({'Bar': 'Voh'})
translations.add_domain('foo', {'FooBar': 'BarFoo'})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""
<p>BarFoo</p>
""", tmpl.generate().render())
def test_translate_i18n_domain_with_nested_includes(self):
import os, shutil, tempfile
from genshi.template.loader import TemplateLoader
dirname = tempfile.mkdtemp(suffix='genshi_test')
try:
for idx in range(7):
file1 = open(os.path.join(dirname, 'tmpl%d.html' % idx), 'w')
try:
file1.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n" py:strip="">
<div>Included tmpl$idx</div>
<p i18n:msg="idx">Bar $idx</p>
<p i18n:domain="bar">Bar</p>
<p i18n:msg="idx" i18n:domain="">Bar $idx</p>
<p i18n:domain="" i18n:msg="idx">Bar $idx</p>
<py:if test="idx < 6">
<xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/>
</py:if>
</html>""")
finally:
file1.close()
file2 = open(os.path.join(dirname, 'tmpl10.html'), 'w')
try:
file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n"
i18n:domain="foo">
<xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/>
</html>""")
finally:
file2.close()
def callback(template):
translations = DummyTranslations({'Bar %(idx)s': 'Voh %(idx)s'})
translations.add_domain('foo', {'Bar %(idx)s': 'foo_Bar %(idx)s'})
translations.add_domain('bar', {'Bar': 'bar_Bar'})
translator = Translator(translations)
translator.setup(template)
loader = TemplateLoader([dirname], callback=callback)
tmpl = loader.load('tmpl10.html')
self.assertEqual("""<html>
<div>Included tmpl0</div>
<p>foo_Bar 0</p>
<p>bar_Bar</p>
<p>Voh 0</p>
<p>Voh 0</p>
<div>Included tmpl1</div>
<p>foo_Bar 1</p>
<p>bar_Bar</p>
<p>Voh 1</p>
<p>Voh 1</p>
<div>Included tmpl2</div>
<p>foo_Bar 2</p>
<p>bar_Bar</p>
<p>Voh 2</p>
<p>Voh 2</p>
<div>Included tmpl3</div>
<p>foo_Bar 3</p>
<p>bar_Bar</p>
<p>Voh 3</p>
<p>Voh 3</p>
<div>Included tmpl4</div>
<p>foo_Bar 4</p>
<p>bar_Bar</p>
<p>Voh 4</p>
<p>Voh 4</p>
<div>Included tmpl5</div>
<p>foo_Bar 5</p>
<p>bar_Bar</p>
<p>Voh 5</p>
<p>Voh 5</p>
<div>Included tmpl6</div>
<p>foo_Bar 6</p>
<p>bar_Bar</p>
<p>Voh 6</p>
<p>Voh 6</p>
</html>""", tmpl.generate(idx=-1).render())
finally:
shutil.rmtree(dirname)
def test_translate_i18n_domain_with_nested_includes_with_translatable_attrs(self):
import os, shutil, tempfile
from genshi.template.loader import TemplateLoader
dirname = tempfile.mkdtemp(suffix='genshi_test')
try:
for idx in range(4):
file1 = open(os.path.join(dirname, 'tmpl%d.html' % idx), 'w')
try:
file1.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n" py:strip="">
<div>Included tmpl$idx</div>
<p title="${dg('foo', 'Bar %(idx)s') % dict(idx=idx)}" i18n:msg="idx">Bar $idx</p>
<p title="Bar" i18n:domain="bar">Bar</p>
<p title="Bar" i18n:msg="idx" i18n:domain="">Bar $idx</p>
<p i18n:msg="idx" i18n:domain="" title="Bar">Bar $idx</p>
<p i18n:domain="" i18n:msg="idx" title="Bar">Bar $idx</p>
<py:if test="idx < 3">
<xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/>
</py:if>
</html>""")
finally:
file1.close()
file2 = open(os.path.join(dirname, 'tmpl10.html'), 'w')
try:
file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n"
i18n:domain="foo">
<xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/>
</html>""")
finally:
file2.close()
translations = DummyTranslations({'Bar %(idx)s': 'Voh %(idx)s',
'Bar': 'Voh'})
translations.add_domain('foo', {'Bar %(idx)s': 'foo_Bar %(idx)s'})
translations.add_domain('bar', {'Bar': 'bar_Bar'})
translator = Translator(translations)
def callback(template):
translator.setup(template)
loader = TemplateLoader([dirname], callback=callback)
tmpl = loader.load('tmpl10.html')
if IS_PYTHON2:
dgettext = translations.dugettext
else:
dgettext = translations.dgettext
self.assertEqual("""<html>
<div>Included tmpl0</div>
<p title="foo_Bar 0">foo_Bar 0</p>
<p title="bar_Bar">bar_Bar</p>
<p title="Voh">Voh 0</p>
<p title="Voh">Voh 0</p>
<p title="Voh">Voh 0</p>
<div>Included tmpl1</div>
<p title="foo_Bar 1">foo_Bar 1</p>
<p title="bar_Bar">bar_Bar</p>
<p title="Voh">Voh 1</p>
<p title="Voh">Voh 1</p>
<p title="Voh">Voh 1</p>
<div>Included tmpl2</div>
<p title="foo_Bar 2">foo_Bar 2</p>
<p title="bar_Bar">bar_Bar</p>
<p title="Voh">Voh 2</p>
<p title="Voh">Voh 2</p>
<p title="Voh">Voh 2</p>
<div>Included tmpl3</div>
<p title="foo_Bar 3">foo_Bar 3</p>
<p title="bar_Bar">bar_Bar</p>
<p title="Voh">Voh 3</p>
<p title="Voh">Voh 3</p>
<p title="Voh">Voh 3</p>
</html>""", tmpl.generate(idx=-1,
dg=dgettext).render())
finally:
shutil.rmtree(dirname)
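# Tests for the Babel-style extract() entry point, covering markup and text
# templates, gettext-family calls inside expressions, and ignored tags.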
class ExtractTestCase(unittest.TestCase):
def test_markup_template_extraction(self):
buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
<head>
<title>Example</title>
</head>
<body>
<h1>Example</h1>
<p>${_("Hello, %(name)s") % dict(name=username)}</p>
<p>${ngettext("You have %d item", "You have %d items", num)}</p>
</body>
</html>""")
results = list(extract(buf, ['_', 'ngettext'], [], {}))
self.assertEqual([
(3, None, 'Example', []),
(6, None, 'Example', []),
(7, '_', 'Hello, %(name)s', []),
(8, 'ngettext', ('You have %d item', 'You have %d items', None),
[]),
], results)
def test_extraction_without_text(self):
buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
<p title="Bar">Foo</p>
${ngettext("Singular", "Plural", num)}
</html>""")
results = list(extract(buf, ['_', 'ngettext'], [], {
'extract_text': 'no'
}))
self.assertEqual([
(3, 'ngettext', ('Singular', 'Plural', None), []),
], results)
def test_text_template_extraction(self):
buf = StringIO("""${_("Dear %(name)s") % {'name': name}},
${ngettext("Your item:", "Your items", len(items))}
#for item in items
* $item
#end
All the best,
Foobar""")
results = list(extract(buf, ['_', 'ngettext'], [], {
'template_class': 'genshi.template:TextTemplate'
}))
self.assertEqual([
(1, '_', 'Dear %(name)s', []),
(3, 'ngettext', ('Your item:', 'Your items', None), []),
(7, None, 'All the best,\n Foobar', [])
], results)
def test_extraction_with_keyword_arg(self):
buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
${gettext('Foobar', foo='bar')}
</html>""")
results = list(extract(buf, ['gettext'], [], {}))
self.assertEqual([
(2, 'gettext', ('Foobar'), []),
], results)
def test_extraction_with_nonstring_arg(self):
buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
${dgettext(curdomain, 'Foobar')}
</html>""")
results = list(extract(buf, ['dgettext'], [], {}))
self.assertEqual([
(2, 'dgettext', (None, 'Foobar'), []),
], results)
def test_extraction_inside_ignored_tags(self):
buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
<script type="text/javascript">
$('#llist').tabs({
remote: true,
spinner: "${_('Please wait...')}"
});
</script>
</html>""")
results = list(extract(buf, ['_'], [], {}))
self.assertEqual([
(5, '_', 'Please wait...', []),
], results)
def test_extraction_inside_ignored_tags_with_directives(self):
buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
<script type="text/javascript">
<py:if test="foobar">
alert("This shouldn't be extracted");
</py:if>
</script>
</html>""")
self.assertEqual([], list(extract(buf, ['_'], [], {})))
def test_extract_py_def_directive_with_py_strip(self):
# Failed extraction from Trac
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" py:strip="">
<py:def function="diff_options_fields(diff)">
<label for="style">View differences</label>
<select id="style" name="style">
<option selected="${diff.style == 'inline' or None}"
value="inline">inline</option>
<option selected="${diff.style == 'sidebyside' or None}"
value="sidebyside">side by side</option>
</select>
<div class="field">
Show <input type="text" name="contextlines" id="contextlines" size="2"
maxlength="3" value="${diff.options.contextlines < 0 and 'all' or diff.options.contextlines}" />
<label for="contextlines">lines around each change</label>
</div>
<fieldset id="ignore" py:with="options = diff.options">
<legend>Ignore:</legend>
<div class="field">
<input type="checkbox" id="ignoreblanklines" name="ignoreblanklines"
checked="${options.ignoreblanklines or None}" />
<label for="ignoreblanklines">Blank lines</label>
</div>
<div class="field">
<input type="checkbox" id="ignorecase" name="ignorecase"
checked="${options.ignorecase or None}" />
<label for="ignorecase">Case changes</label>
</div>
<div class="field">
<input type="checkbox" id="ignorewhitespace" name="ignorewhitespace"
checked="${options.ignorewhitespace or None}" />
<label for="ignorewhitespace">White space changes</label>
</div>
</fieldset>
<div class="buttons">
<input type="submit" name="update" value="${_('Update')}" />
</div>
</py:def></html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(10, len(messages))
self.assertEqual([
(3, None, 'View differences', []),
(6, None, 'inline', []),
(8, None, 'side by side', []),
(10, None, 'Show', []),
(13, None, 'lines around each change', []),
(16, None, 'Ignore:', []),
(20, None, 'Blank lines', []),
(25, None, 'Case changes', []),
(30, None, 'White space changes', []),
(34, '_', 'Update', [])], messages)
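# Collect the module doctests and all directive test cases into one suite.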
def suite():
suite = unittest.TestSuite()
suite.addTest(doctest.DocTestSuite(Translator.__module__))
suite.addTest(unittest.makeSuite(TranslatorTestCase, 'test'))
suite.addTest(unittest.makeSuite(MsgDirectiveTestCase, 'test'))
suite.addTest(unittest.makeSuite(ChooseDirectiveTestCase, 'test'))
suite.addTest(unittest.makeSuite(DomainDirectiveTestCase, 'test'))
suite.addTest(unittest.makeSuite(ExtractTestCase, 'test'))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| mitchellrj/genshi | genshi/filters/tests/i18n.py | Python | bsd-3-clause | 89,114 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/login/version_info_updater.h"
#include <vector>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "base/sys_info.h"
#include "base/task_runner_util.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/chromeos/policy/browser_policy_connector_chromeos.h"
#include "chrome/browser/chromeos/policy/device_cloud_policy_manager_chromeos.h"
#include "chrome/browser/chromeos/settings/cros_settings.h"
#include "chrome/common/chrome_version_info.h"
#include "chrome/grit/chromium_strings.h"
#include "chrome/grit/generated_resources.h"
#include "chromeos/settings/cros_settings_names.h"
#include "content/public/browser/browser_thread.h"
#include "ui/base/l10n/l10n_util.h"
namespace chromeos {
namespace {
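// Device settings that control status reporting; a change to any of them
// triggers a refresh of the enterprise info shown on the login screen.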
const char* const kReportingFlags[] = {
chromeos::kReportDeviceVersionInfo,
chromeos::kReportDeviceActivityTimes,
chromeos::kReportDeviceBootMode,
chromeos::kReportDeviceLocation,
};
// Strings used to generate the serial number part of the version string.
const char kSerialNumberPrefix[] = "SN:";
} // namespace
///////////////////////////////////////////////////////////////////////////////
// VersionInfoUpdater public:
VersionInfoUpdater::VersionInfoUpdater(Delegate* delegate)
: cros_settings_(chromeos::CrosSettings::Get()),
delegate_(delegate),
weak_pointer_factory_(this) {
}
VersionInfoUpdater::~VersionInfoUpdater() {
policy::BrowserPolicyConnectorChromeOS* connector =
g_browser_process->platform_part()->browser_policy_connector_chromeos();
policy::DeviceCloudPolicyManagerChromeOS* policy_manager =
connector->GetDeviceCloudPolicyManager();
if (policy_manager)
policy_manager->core()->store()->RemoveObserver(this);
}
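// Kicks off the asynchronous OS version fetch and starts observing the
// device policy store and the reporting settings for changes.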
void VersionInfoUpdater::StartUpdate(bool is_official_build) {
if (base::SysInfo::IsRunningOnChromeOS()) {
base::PostTaskAndReplyWithResult(
content::BrowserThread::GetBlockingPool(),
FROM_HERE,
base::Bind(&version_loader::GetVersion,
is_official_build ? version_loader::VERSION_SHORT_WITH_DATE
: version_loader::VERSION_FULL),
base::Bind(&VersionInfoUpdater::OnVersion,
weak_pointer_factory_.GetWeakPtr()));
} else {
UpdateVersionLabel();
}
policy::BrowserPolicyConnectorChromeOS* connector =
g_browser_process->platform_part()->browser_policy_connector_chromeos();
policy::DeviceCloudPolicyManagerChromeOS* policy_manager =
connector->GetDeviceCloudPolicyManager();
if (policy_manager) {
policy_manager->core()->store()->AddObserver(this);
// Ensure that we have up-to-date enterprise info in case enterprise policy
// is already fetched and has finished initialization.
UpdateEnterpriseInfo();
}
// Watch for changes to the reporting flags.
base::Closure callback =
base::Bind(&VersionInfoUpdater::UpdateEnterpriseInfo,
base::Unretained(this));
for (unsigned int i = 0; i < arraysize(kReportingFlags); ++i) {
subscriptions_.push_back(
cros_settings_->AddSettingsObserver(kReportingFlags[i],
callback).release());
}
}
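// Rebuilds the combined version label (product name, browser version, OS
// version and serial number) and hands it to the delegate.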
void VersionInfoUpdater::UpdateVersionLabel() {
if (version_text_.empty())
return;
UpdateSerialNumberInfo();
chrome::VersionInfo version_info;
std::string label_text = l10n_util::GetStringFUTF8(
IDS_LOGIN_VERSION_LABEL_FORMAT,
l10n_util::GetStringUTF16(IDS_PRODUCT_NAME),
base::UTF8ToUTF16(version_info.Version()),
base::UTF8ToUTF16(version_text_),
base::UTF8ToUTF16(serial_number_text_));
// Workaround for incorrect width calculation in old fonts.
// TODO(glotov): remove the following line when new fonts are used.
label_text += ' ';
if (delegate_)
delegate_->OnOSVersionLabelTextUpdated(label_text);
}
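// Refreshes the enterprise-ownership notice from the current policy
// connector's enrollment domain.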
void VersionInfoUpdater::UpdateEnterpriseInfo() {
policy::BrowserPolicyConnectorChromeOS* connector =
g_browser_process->platform_part()->browser_policy_connector_chromeos();
SetEnterpriseInfo(connector->GetEnterpriseDomain());
}
void VersionInfoUpdater::SetEnterpriseInfo(const std::string& domain_name) {
// Update the notification about device status reporting.
if (delegate_ && !domain_name.empty()) {
std::string enterprise_info;
enterprise_info = l10n_util::GetStringFUTF8(
IDS_DEVICE_OWNED_BY_NOTICE,
base::UTF8ToUTF16(domain_name));
delegate_->OnEnterpriseInfoUpdated(enterprise_info);
}
}
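// Fills in the "SN:<machine id>" fragment when a machine ID is available.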
void VersionInfoUpdater::UpdateSerialNumberInfo() {
std::string sn = policy::DeviceCloudPolicyManagerChromeOS::GetMachineID();
if (!sn.empty()) {
serial_number_text_ = kSerialNumberPrefix;
serial_number_text_.append(sn);
}
}
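// Callback for the asynchronous version fetch posted in StartUpdate().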
void VersionInfoUpdater::OnVersion(const std::string& version) {
version_text_ = version;
UpdateVersionLabel();
}
void VersionInfoUpdater::OnStoreLoaded(policy::CloudPolicyStore* store) {
UpdateEnterpriseInfo();
}
void VersionInfoUpdater::OnStoreError(policy::CloudPolicyStore* store) {
UpdateEnterpriseInfo();
}
} // namespace chromeos
| mohamed--abdel-maksoud/chromium.src | chrome/browser/chromeos/login/version_info_updater.cc | C++ | bsd-3-clause | 5,419 |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("SimpleNavigation")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("SimpleNavigation")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
[assembly: ComVisible(false)] | Windows-Readiness/WinDevWorkshop | RU/!RU 01. Introduction/01. Lab B. Solution/Exercise 2/SimpleNavigation/Properties/AssemblyInfo.cs | C# | mit | 1,052 |
//
// Copyright 2012 Christian Henning
//
// Distributed under the Boost Software License, Version 1.0
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt
//
#ifndef BOOST_GIL_EXTENSION_IO_PNG_DETAIL_WRITER_BACKEND_HPP
#define BOOST_GIL_EXTENSION_IO_PNG_DETAIL_WRITER_BACKEND_HPP
#include <boost/gil/extension/io/png/tags.hpp>
#include <boost/gil/extension/io/png/detail/base.hpp>
#include <boost/gil/extension/io/png/detail/supported_types.hpp>
#include <boost/gil/io/base.hpp>
#include <boost/gil/io/typedefs.hpp>
namespace boost { namespace gil {
#if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
#pragma warning(push)
#pragma warning(disable:4512) //assignment operator could not be generated
#pragma warning(disable:4611) //interaction between '_setjmp' and C++ object destruction is non-portable
#endif
///
/// PNG Writer Backend
///
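///
/// Normally reached through the public GIL IO API rather than used
/// directly; a rough usage sketch (assuming the png.hpp convenience
/// header and an rgb8 image) might look like:
///
///   boost::gil::rgb8_image_t img( 320, 240 );
///   boost::gil::write_view( "out.png", boost::gil::const_view( img ),
///                           boost::gil::png_tag() );
///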
template< typename Device >
struct writer_backend< Device
, png_tag
>
: public detail::png_struct_info_wrapper
{
private:
using this_t = writer_backend<Device, png_tag>;
public:
using format_tag_t = png_tag;
///
/// Constructor
///
writer_backend( const Device& io_dev
, const image_write_info< png_tag >& info
)
: png_struct_info_wrapper( false )
, _io_dev( io_dev )
, _info( info )
{
// Create and initialize the png_struct with the desired error handler
// functions. If you want to use the default stderr and longjump method,
// you can supply NULL for the last three parameters. We also check that
// the library version is compatible with the one used at compile time,
// in case we are using dynamically linked libraries. REQUIRED.
get()->_struct = png_create_write_struct( PNG_LIBPNG_VER_STRING
, nullptr // user_error_ptr
, nullptr // user_error_fn
, nullptr // user_warning_fn
);
io_error_if( get_struct() == nullptr
, "png_writer: fail to call png_create_write_struct()"
);
// Allocate/initialize the image information data. REQUIRED
get()->_info = png_create_info_struct( get_struct() );
if( get_info() == nullptr )
{
png_destroy_write_struct( &get()->_struct
, nullptr
);
io_error( "png_writer: fail to call png_create_info_struct()" );
}
// Set error handling. REQUIRED if you aren't supplying your own
// error handling functions in the png_create_write_struct() call.
if( setjmp( png_jmpbuf( get_struct() )))
{
//free all of the memory associated with the png_ptr and info_ptr
png_destroy_write_struct( &get()->_struct
, &get()->_info
);
io_error( "png_writer: fail to call setjmp()" );
}
init_io( get_struct() );
}
protected:
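/// Writes the IHDR chunk and any optional chunks enabled in the supplied
/// image_write_info (cHRM, gAMA, iCCP, sRGB, PLTE, text, timestamps,
/// compression and filter settings, ...) ahead of the image rows.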
template< typename View >
void write_header( const View& view )
{
using png_rw_info_t = detail::png_write_support
<
typename channel_type<typename get_pixel_type<View>::type>::type,
typename color_space_type<View>::type
>;
// Set the image information here. Width and height are up to 2^31,
// bit_depth is one of 1, 2, 4, 8, or 16, but valid values also depend on
// the color_type selected. color_type is one of PNG_COLOR_TYPE_GRAY,
// PNG_COLOR_TYPE_GRAY_ALPHA, PNG_COLOR_TYPE_PALETTE, PNG_COLOR_TYPE_RGB,
// or PNG_COLOR_TYPE_RGB_ALPHA. interlace is either PNG_INTERLACE_NONE or
// PNG_INTERLACE_ADAM7, and the compression_type and filter_type MUST
// currently be PNG_COMPRESSION_TYPE_BASE and PNG_FILTER_TYPE_BASE. REQUIRED
png_set_IHDR( get_struct()
, get_info()
, static_cast< png_image_width::type >( view.width() )
, static_cast< png_image_height::type >( view.height() )
, static_cast< png_bitdepth::type >( png_rw_info_t::_bit_depth )
, static_cast< png_color_type::type >( png_rw_info_t::_color_type )
, _info._interlace_method
, _info._compression_type
, _info._filter_method
);
#ifdef BOOST_GIL_IO_PNG_FLOATING_POINT_SUPPORTED
if( _info._valid_cie_colors )
{
png_set_cHRM( get_struct()
, get_info()
, _info._white_x
, _info._white_y
, _info._red_x
, _info._red_y
, _info._green_x
, _info._green_y
, _info._blue_x
, _info._blue_y
);
}
if( _info._valid_file_gamma )
{
png_set_gAMA( get_struct()
, get_info()
, _info._file_gamma
);
}
#else
if( _info._valid_cie_colors )
{
png_set_cHRM_fixed( get_struct()
, get_info()
, _info._white_x
, _info._white_y
, _info._red_x
, _info._red_y
, _info._green_x
, _info._green_y
, _info._blue_x
, _info._blue_y
);
}
if( _info._valid_file_gamma )
{
png_set_gAMA_fixed( get_struct()
, get_info()
, _info._file_gamma
);
}
#endif // BOOST_GIL_IO_PNG_FLOATING_POINT_SUPPORTED
if( _info._valid_icc_profile )
{
#if PNG_LIBPNG_VER_MINOR >= 5
png_set_iCCP( get_struct()
, get_info()
, const_cast< png_charp >( _info._icc_name.c_str() )
, _info._iccp_compression_type
, reinterpret_cast< png_const_bytep >( & (_info._profile.front ()) )
, _info._profile_length
);
#else
png_set_iCCP( get_struct()
, get_info()
, const_cast< png_charp >( _info._icc_name.c_str() )
, _info._iccp_compression_type
, const_cast< png_charp >( & (_info._profile.front()) )
, _info._profile_length
);
#endif
}
if( _info._valid_intent )
{
png_set_sRGB( get_struct()
, get_info()
, _info._intent
);
}
if( _info._valid_palette )
{
png_set_PLTE( get_struct()
, get_info()
, const_cast< png_colorp >( &_info._palette.front() )
, _info._num_palette
);
}
if( _info._valid_background )
{
png_set_bKGD( get_struct()
, get_info()
, const_cast< png_color_16p >( &_info._background )
);
}
if( _info._valid_histogram )
{
png_set_hIST( get_struct()
, get_info()
, const_cast< png_uint_16p >( &_info._histogram.front() )
);
}
if( _info._valid_offset )
{
png_set_oFFs( get_struct()
, get_info()
, _info._offset_x
, _info._offset_y
, _info._off_unit_type
);
}
if( _info._valid_pixel_calibration )
{
std::vector< const char* > params( _info._num_params );
for( std::size_t i = 0; i < params.size(); ++i )
{
params[i] = _info._params[ i ].c_str();
}
png_set_pCAL( get_struct()
, get_info()
, const_cast< png_charp >( _info._purpose.c_str() )
, _info._X0
, _info._X1
, _info._cal_type
, _info._num_params
, const_cast< png_charp >( _info._units.c_str() )
, const_cast< png_charpp >( ¶ms.front() )
);
}
if( _info._valid_resolution )
{
png_set_pHYs( get_struct()
, get_info()
, _info._res_x
, _info._res_y
, _info._phy_unit_type
);
}
if( _info._valid_significant_bits )
{
png_set_sBIT( get_struct()
, get_info()
, const_cast< png_color_8p >( &_info._sig_bits )
);
}
#ifndef BOOST_GIL_IO_PNG_1_4_OR_LOWER
#ifdef BOOST_GIL_IO_PNG_FLOATING_POINT_SUPPORTED
if( _info._valid_scale_factors )
{
png_set_sCAL( get_struct()
, get_info()
, this->_info._scale_unit
, this->_info._scale_width
, this->_info._scale_height
);
}
#else
#ifdef BOOST_GIL_IO_PNG_FIXED_POINT_SUPPORTED
if( _info._valid_scale_factors )
{
png_set_sCAL_fixed( get_struct()
, get_info()
, this->_info._scale_unit
, this->_info._scale_width
, this->_info._scale_height
);
}
#else
if( _info._valid_scale_factors )
{
png_set_sCAL_s( get_struct()
, get_info()
, this->_info._scale_unit
, const_cast< png_charp >( this->_info._scale_width.c_str() )
, const_cast< png_charp >( this->_info._scale_height.c_str() )
);
}
#endif // BOOST_GIL_IO_PNG_FIXED_POINT_SUPPORTED
#endif // BOOST_GIL_IO_PNG_FLOATING_POINT_SUPPORTED
#endif // BOOST_GIL_IO_PNG_1_4_OR_LOWER
if( _info._valid_text )
{
std::vector< png_text > texts( _info._num_text );
for( std::size_t i = 0; i < texts.size(); ++i )
{
png_text pt;
pt.compression = _info._text[i]._compression;
pt.key = const_cast< png_charp >( this->_info._text[i]._key.c_str() );
pt.text = const_cast< png_charp >( this->_info._text[i]._text.c_str() );
pt.text_length = _info._text[i]._text.length();
texts[i] = pt;
}
png_set_text( get_struct()
, get_info()
, &texts.front()
, _info._num_text
);
}
if( _info._valid_modification_time )
{
png_set_tIME( get_struct()
, get_info()
, const_cast< png_timep >( &_info._mod_time )
);
}
if( _info._valid_transparency_factors )
{
int sample_max = ( 1u << _info._bit_depth );
/* libpng doesn't reject a tRNS chunk with out-of-range samples */
if( !( ( _info._color_type == PNG_COLOR_TYPE_GRAY
&& (int) _info._trans_values[0].gray > sample_max
)
|| ( _info._color_type == PNG_COLOR_TYPE_RGB
&&( (int) _info._trans_values[0].red > sample_max
|| (int) _info._trans_values[0].green > sample_max
|| (int) _info._trans_values[0].blue > sample_max
)
)
)
)
{
//@todo Fix that once reading transparency values works
/*
png_set_tRNS( get_struct()
, get_info()
, trans
, num_trans
, trans_values
);
*/
}
}
// Compression Levels - valid values are [0,9]
png_set_compression_level( get_struct()
, _info._compression_level
);
png_set_compression_mem_level( get_struct()
, _info._compression_mem_level
);
png_set_compression_strategy( get_struct()
, _info._compression_strategy
);
png_set_compression_window_bits( get_struct()
, _info._compression_window_bits
);
png_set_compression_method( get_struct()
, _info._compression_method
);
png_set_compression_buffer_size( get_struct()
, _info._compression_buffer_size
);
#ifdef BOOST_GIL_IO_PNG_DITHERING_SUPPORTED
// Dithering
if( _info._set_dithering )
{
png_set_dither( get_struct()
, &_info._dithering_palette.front()
, _info._dithering_num_palette
, _info._dithering_maximum_colors
, &_info._dithering_histogram.front()
, _info._full_dither
);
}
#endif // BOOST_GIL_IO_PNG_DITHERING_SUPPORTED
// Filter
if( _info._set_filter )
{
png_set_filter( get_struct()
, 0
, _info._filter
);
}
// Invert Mono
if( _info._invert_mono )
{
png_set_invert_mono( get_struct() );
}
// True Bits
if( _info._set_true_bits )
{
png_set_sBIT( get_struct()
, get_info()
, &_info._true_bits.front()
);
}
// sRGB Intent
if( _info._set_srgb_intent )
{
png_set_sRGB( get_struct()
, get_info()
, _info._srgb_intent
);
}
// Strip Alpha
if( _info._strip_alpha )
{
png_set_strip_alpha( get_struct() );
}
// Swap Alpha
if( _info._swap_alpha )
{
png_set_swap_alpha( get_struct() );
}
png_write_info( get_struct()
, get_info()
);
}
protected:
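/// libpng write callback: forwards the encoded bytes to the user device
/// registered through init_io().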
static void write_data( png_structp png_ptr
, png_bytep data
, png_size_t length
)
{
static_cast< Device* >( png_get_io_ptr( png_ptr ))->write( data
, length );
}
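/// libpng flush callback: flushes the user device.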
static void flush( png_structp png_ptr )
{
static_cast< Device* >(png_get_io_ptr(png_ptr) )->flush();
}
private:
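/// Registers the device pointer and the write/flush callbacks above with
/// libpng via png_set_write_fn().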
void init_io( png_structp png_ptr )
{
png_set_write_fn( png_ptr
, static_cast< void* > ( &this->_io_dev )
, static_cast< png_rw_ptr > ( &this_t::write_data )
, static_cast< png_flush_ptr >( &this_t::flush )
);
}
public:
Device _io_dev;
image_write_info< png_tag > _info;
};
#if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
#pragma warning(pop)
#endif
} // namespace gil
} // namespace boost
#endif
| kumakoko/KumaGL | third_lib/boost/1.75.0/boost/gil/extension/io/png/detail/writer_backend.hpp | C++ | mit | 16,790 |
# encoding: utf-8
require 'spec_helper'
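# Helpers shared by the examples below: base64-encode a string with Mail's
# own encoder, and compare decoded attachment bodies while accounting for
# binary string encodings on Ruby 1.9+.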
def encode_base64(str)
Mail::Encodings::Base64.encode(str)
end
def check_decoded(actual, expected)
if RUBY_VERSION >= '1.9'
expect(actual.encoding).to eq Encoding::BINARY
expect(actual).to eq expected.force_encoding(Encoding::BINARY)
else
expect(actual).to eq expected
end
end
describe "Attachments" do
before(:each) do
@mail = Mail.new
@test_png = File.open(fixture('attachments', 'test.png'), 'rb', &:read)
end
describe "from direct content" do
it "should work" do
@mail.attachments['test.png'] = @test_png
expect(@mail.attachments['test.png'].filename).to eq 'test.png'
check_decoded(@mail.attachments[0].decoded, @test_png)
end
it "should work out magically the mime_type" do
@mail.attachments['test.png'] = @test_png
expect(@mail.attachments[0].mime_type).to eq 'image/png'
end
it "should assign the filename" do
@mail.attachments['test.png'] = @test_png
expect(@mail.attachments[0].filename).to eq 'test.png'
end
it "should assign mime-encoded multibyte filename" do
@mail.attachments['てすと.txt'] = File.open(fixture('attachments', 'てすと.txt'), 'rb', &:read)
expect(@mail.attachments).not_to be_blank
expect(Mail::Encodings.decode_encode(@mail.attachments[0].filename, :decode)).to eq 'てすと.txt'
end
end
describe "from a supplied Hash" do
it "should work" do
@mail.attachments['test.png'] = { :content => @test_png }
expect(@mail.attachments[0].filename).to eq 'test.png'
check_decoded(@mail.attachments[0].decoded, @test_png)
end
it "should allow you to override the content_type" do
@mail.attachments['test.png'] = { :content => @test_png,
:content_type => "application/x-gzip" }
expect(@mail.attachments[0].content_type).to eq 'application/x-gzip'
end
it "should allow you to override the mime_type" do
@mail.attachments['test.png'] = { :content => @test_png,
:mime_type => "application/x-gzip" }
expect(@mail.attachments[0].mime_type).to eq 'application/x-gzip'
end
it "should allow you to override the mime_type" do
@mail.attachments['invoice.jpg'] = { :data => "you smiling",
:mime_type => "image/x-jpg",
:transfer_encoding => "base64" }
expect(@mail.attachments[0].mime_type).to eq 'image/x-jpg'
end
end
describe "decoding and encoding" do
it "should set its content_transfer_encoding" do
@mail.attachments['test.png'] = { :content => @test_png }
@mail.ready_to_send!
expect(@mail.attachments[0].content_transfer_encoding).to eq 'base64'
end
it "should encode its body to base64" do
@mail.attachments['test.png'] = { :content => @test_png }
@mail.ready_to_send!
expect(@mail.attachments[0].encoded).to include(encode_base64(@test_png))
end
it "should allow you to pass in an encoded attachment with an encoding" do
encoded_data = encode_base64(@test_png)
@mail.attachments['test.png'] = { :content => encoded_data,
:encoding => 'base64' }
check_decoded(@mail.attachments[0].decoded, @test_png)
end
it "should allow you set a mime type and encoding without overriding the encoding" do
encoded = encode_base64('<foo/>')
@mail.attachments['test.png'] = { :mime_type => 'text/xml', :content => encoded, :encoding => 'base64' }
expect(@mail.attachments[0].content_transfer_encoding).to eq 'base64'
check_decoded(@mail.attachments[0].decoded, '<foo/>')
end
it "should not allow you to pass in an encoded attachment with an unknown encoding" do
base64_encoded_data = encode_base64(@test_png)
expect {@mail.attachments['test.png'] = { :content => base64_encoded_data,
:encoding => 'weird_encoding' }}.to raise_error
end
it "should be able to call read on the attachment to return the decoded data" do
@mail.attachments['test.png'] = { :content => @test_png }
if RUBY_VERSION >= '1.9'
expected = @mail.attachments[0].read.force_encoding(@test_png.encoding)
else
expected = @mail.attachments[0].read
end
expect(expected).to eq @test_png
end
it "should only add one newline between attachment body and boundary" do
contents = "I have\ntwo lines with trailing newlines\n\n"
@mail.attachments['text.txt'] = { :content => contents}
encoded = @mail.encoded
regex = /\r\n#{Regexp.escape(contents.gsub(/\n/, "\r\n"))}\r\n--#{@mail.boundary}--\r\n\Z/
expect(encoded).to match regex
end
end
describe "multiple attachments" do
it "should allow you to pass in more than one attachment" do
mail = Mail.new
mail.attachments['test.pdf'] = File.open(fixture('attachments', 'test.pdf'), 'rb', &:read)
mail.attachments['test.gif'] = File.open(fixture('attachments', 'test.gif'), 'rb', &:read)
mail.attachments['test.jpg'] = File.open(fixture('attachments', 'test.jpg'), 'rb', &:read)
mail.attachments['test.zip'] = File.open(fixture('attachments', 'test.zip'), 'rb', &:read)
expect(mail.attachments[0].filename).to eq 'test.pdf'
expect(mail.attachments[1].filename).to eq 'test.gif'
expect(mail.attachments[2].filename).to eq 'test.jpg'
expect(mail.attachments[3].filename).to eq 'test.zip'
end
end
describe "inline attachments" do
it "should set the content_disposition to inline or attachment as appropriate" do
mail = Mail.new
mail.attachments['test.pdf'] = File.open(fixture('attachments', 'test.pdf'), 'rb', &:read)
expect(mail.attachments['test.pdf'].content_disposition).to eq 'attachment; filename=test.pdf'
mail.attachments.inline['test.png'] = File.open(fixture('attachments', 'test.png'), 'rb', &:read)
expect(mail.attachments.inline['test.png'].content_disposition).to eq 'inline; filename=test.png'
end
it "should return a cid" do
mail = Mail.new
mail.attachments.inline['test.png'] = @test_png
expect(mail.attachments['test.png'].url).to eq "cid:#{mail.attachments['test.png'].cid}"
end
it "should respond true to inline?" do
mail = Mail.new
mail.attachments.inline['test.png'] = @test_png
expect(mail.attachments['test.png']).to be_inline
end
end
describe "getting the content ID from an attachment" do
before(:each) do
@mail = Mail.new
@mail.attachments['test.gif'] = File.open(fixture('attachments', 'test.gif'), 'rb', &:read)
@cid = @mail.attachments['test.gif'].content_id
end
it "should return a content-id for the attachment on creation if passed inline => true" do
expect(@cid).not_to be_nil
end
it "should return a valid content-id on inline attachments" do
expect(Mail::ContentIdField.new(@cid).errors).to be_empty
end
it "should provide a URL escaped content_id (without brackets) for use inside an email" do
@inline = @mail.attachments['test.gif'].cid
uri_parser = URI.const_defined?(:Parser) ? URI::Parser.new : URI
expect(@inline).to eq uri_parser.escape(@cid.gsub(/^</, '').gsub(/>$/, ''))
end
end
describe "setting the content type correctly" do
it "should set the content type to multipart/mixed if none given and you add an attachment" do
mail = Mail.new
mail.attachments['test.pdf'] = File.open(fixture('attachments', 'test.pdf'), 'rb', &:read)
mail.encoded
expect(mail.mime_type).to eq 'multipart/mixed'
end
it "allows you to set the attachment before the content type" do
mail = Mail.new
mail.attachments["test.png"] = File.open(fixture('attachments', 'test.png'), 'rb', &:read)
mail.body = "Lots of HTML"
mail.mime_version = '1.0'
mail.content_type = 'text/html; charset=UTF-8'
end
end
describe "should handle filenames with non-7bit characters correctly" do
it "should not raise an exception with a filename that contains a non-7bit-character" do
filename = "f\u00f6\u00f6.b\u00e4r"
if RUBY_VERSION >= '1.9'
expect(filename.encoding).to eq Encoding::UTF_8
end
mail = Mail.new
expect {
mail.attachments[filename] = File.open(fixture('attachments', 'test.pdf'), 'rb', &:read)
}.not_to raise_error
end
end
end
describe "reading emails with attachments" do
describe "test emails" do
it "should find the attachment using content location" do
mail = Mail.read(fixture(File.join('emails', 'attachment_emails', 'attachment_content_location.eml')))
expect(mail.attachments.length).to eq 1
end
it "should find an attachment defined with 'name' and Content-Disposition: attachment" do
mail = Mail.read(fixture(File.join('emails', 'attachment_emails', 'attachment_content_disposition.eml')))
expect(mail.attachments.length).to eq 1
end
it "should use the content-type filename or name over the content-disposition filename" do
mail = Mail.read(fixture(File.join('emails', 'attachment_emails', 'attachment_content_disposition.eml')))
expect(mail.attachments[0].filename).to eq 'hello.rb'
end
it "should decode an attachment" do
mail = Mail.read(fixture(File.join('emails', 'attachment_emails', 'attachment_pdf.eml')))
expect(mail.attachments[0].decoded.length).to eq 1026
end
it "should find an attachment that has an encoded name value" do
mail = Mail.read(fixture(File.join('emails', 'attachment_emails', 'attachment_with_encoded_name.eml')))
expect(mail.attachments.length).to eq 1
result = mail.attachments[0].filename
if RUBY_VERSION >= '1.9'
expected = "01 Quien Te Dij\212at. Pitbull.mp3".force_encoding(result.encoding)
else
expected = "01 Quien Te Dij\212at. Pitbull.mp3"
end
expect(result).to eq expected
end
it "should find an attachment that has a name not surrounded by quotes" do
mail = Mail.read(fixture(File.join('emails', 'attachment_emails', "attachment_with_unquoted_name.eml")))
expect(mail.attachments.length).to eq 1
expect(mail.attachments.first.filename).to eq "This is a test.txt"
end
it "should find attachments inside parts with content-type message/rfc822" do
mail = Mail.read(fixture(File.join("emails",
"attachment_emails",
"attachment_message_rfc822.eml")))
expect(mail.attachments.length).to eq 1
expect(mail.attachments[0].decoded.length).to eq 1026
end
it "attach filename decoding (issue 83)" do
data = <<-limitMAIL
Subject: aaa
From: aaa@aaa.com
To: bbb@aaa.com
Content-Type: multipart/mixed; boundary=0016e64c0af257c3a7048b69e1ac
--0016e64c0af257c3a7048b69e1ac
Content-Type: multipart/alternative; boundary=0016e64c0af257c3a1048b69e1aa
--0016e64c0af257c3a1048b69e1aa
Content-Type: text/plain; charset=ISO-8859-1
aaa
--0016e64c0af257c3a1048b69e1aa
Content-Type: text/html; charset=ISO-8859-1
aaa<br>
--0016e64c0af257c3a1048b69e1aa--
--0016e64c0af257c3a7048b69e1ac
Content-Type: text/plain; charset=US-ASCII; name="=?utf-8?b?Rm90bzAwMDkuanBn?="
Content-Disposition: attachment; filename="=?utf-8?b?Rm90bzAwMDkuanBn?="
Content-Transfer-Encoding: base64
X-Attachment-Id: f_gbneqxxy0
YWFhCg==
--0016e64c0af257c3a7048b69e1ac--
limitMAIL
mail = Mail.new(data)
#~ puts Mail::Encodings.decode_encode(mail.attachments[0].filename, :decode)
expect(mail.attachments[0].filename).to eq "Foto0009.jpg"
end
end
end
describe "attachment order" do
it "should be preserved instead when content type exists" do
mail = Mail.new do
to "aaaa@aaaa.aaa"
from "aaaa2@aaaa.aaa"
subject "a subject"
date Time.now
text_part do
content_type 'text/plain; charset=UTF-8'
body "a \nsimplebody\n"
end
end
mail.attachments['test.zip'] = File.open(fixture('attachments', 'test.zip'), 'rb', &:read)
mail.attachments['test.pdf'] = File.open(fixture('attachments', 'test.pdf'), 'rb', &:read)
mail.attachments['test.gif'] = File.open(fixture('attachments', 'test.gif'), 'rb', &:read)
mail.attachments['test.jpg'] = File.open(fixture('attachments', 'test.jpg'), 'rb', &:read)
expect(mail.attachments[0].filename).to eq 'test.zip'
expect(mail.attachments[1].filename).to eq 'test.pdf'
expect(mail.attachments[2].filename).to eq 'test.gif'
expect(mail.attachments[3].filename).to eq 'test.jpg'
mail2 = Mail.new(mail.encoded)
expect(mail2.attachments[0].filename).to eq 'test.zip'
expect(mail2.attachments[1].filename).to eq 'test.pdf'
expect(mail2.attachments[2].filename).to eq 'test.gif'
expect(mail2.attachments[3].filename).to eq 'test.jpg'
end
end
| kjg/mail | spec/mail/attachments_list_spec.rb | Ruby | mit | 13,083 |
/*
* Copyright 2016 Rethink Robotics
*
* Copyright 2016 Chris Smith
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
let fs = require('fs');
let path = require('path');
let cmakePath = process.env.CMAKE_PREFIX_PATH;
let cmakePaths = cmakePath.split(':');
let jsMsgPath = 'share/gennodejs/ros';
let packagePaths = {};
module.exports = function (messagePackage) {
if (packagePaths.hasOwnProperty(messagePackage)) {
return packagePaths[messagePackage];
}
// else
const found = cmakePaths.some((cmakePath) => {
let path_ = path.join(cmakePath, jsMsgPath, messagePackage, '_index.js');
if (fs.existsSync(path_)) {
packagePaths[messagePackage] = require(path_);
return true;
}
return false;
});
if (found) {
return packagePaths[messagePackage];
}
// else
throw new Error('Unable to find message package ' + messagePackage + ' from CMAKE_PREFIX_PATH');
};
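// Illustrative usage (package name is made up): require('./find.js')('std_msgs') would
// return that package's generated _index.js on first call and the cached entry afterwards.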
| tarquasso/softroboticfish6 | fish/pi/ros/catkin_ws/src/rosserial/devel/share/gennodejs/ros/rosserial_msgs/find.js | JavaScript | mit | 1,464 |
/**
* @fileoverview Rule to flag wrapping non-iife in parens
* @author Gyandeep Singh
*/
"use strict";
//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------
/**
* Checks whether or not a given node is an `Identifier` node which was named a given name.
* @param {ASTNode} node - A node to check.
* @param {string} name - An expected name of the node.
* @returns {boolean} `true` if the node is an `Identifier` node which was named as expected.
*/
function isIdentifier(node, name) {
return node.type === "Identifier" && node.name === name;
}
/**
* Checks whether or not a given node is an argument of a specified method call.
* @param {ASTNode} node - A node to check.
* @param {number} index - An expected index of the node in arguments.
* @param {string} object - An expected name of the object of the method.
* @param {string} property - An expected name of the method.
* @returns {boolean} `true` if the node is an argument of the specified method call.
*/
function isArgumentOfMethodCall(node, index, object, property) {
const parent = node.parent;
return (
parent.type === "CallExpression" &&
parent.callee.type === "MemberExpression" &&
parent.callee.computed === false &&
isIdentifier(parent.callee.object, object) &&
isIdentifier(parent.callee.property, property) &&
parent.arguments[index] === node
);
}
/**
* Checks whether or not a given node is a property descriptor.
* @param {ASTNode} node - A node to check.
* @returns {boolean} `true` if the node is a property descriptor.
*/
function isPropertyDescriptor(node) {
// Object.defineProperty(obj, "foo", {set: ...})
if (isArgumentOfMethodCall(node, 2, "Object", "defineProperty") ||
isArgumentOfMethodCall(node, 2, "Reflect", "defineProperty")
) {
return true;
}
/*
* Object.defineProperties(obj, {foo: {set: ...}})
* Object.create(proto, {foo: {set: ...}})
*/
const grandparent = node.parent.parent;
return grandparent.type === "ObjectExpression" && (
isArgumentOfMethodCall(grandparent, 1, "Object", "create") ||
isArgumentOfMethodCall(grandparent, 1, "Object", "defineProperties")
);
}
//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------
module.exports = {
meta: {
docs: {
description: "enforce getter and setter pairs in objects",
category: "Best Practices",
recommended: false,
url: "https://eslint.org/docs/rules/accessor-pairs"
},
schema: [{
type: "object",
properties: {
getWithoutSet: {
type: "boolean"
},
setWithoutGet: {
type: "boolean"
}
},
additionalProperties: false
}],
messages: {
getter: "Getter is not present.",
setter: "Setter is not present."
}
},
create(context) {
const config = context.options[0] || {};
const checkGetWithoutSet = config.getWithoutSet === true;
const checkSetWithoutGet = config.setWithoutGet !== false;
/**
* Checks a object expression to see if it has setter and getter both present or none.
* @param {ASTNode} node The node to check.
* @returns {void}
* @private
*/
function checkLonelySetGet(node) {
let isSetPresent = false;
let isGetPresent = false;
const isDescriptor = isPropertyDescriptor(node);
for (let i = 0, end = node.properties.length; i < end; i++) {
const property = node.properties[i];
let propToCheck = "";
if (property.kind === "init") {
if (isDescriptor && !property.computed) {
propToCheck = property.key.name;
}
} else {
propToCheck = property.kind;
}
switch (propToCheck) {
case "set":
isSetPresent = true;
break;
case "get":
isGetPresent = true;
break;
default:
// Do nothing
}
if (isSetPresent && isGetPresent) {
break;
}
}
if (checkSetWithoutGet && isSetPresent && !isGetPresent) {
context.report({ node, messageId: "getter" });
} else if (checkGetWithoutSet && isGetPresent && !isSetPresent) {
context.report({ node, messageId: "setter" });
}
}
return {
ObjectExpression(node) {
if (checkSetWithoutGet || checkGetWithoutSet) {
checkLonelySetGet(node);
}
}
};
}
};
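// Illustrative configuration (not part of the original file): enabling both checks in an
// ESLint config would look like
//   "accessor-pairs": ["error", { "getWithoutSet": true, "setWithoutGet": true }]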
| EdwardStudy/myghostblog | versions/1.25.7/node_modules/eslint/lib/rules/accessor-pairs.js | JavaScript | mit | 5,257 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This is the version of the Android-specific Chromium linker that uses
// the crazy linker to load libraries.
// This source code *cannot* depend on anything from base/ or the C++
// STL, to keep the final library small, and avoid ugly dependency issues.
#include "legacy_linker_jni.h"
#include <crazy_linker.h>
#include <fcntl.h>
#include <jni.h>
#include <limits.h>
#include <stddef.h>
#include <stdlib.h>
#include <unistd.h>
#include "linker_jni.h"
namespace chromium_android_linker {
namespace {
// Retrieve the SDK build version and pass it into the crazy linker. This
// needs to be done early in initialization, before any other crazy linker
// code is run.
// |env| is the current JNI environment handle.
// On success, return true.
bool InitSDKVersionInfo(JNIEnv* env) {
jint value = 0;
if (!InitStaticInt(env, "android/os/Build$VERSION", "SDK_INT", &value))
return false;
crazy_set_sdk_build_version(static_cast<int>(value));
LOG_INFO("Set SDK build version to %d", static_cast<int>(value));
return true;
}
// The linker uses a single crazy_context_t object created on demand.
// There is no need to protect this against concurrent access, locking
// is already handled on the Java side.
crazy_context_t* GetCrazyContext() {
static crazy_context_t* s_crazy_context = nullptr;
if (!s_crazy_context) {
// Create new context.
s_crazy_context = crazy_context_create();
// Ensure libraries located in the same directory as the linker
// can be loaded before system ones.
crazy_context_add_search_path_for_address(
s_crazy_context, reinterpret_cast<void*>(&s_crazy_context));
}
return s_crazy_context;
}
// A scoped crazy_library_t that automatically closes the handle
// on scope exit, unless Release() has been called.
class ScopedLibrary {
public:
ScopedLibrary() : lib_(nullptr) {}
~ScopedLibrary() {
if (lib_)
crazy_library_close_with_context(lib_, GetCrazyContext());
}
crazy_library_t* Get() { return lib_; }
crazy_library_t** GetPtr() { return &lib_; }
crazy_library_t* Release() {
crazy_library_t* ret = lib_;
lib_ = nullptr;
return ret;
}
private:
crazy_library_t* lib_;
};
template <class LibraryOpener>
bool GenericLoadLibrary(JNIEnv* env,
const char* library_name,
jlong load_address,
jobject lib_info_obj,
const LibraryOpener& opener) {
LOG_INFO("Called for %s, at address 0x%llx", library_name, load_address);
crazy_context_t* context = GetCrazyContext();
if (!IsValidAddress(load_address)) {
LOG_ERROR("Invalid address 0x%llx", load_address);
return false;
}
// Set the desired load address (0 means randomize it).
crazy_context_set_load_address(context, static_cast<size_t>(load_address));
ScopedLibrary library;
if (!opener.Open(library.GetPtr(), library_name, context)) {
return false;
}
crazy_library_info_t info;
if (!crazy_library_get_info(library.Get(), context, &info)) {
LOG_ERROR("Could not get library information for %s: %s",
library_name, crazy_context_get_error(context));
return false;
}
// Release library object to keep it alive after the function returns.
library.Release();
s_lib_info_fields.SetLoadInfo(env,
lib_info_obj,
info.load_address, info.load_size);
LOG_INFO("Success loading library %s", library_name);
return true;
}
// Used for opening the library in a regular file.
class FileLibraryOpener {
public:
bool Open(crazy_library_t** library,
const char* library_name,
crazy_context_t* context) const;
};
bool FileLibraryOpener::Open(crazy_library_t** library,
const char* library_name,
crazy_context_t* context) const {
if (!crazy_library_open(library, library_name, context)) {
LOG_ERROR("Could not open %s: %s",
library_name, crazy_context_get_error(context));
return false;
}
return true;
}
// Used for opening the library in a zip file.
class ZipLibraryOpener {
public:
explicit ZipLibraryOpener(const char* zip_file) : zip_file_(zip_file) { }
bool Open(crazy_library_t** library,
const char* library_name,
crazy_context_t* context) const;
private:
const char* zip_file_;
};
bool ZipLibraryOpener::Open(crazy_library_t** library,
const char* library_name,
crazy_context_t* context) const {
if (!crazy_library_open_in_zip_file(library,
zip_file_,
library_name,
context)) {
LOG_ERROR("Could not open %s in zip file %s: %s",
library_name, zip_file_, crazy_context_get_error(context));
return false;
}
return true;
}
// Load a library with the chromium linker. This will also call its
// JNI_OnLoad() method, which shall register its methods. Note that
// lazy native method resolution will _not_ work after this, because
// Dalvik uses the system's dlsym() which won't see the new library,
// so explicit registration is mandatory.
//
// |env| is the current JNI environment handle.
// |clazz| is the static class handle for org.chromium.base.Linker,
// and is ignored here.
// |library_name| is the library name (e.g. libfoo.so).
// |load_address| is an explicit load address.
// |library_info| is a LibInfo handle used to communicate information
// with the Java side.
// Return true on success.
jboolean LoadLibrary(JNIEnv* env,
jclass clazz,
jstring library_name,
jlong load_address,
jobject lib_info_obj) {
String lib_name(env, library_name);
FileLibraryOpener opener;
return GenericLoadLibrary(env,
lib_name.c_str(),
static_cast<size_t>(load_address),
lib_info_obj,
opener);
}
// Load a library from a zipfile with the chromium linker. The
// library in the zipfile must be uncompressed and page aligned.
// The basename of the library is given. The library is expected
// to be lib/<abi_tag>/crazy.<basename>. The <abi_tag> used will be the
// same as the abi for this linker. The "crazy." prefix is included
// so that the Android Package Manager doesn't extract the library into
// /data/app-lib.
//
// Loading the library will also call its JNI_OnLoad() method, which
// shall register its methods. Note that lazy native method resolution
// will _not_ work after this, because Dalvik uses the system's dlsym()
// which won't see the new library, so explicit registration is mandatory.
//
// |env| is the current JNI environment handle.
// |clazz| is the static class handle for org.chromium.base.Linker,
// and is ignored here.
// |zipfile_name| is the filename of the zipfile containing the library.
// |library_name| is the library base name (e.g. libfoo.so).
// |load_address| is an explicit load address.
// |library_info| is a LibInfo handle used to communicate information
// with the Java side.
// Returns true on success.
jboolean LoadLibraryInZipFile(JNIEnv* env,
jclass clazz,
jstring zipfile_name,
jstring library_name,
jlong load_address,
jobject lib_info_obj) {
String zipfile_name_str(env, zipfile_name);
String lib_name(env, library_name);
ZipLibraryOpener opener(zipfile_name_str.c_str());
return GenericLoadLibrary(env,
lib_name.c_str(),
static_cast<size_t>(load_address),
lib_info_obj,
opener);
}
// Class holding the Java class and method ID for the Java side Linker
// postCallbackOnMainThread method.
struct JavaCallbackBindings_class {
jclass clazz;
jmethodID method_id;
// Initialize an instance.
bool Init(JNIEnv* env, jclass linker_class) {
clazz = reinterpret_cast<jclass>(env->NewGlobalRef(linker_class));
return InitStaticMethodId(env,
linker_class,
"postCallbackOnMainThread",
"(J)V",
&method_id);
}
};
static JavaCallbackBindings_class s_java_callback_bindings;
// Designated receiver function for callbacks from Java. Its name is known
// to the Java side.
// |env| is the current JNI environment handle and is ignored here.
// |clazz| is the static class handle for org.chromium.base.Linker,
// and is ignored here.
// |arg| is a pointer to an allocated crazy_callback_t, deleted after use.
void RunCallbackOnUiThread(JNIEnv* env, jclass clazz, jlong arg) {
crazy_callback_t* callback = reinterpret_cast<crazy_callback_t*>(arg);
LOG_INFO("Called back from java with handler %p, opaque %p",
callback->handler, callback->opaque);
crazy_callback_run(callback);
delete callback;
}
// Request a callback from Java. The supplied crazy_callback_t is valid only
// for the duration of this call, so we copy it to a newly allocated
// crazy_callback_t and then call the Java side's postCallbackOnMainThread.
// This will call back to our RunCallbackOnUiThread some time
// later on the UI thread.
// |callback_request| is a crazy_callback_t.
// |poster_opaque| is unused.
// Returns true if the callback request succeeds.
static bool PostForLaterExecution(crazy_callback_t* callback_request,
void* poster_opaque UNUSED) {
crazy_context_t* context = GetCrazyContext();
JavaVM* vm;
int minimum_jni_version;
crazy_context_get_java_vm(context,
reinterpret_cast<void**>(&vm),
&minimum_jni_version);
// Do not reuse JNIEnv from JNI_OnLoad, but retrieve our own.
JNIEnv* env;
if (JNI_OK != vm->GetEnv(
reinterpret_cast<void**>(&env), minimum_jni_version)) {
LOG_ERROR("Could not create JNIEnv");
return false;
}
// Copy the callback; the one passed as an argument may be temporary.
crazy_callback_t* callback = new crazy_callback_t();
*callback = *callback_request;
LOG_INFO("Calling back to java with handler %p, opaque %p",
callback->handler, callback->opaque);
jlong arg = static_cast<jlong>(reinterpret_cast<uintptr_t>(callback));
env->CallStaticVoidMethod(
s_java_callback_bindings.clazz, s_java_callback_bindings.method_id, arg);
// Back out and return false if we encounter a JNI exception.
if (env->ExceptionCheck() == JNI_TRUE) {
env->ExceptionDescribe();
env->ExceptionClear();
delete callback;
return false;
}
return true;
}
jboolean CreateSharedRelro(JNIEnv* env,
jclass clazz,
jstring library_name,
jlong load_address,
jobject lib_info_obj) {
String lib_name(env, library_name);
LOG_INFO("Called for %s", lib_name.c_str());
if (!IsValidAddress(load_address)) {
LOG_ERROR("Invalid address 0x%llx", load_address);
return false;
}
ScopedLibrary library;
if (!crazy_library_find_by_name(lib_name.c_str(), library.GetPtr())) {
LOG_ERROR("Could not find %s", lib_name.c_str());
return false;
}
crazy_context_t* context = GetCrazyContext();
size_t relro_start = 0;
size_t relro_size = 0;
int relro_fd = -1;
if (!crazy_library_create_shared_relro(library.Get(),
context,
static_cast<size_t>(load_address),
&relro_start,
&relro_size,
&relro_fd)) {
LOG_ERROR("Could not create shared RELRO sharing for %s: %s\n",
lib_name.c_str(), crazy_context_get_error(context));
return false;
}
s_lib_info_fields.SetRelroInfo(env,
lib_info_obj,
relro_start, relro_size, relro_fd);
return true;
}
jboolean UseSharedRelro(JNIEnv* env,
jclass clazz,
jstring library_name,
jobject lib_info_obj) {
String lib_name(env, library_name);
LOG_INFO("Called for %s, lib_info_ref=%p", lib_name.c_str(), lib_info_obj);
ScopedLibrary library;
if (!crazy_library_find_by_name(lib_name.c_str(), library.GetPtr())) {
LOG_ERROR("Could not find %s", lib_name.c_str());
return false;
}
crazy_context_t* context = GetCrazyContext();
size_t relro_start = 0;
size_t relro_size = 0;
int relro_fd = -1;
s_lib_info_fields.GetRelroInfo(env,
lib_info_obj,
&relro_start, &relro_size, &relro_fd);
LOG_INFO("library=%s relro start=%p size=%p fd=%d",
lib_name.c_str(), (void*)relro_start, (void*)relro_size, relro_fd);
if (!crazy_library_use_shared_relro(library.Get(),
context,
relro_start, relro_size, relro_fd)) {
LOG_ERROR("Could not use shared RELRO for %s: %s",
lib_name.c_str(), crazy_context_get_error(context));
return false;
}
LOG_INFO("Library %s using shared RELRO section!", lib_name.c_str());
return true;
}
const JNINativeMethod kNativeMethods[] = {
{"nativeLoadLibrary",
"("
"Ljava/lang/String;"
"J"
"Lorg/chromium/base/library_loader/Linker$LibInfo;"
")"
"Z",
reinterpret_cast<void*>(&LoadLibrary)},
{"nativeLoadLibraryInZipFile",
"("
"Ljava/lang/String;"
"Ljava/lang/String;"
"J"
"Lorg/chromium/base/library_loader/Linker$LibInfo;"
")"
"Z",
reinterpret_cast<void*>(&LoadLibraryInZipFile)},
{"nativeRunCallbackOnUiThread",
"("
"J"
")"
"V",
reinterpret_cast<void*>(&RunCallbackOnUiThread)},
{"nativeCreateSharedRelro",
"("
"Ljava/lang/String;"
"J"
"Lorg/chromium/base/library_loader/Linker$LibInfo;"
")"
"Z",
reinterpret_cast<void*>(&CreateSharedRelro)},
{"nativeUseSharedRelro",
"("
"Ljava/lang/String;"
"Lorg/chromium/base/library_loader/Linker$LibInfo;"
")"
"Z",
reinterpret_cast<void*>(&UseSharedRelro)},
};
const size_t kNumNativeMethods =
sizeof(kNativeMethods) / sizeof(kNativeMethods[0]);
} // namespace
bool LegacyLinkerJNIInit(JavaVM* vm, JNIEnv* env) {
LOG_INFO("Entering");
// Initialize SDK version info.
LOG_INFO("Retrieving SDK version info");
if (!InitSDKVersionInfo(env))
return false;
// Register native methods.
jclass linker_class;
if (!InitClassReference(env,
"org/chromium/base/library_loader/LegacyLinker",
&linker_class))
return false;
LOG_INFO("Registering native methods");
if (env->RegisterNatives(linker_class, kNativeMethods, kNumNativeMethods) < 0)
return false;
// Resolve and save the Java side Linker callback class and method.
LOG_INFO("Resolving callback bindings");
if (!s_java_callback_bindings.Init(env, linker_class)) {
return false;
}
// Save JavaVM* handle into context.
crazy_context_t* context = GetCrazyContext();
crazy_context_set_java_vm(context, vm, JNI_VERSION_1_4);
// Register the function that the crazy linker can call to post code
// for later execution.
crazy_context_set_callback_poster(context, &PostForLaterExecution, nullptr);
return true;
}
} // namespace chromium_android_linker
| junhuac/MQUIC | src/base/android/linker/legacy_linker_jni.cc | C++ | mit | 16,030 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Threading;
namespace System.Net.Sockets
{
// This class implements a safe socket handle.
// It uses an inner and outer SafeHandle to do so. The inner
// SafeHandle holds the actual socket, but only ever has one
// reference to it. The outer SafeHandle guards the inner
// SafeHandle with real ref counting. When the outer SafeHandle
// is cleaned up, it releases the inner SafeHandle - since
// its ref is the only ref to the inner SafeHandle, it deterministically
// gets closed at that point - no races with concurrent IO calls.
// This allows Close() on the outer SafeHandle to deterministically
// close the inner SafeHandle, in turn allowing the inner SafeHandle
// to block the user thread in case a graceful close has been
// requested. (It's not legal to block any other thread - such closes
// are always abortive.)
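    // Added commentary: the outer handle ref-counts callers through the usual SafeHandle
    // DangerousAddRef/DangerousRelease machinery, while its single reference to the
    // InnerSafeCloseSocket is what actually owns the OS socket. CloseAsIs() swaps the inner
    // handle out, disposes the outer one, spins until ReleaseHandle has run (_released),
    // and only then calls BlockingRelease() - which is how a graceful close is serialised
    // against concurrent IO.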
internal partial class SafeCloseSocket :
#if DEBUG
DebugSafeHandleMinusOneIsInvalid
#else
SafeHandleMinusOneIsInvalid
#endif
{
protected SafeCloseSocket() : base(true) { }
private InnerSafeCloseSocket _innerSocket;
private volatile bool _released;
#if DEBUG
private InnerSafeCloseSocket _innerSocketCopy;
#endif
public override bool IsInvalid
{
get
{
return IsClosed || base.IsInvalid;
}
}
#if DEBUG
public void AddRef()
{
try
{
// The inner socket can be closed by CloseAsIs and when SafeHandle runs ReleaseHandle.
InnerSafeCloseSocket innerSocket = Volatile.Read(ref _innerSocket);
if (innerSocket != null)
{
innerSocket.AddRef();
}
}
catch (Exception e)
{
Debug.Fail("SafeCloseSocket.AddRef after inner socket disposed." + e);
}
}
public void Release()
{
try
{
// The inner socket can be closed by CloseAsIs and when SafeHandle runs ReleaseHandle.
InnerSafeCloseSocket innerSocket = Volatile.Read(ref _innerSocket);
if (innerSocket != null)
{
innerSocket.Release();
}
}
catch (Exception e)
{
Debug.Fail("SafeCloseSocket.Release after inner socket disposed." + e);
}
}
#endif
private void SetInnerSocket(InnerSafeCloseSocket socket)
{
_innerSocket = socket;
SetHandle(socket.DangerousGetHandle());
#if DEBUG
_innerSocketCopy = socket;
#endif
}
private static SafeCloseSocket CreateSocket(InnerSafeCloseSocket socket)
{
SafeCloseSocket ret = new SafeCloseSocket();
CreateSocket(socket, ret);
if (NetEventSource.IsEnabled) NetEventSource.Info(null, ret);
return ret;
}
protected static void CreateSocket(InnerSafeCloseSocket socket, SafeCloseSocket target)
{
if (socket != null && socket.IsInvalid)
{
target.SetHandleAsInvalid();
return;
}
bool b = false;
try
{
socket.DangerousAddRef(ref b);
}
catch
{
if (b)
{
socket.DangerousRelease();
b = false;
}
}
finally
{
if (b)
{
target.SetInnerSocket(socket);
socket.Dispose();
}
else
{
target.SetHandleAsInvalid();
}
}
}
protected override bool ReleaseHandle()
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"_innerSocket={_innerSocket}");
_released = true;
InnerSafeCloseSocket innerSocket = _innerSocket == null ? null : Interlocked.Exchange<InnerSafeCloseSocket>(ref _innerSocket, null);
#if DEBUG
// On AppDomain unload we may still have pending Overlapped operations.
// ThreadPoolBoundHandle should handle this scenario by canceling them.
innerSocket?.LogRemainingOperations();
#endif
InnerReleaseHandle();
innerSocket?.DangerousRelease();
return true;
}
internal void CloseAsIs()
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"_innerSocket={_innerSocket}");
#if DEBUG
// If this throws it could be very bad.
try
{
#endif
InnerSafeCloseSocket innerSocket = _innerSocket == null ? null : Interlocked.Exchange<InnerSafeCloseSocket>(ref _innerSocket, null);
Dispose();
if (innerSocket != null)
{
// Wait until it's safe.
SpinWait sw = new SpinWait();
while (!_released)
{
sw.SpinOnce();
}
// Now free it with blocking.
innerSocket.BlockingRelease();
}
InnerReleaseHandle();
#if DEBUG
}
catch (Exception exception) when (!ExceptionCheck.IsFatal(exception))
{
NetEventSource.Fail(this, $"handle:{handle}, error:{exception}");
throw;
}
#endif
}
internal sealed partial class InnerSafeCloseSocket : SafeHandleMinusOneIsInvalid
{
private InnerSafeCloseSocket() : base(true) { }
private bool _blockable;
public override bool IsInvalid
{
get
{
return IsClosed || base.IsInvalid;
}
}
// This method is implicitly reliable and called from a CER.
protected override bool ReleaseHandle()
{
bool ret = false;
#if DEBUG
try
{
#endif
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"handle:{handle}");
SocketError errorCode = InnerReleaseHandle();
return ret = errorCode == SocketError.Success;
#if DEBUG
}
catch (Exception exception)
{
if (!ExceptionCheck.IsFatal(exception))
{
NetEventSource.Fail(this, $"handle:{handle}, error:{exception}");
}
ret = true; // Avoid a second assert.
throw;
}
finally
{
_closeSocketThread = Environment.CurrentManagedThreadId;
_closeSocketTick = Environment.TickCount;
if (!ret)
{
NetEventSource.Fail(this, $"ReleaseHandle failed. handle:{handle}");
}
}
#endif
}
#if DEBUG
private IntPtr _closeSocketHandle;
private SocketError _closeSocketResult = unchecked((SocketError)0xdeadbeef);
private SocketError _closeSocketLinger = unchecked((SocketError)0xdeadbeef);
private int _closeSocketThread;
private int _closeSocketTick;
private int _refCount = 0;
public void AddRef()
{
Interlocked.Increment(ref _refCount);
}
public void Release()
{
Interlocked.MemoryBarrier();
Debug.Assert(_refCount > 0, "InnerSafeCloseSocket: Release() called more times than AddRef");
Interlocked.Decrement(ref _refCount);
}
public void LogRemainingOperations()
{
Interlocked.MemoryBarrier();
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"Releasing with pending operations: {_refCount}");
}
#endif
// Use this method to close the socket handle using the linger options specified on the socket.
// Guaranteed to only be called once, under a CER, and not if regular DangerousRelease is called.
internal void BlockingRelease()
{
#if DEBUG
// Expected to have outstanding operations such as Accept.
LogRemainingOperations();
#endif
_blockable = true;
DangerousRelease();
}
}
}
}
| nbarbettini/corefx | src/Common/src/System/Net/SafeCloseSocket.cs | C# | mit | 9,141 |
// Package api provides a generic, low-level WebDriver API client for Go.
// All methods map directly to endpoints of the WebDriver Wire Protocol:
// https://code.google.com/p/selenium/wiki/JsonWireProtocol
//
// This package was previously internal to the agouti package. It currently
// does not have a fixed API, but this will change in the near future
// (with the addition of adequate documentation).
package api
| johanbrandhorst/protobuf | vendor/github.com/sclevine/agouti/api/api.go | GO | mit | 418 |
module ActiveRecord
module AttributeMethods
module Serialization
extend ActiveSupport::Concern
module ClassMethods
# If you have an attribute that needs to be saved to the database as an
# object, and retrieved as the same object, then specify the name of that
# attribute using this method and it will be handled automatically. The
# serialization is done through YAML. If +class_name+ is specified, the
# serialized object must be of that class on assignment and retrieval.
# Otherwise SerializationTypeMismatch will be raised.
#
# Empty objects as <tt>{}</tt>, in the case of +Hash+, or <tt>[]</tt>, in the case of
# +Array+, will always be persisted as null.
#
# Keep in mind that database adapters handle certain serialization tasks
# for you. For instance: +json+ and +jsonb+ types in PostgreSQL will be
# converted between JSON object/array syntax and Ruby +Hash+ or +Array+
# objects transparently. There is no need to use #serialize in this
# case.
#
# For more complex cases, such as conversion to or from your application
# domain objects, consider using the ActiveRecord::Attributes API.
#
# ==== Parameters
#
# * +attr_name+ - The field name that should be serialized.
# * +class_name_or_coder+ - Optional, a coder object, which responds to `.load` / `.dump`
# or a class name that the object type should be equal to.
#
# ==== Example
#
# # Serialize a preferences attribute.
# class User < ActiveRecord::Base
# serialize :preferences
# end
#
# # Serialize preferences using JSON as coder.
# class User < ActiveRecord::Base
# serialize :preferences, JSON
# end
#
# # Serialize preferences as Hash using YAML coder.
# class User < ActiveRecord::Base
# serialize :preferences, Hash
# end
def serialize(attr_name, class_name_or_coder = Object)
# When ::JSON is used, force it to go through the Active Support JSON encoder
# to ensure special objects (e.g. Active Record models) are dumped correctly
# using the #as_json hook.
coder = if class_name_or_coder == ::JSON
Coders::JSON
elsif [:load, :dump].all? { |x| class_name_or_coder.respond_to?(x) }
class_name_or_coder
else
Coders::YAMLColumn.new(class_name_or_coder)
end
decorate_attribute_type(attr_name, :serialize) do |type|
Type::Serialized.new(type, coder)
end
end
end
end
end
end
| afuerstenau/daily-notes | vendor/cache/ruby/2.5.0/gems/activerecord-5.0.6/lib/active_record/attribute_methods/serialization.rb | Ruby | mit | 2,841 |
//------------------------------------------------------------------------------
// <copyright file="PerformanceCountersElement.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Net.Configuration
{
using System;
using System.Configuration;
using System.Reflection;
using System.Security.Permissions;
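    // Added note: this element backs the <performanceCounters enabled="..."/> setting that
    // lives under <system.net><settings> in application configuration (element placement
    // assumed from the surrounding configuration classes); only the boolean "enabled"
    // attribute is exposed.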
public sealed class PerformanceCountersElement : ConfigurationElement
{
public PerformanceCountersElement()
{
this.properties.Add(this.enabled);
}
[ConfigurationProperty(ConfigurationStrings.Enabled, DefaultValue=false)]
public bool Enabled
{
get { return (bool) this[this.enabled]; }
set { this[this.enabled] = value; }
}
protected override ConfigurationPropertyCollection Properties
{
get
{
return this.properties;
}
}
ConfigurationPropertyCollection properties = new ConfigurationPropertyCollection();
readonly ConfigurationProperty enabled =
new ConfigurationProperty(ConfigurationStrings.Enabled, typeof(bool), false,
ConfigurationPropertyOptions.None);
}
}
| sekcheong/referencesource | System/net/System/Net/Configuration/PerformanceCountersElement.cs | C# | mit | 1,389 |
//used for the media picker dialog
angular.module("umbraco")
.controller("Umbraco.Dialogs.MediaPickerController",
function ($scope, mediaResource, umbRequestHelper, entityResource, $log, mediaHelper, eventsService, treeService, $cookies, $element, $timeout, notificationsService) {
var dialogOptions = $scope.dialogOptions;
$scope.onlyImages = dialogOptions.onlyImages;
$scope.showDetails = dialogOptions.showDetails;
$scope.multiPicker = (dialogOptions.multiPicker && dialogOptions.multiPicker !== "0") ? true : false;
$scope.startNodeId = dialogOptions.startNodeId ? dialogOptions.startNodeId : -1;
$scope.cropSize = dialogOptions.cropSize;
$scope.filesUploading = 0;
$scope.dropping = false;
$scope.progress = 0;
$scope.options = {
url: umbRequestHelper.getApiUrl("mediaApiBaseUrl", "PostAddFile") + "?origin=blueimp",
autoUpload: true,
dropZone: $element.find(".umb-dialogs-mediapicker.browser"),
fileInput: $element.find("input.uploader"),
formData: {
currentFolder: -1
}
};
//preload selected item
$scope.target = undefined;
if(dialogOptions.currentTarget){
$scope.target = dialogOptions.currentTarget;
}
$scope.submitFolder = function(e) {
if (e.keyCode === 13) {
e.preventDefault();
$scope.showFolderInput = false;
mediaResource
.addFolder($scope.newFolderName, $scope.options.formData.currentFolder)
.then(function(data) {
//we've added a new folder so lets clear the tree cache for that specific item
treeService.clearCache({
cacheKey: "__media", //this is the main media tree cache key
childrenOf: data.parentId //clear the children of the parent
});
$scope.gotoFolder(data);
});
}
};
$scope.gotoFolder = function(folder) {
if(!folder){
folder = {id: -1, name: "Media", icon: "icon-folder"};
}
if (folder.id > 0) {
entityResource.getAncestors(folder.id, "media")
.then(function(anc) {
// anc.splice(0,1);
$scope.path = _.filter(anc, function (f) {
return f.path.indexOf($scope.startNodeId) !== -1;
});
});
}
else {
$scope.path = [];
}
//mediaResource.rootMedia()
mediaResource.getChildren(folder.id)
.then(function(data) {
$scope.searchTerm = "";
$scope.images = data.items ? data.items : [];
});
$scope.options.formData.currentFolder = folder.id;
$scope.currentFolder = folder;
};
        //This executes prior to the whole processing, which we can use to get the UI going faster;
        //this also gives us the start callback to invoke to kick off the whole thing
$scope.$on('fileuploadadd', function (e, data) {
$scope.$apply(function () {
$scope.filesUploading++;
});
});
//when one is finished
$scope.$on('fileuploaddone', function (e, data) {
$scope.filesUploading--;
if ($scope.filesUploading == 0) {
$scope.$apply(function () {
$scope.progress = 0;
$scope.gotoFolder($scope.currentFolder);
});
}
//Show notifications!!!!
if (data.result && data.result.notifications && angular.isArray(data.result.notifications)) {
for (var n = 0; n < data.result.notifications.length; n++) {
notificationsService.showNotification(data.result.notifications[n]);
}
}
});
        // All this setup is to add the dropzone area and make sure it gets removed if dragging is aborted!
$scope.$on('fileuploaddragover', function (e, data) {
if (!$scope.dragClearTimeout) {
$scope.$apply(function () {
$scope.dropping = true;
});
}
else {
$timeout.cancel($scope.dragClearTimeout);
}
$scope.dragClearTimeout = $timeout(function () {
$scope.dropping = null;
$scope.dragClearTimeout = null;
}, 300);
});
$scope.clickHandler = function(image, ev, select) {
ev.preventDefault();
if (image.isFolder && !select) {
$scope.gotoFolder(image);
}else{
eventsService.emit("dialogs.mediaPicker.select", image);
//we have 3 options add to collection (if multi) show details, or submit it right back to the callback
if ($scope.multiPicker) {
$scope.select(image);
image.cssclass = ($scope.dialogData.selection.indexOf(image) > -1) ? "selected" : "";
}else if($scope.showDetails) {
$scope.target= image;
$scope.target.url = mediaHelper.resolveFile(image);
}else{
$scope.submit(image);
}
}
};
$scope.exitDetails = function(){
if(!$scope.currentFolder){
$scope.gotoFolder();
}
$scope.target = undefined;
};
//default root item
if(!$scope.target){
$scope.gotoFolder({ id: $scope.startNodeId, name: "Media", icon: "icon-folder" });
}
}); | gregoriusxu/Umbraco-CMS | src/Umbraco.Web.UI.Client/src/views/common/dialogs/mediapicker.controller.js | JavaScript | mit | 6,762 |
package volume
import (
"bytes"
"fmt"
"io/ioutil"
"testing"
"github.com/docker/docker/api/types"
"github.com/docker/docker/cli/internal/test"
"github.com/pkg/errors"
// Import builders to get the builder function as package function
. "github.com/docker/docker/cli/internal/test/builders"
"github.com/docker/docker/pkg/testutil/assert"
"github.com/docker/docker/pkg/testutil/golden"
)
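// Note (added): the tests below are table-driven; the error cases assert on substrings of
// the returned error, while the happy paths render the command output and compare it with
// checked-in golden files via the golden helper.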
func TestVolumeInspectErrors(t *testing.T) {
testCases := []struct {
args []string
flags map[string]string
volumeInspectFunc func(volumeID string) (types.Volume, error)
expectedError string
}{
{
expectedError: "requires at least 1 argument",
},
{
args: []string{"foo"},
volumeInspectFunc: func(volumeID string) (types.Volume, error) {
return types.Volume{}, errors.Errorf("error while inspecting the volume")
},
expectedError: "error while inspecting the volume",
},
{
args: []string{"foo"},
flags: map[string]string{
"format": "{{invalid format}}",
},
expectedError: "Template parsing error",
},
{
args: []string{"foo", "bar"},
volumeInspectFunc: func(volumeID string) (types.Volume, error) {
if volumeID == "foo" {
return types.Volume{
Name: "foo",
}, nil
}
return types.Volume{}, errors.Errorf("error while inspecting the volume")
},
expectedError: "error while inspecting the volume",
},
}
for _, tc := range testCases {
buf := new(bytes.Buffer)
cmd := newInspectCommand(
test.NewFakeCli(&fakeClient{
volumeInspectFunc: tc.volumeInspectFunc,
}, buf),
)
cmd.SetArgs(tc.args)
for key, value := range tc.flags {
cmd.Flags().Set(key, value)
}
cmd.SetOutput(ioutil.Discard)
assert.Error(t, cmd.Execute(), tc.expectedError)
}
}
func TestVolumeInspectWithoutFormat(t *testing.T) {
testCases := []struct {
name string
args []string
volumeInspectFunc func(volumeID string) (types.Volume, error)
}{
{
name: "single-volume",
args: []string{"foo"},
volumeInspectFunc: func(volumeID string) (types.Volume, error) {
if volumeID != "foo" {
return types.Volume{}, errors.Errorf("Invalid volumeID, expected %s, got %s", "foo", volumeID)
}
return *Volume(), nil
},
},
{
name: "multiple-volume-with-labels",
args: []string{"foo", "bar"},
volumeInspectFunc: func(volumeID string) (types.Volume, error) {
return *Volume(VolumeName(volumeID), VolumeLabels(map[string]string{
"foo": "bar",
})), nil
},
},
}
for _, tc := range testCases {
buf := new(bytes.Buffer)
cmd := newInspectCommand(
test.NewFakeCli(&fakeClient{
volumeInspectFunc: tc.volumeInspectFunc,
}, buf),
)
cmd.SetArgs(tc.args)
assert.NilError(t, cmd.Execute())
actual := buf.String()
expected := golden.Get(t, []byte(actual), fmt.Sprintf("volume-inspect-without-format.%s.golden", tc.name))
assert.EqualNormalizedString(t, assert.RemoveSpace, actual, string(expected))
}
}
func TestVolumeInspectWithFormat(t *testing.T) {
volumeInspectFunc := func(volumeID string) (types.Volume, error) {
return *Volume(VolumeLabels(map[string]string{
"foo": "bar",
})), nil
}
testCases := []struct {
name string
format string
args []string
volumeInspectFunc func(volumeID string) (types.Volume, error)
}{
{
name: "simple-template",
format: "{{.Name}}",
args: []string{"foo"},
volumeInspectFunc: volumeInspectFunc,
},
{
name: "json-template",
format: "{{json .Labels}}",
args: []string{"foo"},
volumeInspectFunc: volumeInspectFunc,
},
}
for _, tc := range testCases {
buf := new(bytes.Buffer)
cmd := newInspectCommand(
test.NewFakeCli(&fakeClient{
volumeInspectFunc: tc.volumeInspectFunc,
}, buf),
)
cmd.SetArgs(tc.args)
cmd.Flags().Set("format", tc.format)
assert.NilError(t, cmd.Execute())
actual := buf.String()
expected := golden.Get(t, []byte(actual), fmt.Sprintf("volume-inspect-with-format.%s.golden", tc.name))
assert.EqualNormalizedString(t, assert.RemoveSpace, actual, string(expected))
}
}
| Originate/exosphere | vendor/github.com/moby/moby/cli/command/volume/inspect_test.go | GO | mit | 4,211 |
/*
*********************************************************************************************
System Loader Implementation
- Implemented to https://github.com/jorendorff/js-loaders/blob/master/browser-loader.js
- <script type="module"> supported
*********************************************************************************************
*/
var System;
function SystemLoader() {
Loader.call(this);
this.paths = {};
}
// NB no specification provided for System.paths, used ideas discussed in https://github.com/jorendorff/js-loaders/issues/25
function applyPaths(paths, name) {
// most specific (most number of slashes in path) match wins
var pathMatch = '', wildcard, maxSlashCount = 0;
// check to see if we have a paths entry
for (var p in paths) {
var pathParts = p.split('*');
if (pathParts.length > 2)
throw new TypeError('Only one wildcard in a path is permitted');
// exact path match
if (pathParts.length == 1) {
if (name == p) {
pathMatch = p;
break;
}
}
// wildcard path match
else {
var slashCount = p.split('/').length;
if (slashCount >= maxSlashCount &&
name.substr(0, pathParts[0].length) == pathParts[0] &&
name.substr(name.length - pathParts[1].length) == pathParts[1]) {
maxSlashCount = slashCount;
pathMatch = p;
wildcard = name.substr(pathParts[0].length, name.length - pathParts[1].length - pathParts[0].length);
}
}
}
var outPath = paths[pathMatch] || name;
if (typeof wildcard == 'string')
outPath = outPath.replace('*', wildcard);
return outPath;
}
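// Illustrative example (names are made up): with paths = { 'app/*': 'lib/*.js' },
// applyPaths(paths, 'app/main') matches the 'app/*' entry, captures 'main' as the wildcard
// and returns 'lib/main.js'. An exact-match entry wins outright, and among wildcard entries
// the one with the most slashes is preferred.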
// inline Object.create-style class extension
function LoaderProto() {}
LoaderProto.prototype = Loader.prototype;
SystemLoader.prototype = new LoaderProto();
| nfl/es6-module-loader | src/system.js | JavaScript | mit | 1,825 |
/// -*- tab-width: 4; Mode: C++; c-basic-offset: 4; indent-tabs-mode: nil -*-
#include <AP_HAL.h>
#include "AP_InertialSensor_MPU6000.h"
extern const AP_HAL::HAL& hal;
// MPU6000 accelerometer scaling
#define MPU6000_ACCEL_SCALE_1G (GRAVITY_MSS / 4096.0f)
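// Added note: 4096 is the accelerometer sensitivity in LSB per g, which presumes the driver
// configures the +/-8g full-scale range; dividing gravity (m/s/s) by it turns raw counts
// directly into m/s/s.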
// MPU 6000 registers
#define MPUREG_XG_OFFS_TC 0x00
#define MPUREG_YG_OFFS_TC 0x01
#define MPUREG_ZG_OFFS_TC 0x02
#define MPUREG_X_FINE_GAIN 0x03
#define MPUREG_Y_FINE_GAIN 0x04
#define MPUREG_Z_FINE_GAIN 0x05
#define MPUREG_XA_OFFS_H 0x06 // X axis accelerometer offset (high byte)
#define MPUREG_XA_OFFS_L 0x07 // X axis accelerometer offset (low byte)
#define MPUREG_YA_OFFS_H 0x08 // Y axis accelerometer offset (high byte)
#define MPUREG_YA_OFFS_L 0x09 // Y axis accelerometer offset (low byte)
#define MPUREG_ZA_OFFS_H 0x0A // Z axis accelerometer offset (high byte)
#define MPUREG_ZA_OFFS_L 0x0B // Z axis accelerometer offset (low byte)
#define MPUREG_PRODUCT_ID 0x0C // Product ID Register
#define MPUREG_XG_OFFS_USRH 0x13 // X axis gyro offset (high byte)
#define MPUREG_XG_OFFS_USRL 0x14 // X axis gyro offset (low byte)
#define MPUREG_YG_OFFS_USRH 0x15 // Y axis gyro offset (high byte)
#define MPUREG_YG_OFFS_USRL 0x16 // Y axis gyro offset (low byte)
#define MPUREG_ZG_OFFS_USRH 0x17 // Z axis gyro offset (high byte)
#define MPUREG_ZG_OFFS_USRL 0x18 // Z axis gyro offset (low byte)
#define MPUREG_SMPLRT_DIV 0x19 // sample rate. Fsample= 1Khz/(<this value>+1) = 200Hz
# define MPUREG_SMPLRT_1000HZ 0x00
# define MPUREG_SMPLRT_500HZ 0x01
# define MPUREG_SMPLRT_250HZ 0x03
# define MPUREG_SMPLRT_200HZ 0x04
# define MPUREG_SMPLRT_100HZ 0x09
# define MPUREG_SMPLRT_50HZ 0x13
#define MPUREG_CONFIG 0x1A
#define MPUREG_GYRO_CONFIG 0x1B
// bit definitions for MPUREG_GYRO_CONFIG
# define BITS_GYRO_FS_250DPS 0x00
# define BITS_GYRO_FS_500DPS 0x08
# define BITS_GYRO_FS_1000DPS 0x10
# define BITS_GYRO_FS_2000DPS 0x18
# define BITS_GYRO_FS_MASK 0x18 // only bits 3 and 4 are used for gyro full scale so use this to mask off other bits
# define BITS_GYRO_ZGYRO_SELFTEST 0x20
# define BITS_GYRO_YGYRO_SELFTEST 0x40
# define BITS_GYRO_XGYRO_SELFTEST 0x80
#define MPUREG_ACCEL_CONFIG 0x1C
#define MPUREG_MOT_THR 0x1F // detection threshold for Motion interrupt generation. Motion is detected when the absolute value of any of the accelerometer measurements exceeds this
#define MPUREG_MOT_DUR 0x20 // duration counter threshold for Motion interrupt generation. The duration counter ticks at 1 kHz, therefore MOT_DUR has a unit of 1 LSB = 1 ms
#define MPUREG_ZRMOT_THR 0x21 // detection threshold for Zero Motion interrupt generation.
#define MPUREG_ZRMOT_DUR 0x22 // duration counter threshold for Zero Motion interrupt generation. The duration counter ticks at 16 Hz, therefore ZRMOT_DUR has a unit of 1 LSB = 64 ms.
#define MPUREG_FIFO_EN 0x23
#define MPUREG_INT_PIN_CFG 0x37
# define BIT_INT_RD_CLEAR 0x10 // clear the interrupt when any read occurs
# define BIT_LATCH_INT_EN 0x20 // latch data ready pin
#define MPUREG_INT_ENABLE 0x38
// bit definitions for MPUREG_INT_ENABLE
# define BIT_RAW_RDY_EN 0x01
# define BIT_DMP_INT_EN 0x02 // enabling this bit (DMP_INT_EN) also enables RAW_RDY_EN it seems
# define BIT_UNKNOWN_INT_EN 0x04
# define BIT_I2C_MST_INT_EN 0x08
# define BIT_FIFO_OFLOW_EN 0x10
# define BIT_ZMOT_EN 0x20
# define BIT_MOT_EN 0x40
# define BIT_FF_EN 0x80
#define MPUREG_INT_STATUS 0x3A
// bit definitions for MPUREG_INT_STATUS (same bit pattern as above because this register shows what interrupt actually fired)
# define BIT_RAW_RDY_INT 0x01
# define BIT_DMP_INT 0x02
# define BIT_UNKNOWN_INT 0x04
# define BIT_I2C_MST_INT 0x08
# define BIT_FIFO_OFLOW_INT 0x10
# define BIT_ZMOT_INT 0x20
# define BIT_MOT_INT 0x40
# define BIT_FF_INT 0x80
#define MPUREG_ACCEL_XOUT_H 0x3B
#define MPUREG_ACCEL_XOUT_L 0x3C
#define MPUREG_ACCEL_YOUT_H 0x3D
#define MPUREG_ACCEL_YOUT_L 0x3E
#define MPUREG_ACCEL_ZOUT_H 0x3F
#define MPUREG_ACCEL_ZOUT_L 0x40
#define MPUREG_TEMP_OUT_H 0x41
#define MPUREG_TEMP_OUT_L 0x42
#define MPUREG_GYRO_XOUT_H 0x43
#define MPUREG_GYRO_XOUT_L 0x44
#define MPUREG_GYRO_YOUT_H 0x45
#define MPUREG_GYRO_YOUT_L 0x46
#define MPUREG_GYRO_ZOUT_H 0x47
#define MPUREG_GYRO_ZOUT_L 0x48
#define MPUREG_USER_CTRL 0x6A
// bit definitions for MPUREG_USER_CTRL
# define BIT_USER_CTRL_SIG_COND_RESET 0x01 // resets signal paths and results registers for all sensors (gyros, accel, temp)
# define BIT_USER_CTRL_I2C_MST_RESET 0x02 // reset I2C Master (only applicable if I2C_MST_EN bit is set)
# define BIT_USER_CTRL_FIFO_RESET 0x04 // Reset (i.e. clear) FIFO buffer
# define BIT_USER_CTRL_DMP_RESET 0x08 // Reset DMP
# define BIT_USER_CTRL_I2C_IF_DIS 0x10 // Disable primary I2C interface and enable hal.spi->interface
# define BIT_USER_CTRL_I2C_MST_EN 0x20 // Enable MPU to act as the I2C Master to external slave sensors
# define BIT_USER_CTRL_FIFO_EN 0x40 // Enable FIFO operations
# define BIT_USER_CTRL_DMP_EN 0x80 // Enable DMP operations
#define MPUREG_PWR_MGMT_1 0x6B
# define BIT_PWR_MGMT_1_CLK_INTERNAL 0x00 // clock set to internal 8Mhz oscillator
# define BIT_PWR_MGMT_1_CLK_XGYRO 0x01 // PLL with X axis gyroscope reference
# define BIT_PWR_MGMT_1_CLK_YGYRO 0x02 // PLL with Y axis gyroscope reference
# define BIT_PWR_MGMT_1_CLK_ZGYRO 0x03 // PLL with Z axis gyroscope reference
# define BIT_PWR_MGMT_1_CLK_EXT32KHZ 0x04 // PLL with external 32.768kHz reference
# define BIT_PWR_MGMT_1_CLK_EXT19MHZ 0x05 // PLL with external 19.2MHz reference
# define BIT_PWR_MGMT_1_CLK_STOP 0x07 // Stops the clock and keeps the timing generator in reset
# define BIT_PWR_MGMT_1_TEMP_DIS 0x08 // disable temperature sensor
# define BIT_PWR_MGMT_1_CYCLE 0x20 // put sensor into cycle mode. cycles between sleep mode and waking up to take a single sample of data from active sensors at a rate determined by LP_WAKE_CTRL
# define BIT_PWR_MGMT_1_SLEEP 0x40 // put sensor into low power sleep mode
# define BIT_PWR_MGMT_1_DEVICE_RESET 0x80 // reset entire device
#define MPUREG_PWR_MGMT_2 0x6C // allows the user to configure the frequency of wake-ups in Accelerometer Only Low Power Mode
#define MPUREG_BANK_SEL 0x6D // DMP bank selection register (used to indirectly access DMP registers)
#define MPUREG_MEM_START_ADDR 0x6E // DMP memory start address (used to indirectly write to dmp memory)
#define MPUREG_MEM_R_W 0x6F // DMP related register
#define MPUREG_DMP_CFG_1 0x70 // DMP related register
#define MPUREG_DMP_CFG_2 0x71 // DMP related register
#define MPUREG_FIFO_COUNTH 0x72
#define MPUREG_FIFO_COUNTL 0x73
#define MPUREG_FIFO_R_W 0x74
#define MPUREG_WHOAMI 0x75
// Configuration bits MPU 3000 and MPU 6000 (not revised)?
#define BITS_DLPF_CFG_256HZ_NOLPF2 0x00
#define BITS_DLPF_CFG_188HZ 0x01
#define BITS_DLPF_CFG_98HZ 0x02
#define BITS_DLPF_CFG_42HZ 0x03
#define BITS_DLPF_CFG_20HZ 0x04
#define BITS_DLPF_CFG_10HZ 0x05
#define BITS_DLPF_CFG_5HZ 0x06
#define BITS_DLPF_CFG_2100HZ_NOLPF 0x07
#define BITS_DLPF_CFG_MASK 0x07
// Product ID Description for MPU6000
// high 4 bits low 4 bits
// Product Name Product Revision
#define MPU6000ES_REV_C4 0x14 // 0001 0100
#define MPU6000ES_REV_C5 0x15 // 0001 0101
#define MPU6000ES_REV_D6 0x16 // 0001 0110
#define MPU6000ES_REV_D7 0x17 // 0001 0111
#define MPU6000ES_REV_D8 0x18 // 0001 1000
#define MPU6000_REV_C4 0x54 // 0101 0100
#define MPU6000_REV_C5 0x55 // 0101 0101
#define MPU6000_REV_D6 0x56 // 0101 0110
#define MPU6000_REV_D7 0x57 // 0101 0111
#define MPU6000_REV_D8 0x58 // 0101 1000
#define MPU6000_REV_D9 0x59 // 0101 1001
// DMP output rate constants
#define MPU6000_200HZ 0x00 // default value
#define MPU6000_100HZ 0x01
#define MPU6000_66HZ 0x02
#define MPU6000_50HZ 0x03
// DMP FIFO constants
// Default quaternion FIFO size (4*4) + Footer(2)
#define FIFO_PACKET_SIZE 18
// Rate of the gyro bias from gravity correction (200Hz/4) => 50Hz
#define GYRO_BIAS_FROM_GRAVITY_RATE 4
// Default gain for accel fusion (with gyros)
#define DEFAULT_ACCEL_FUSION_GAIN 0x80
/*
* RM-MPU-6000A-00.pdf, page 33, section 4.25 lists LSB sensitivity of
* gyro as 16.4 LSB/DPS at scale factor of +/- 2000dps (FS_SEL==3)
*/
const float AP_InertialSensor_MPU6000::_gyro_scale = (0.0174532 / 16.4);
/* pch: I believe the accel and gyro indices are correct
 * but someone else should please confirm.
*
* jamesjb: Y and Z axes are flipped on the PX4FMU
*/
const uint8_t AP_InertialSensor_MPU6000::_gyro_data_index[3] = { 5, 4, 6 };
const uint8_t AP_InertialSensor_MPU6000::_accel_data_index[3] = { 1, 0, 2 };
#if CONFIG_HAL_BOARD == HAL_BOARD_SMACCM
const int8_t AP_InertialSensor_MPU6000::_gyro_data_sign[3] = { 1, -1, 1 };
const int8_t AP_InertialSensor_MPU6000::_accel_data_sign[3] = { 1, -1, 1 };
#else
const int8_t AP_InertialSensor_MPU6000::_gyro_data_sign[3] = { 1, 1, -1 };
const int8_t AP_InertialSensor_MPU6000::_accel_data_sign[3] = { 1, 1, -1 };
#endif
const uint8_t AP_InertialSensor_MPU6000::_temp_data_index = 3;
int16_t AP_InertialSensor_MPU6000::_mpu6000_product_id = AP_PRODUCT_ID_NONE;
AP_HAL::DigitalSource *AP_InertialSensor_MPU6000::_drdy_pin = NULL;
// time the latest sample was collected (in microseconds)
static volatile uint32_t _last_sample_time_micros = 0;
// DMP related static variables
bool AP_InertialSensor_MPU6000::_dmp_initialised = false;
// high byte of number of elements in fifo buffer
uint8_t AP_InertialSensor_MPU6000::_fifoCountH;
// low byte of number of elements in fifo buffer
uint8_t AP_InertialSensor_MPU6000::_fifoCountL;
// holds the quaternion (4 elements) representing attitude, taken directly from the DMP
Quaternion AP_InertialSensor_MPU6000::quaternion;
/* Static SPI device driver */
AP_HAL::SPIDeviceDriver* AP_InertialSensor_MPU6000::_spi = NULL;
AP_HAL::Semaphore* AP_InertialSensor_MPU6000::_spi_sem = NULL;
/*
* RM-MPU-6000A-00.pdf, page 31, section 4.23 lists LSB sensitivity of
* accel as 4096 LSB/g at scale factor of +/- 8g (AFS_SEL==2)
*
* See note below about accel scaling of engineering sample MPU6k
* variants however
*/
AP_InertialSensor_MPU6000::AP_InertialSensor_MPU6000() : AP_InertialSensor()
{
_temp = 0;
_initialised = false;
_dmp_initialised = false;
}
uint16_t AP_InertialSensor_MPU6000::_init_sensor( Sample_rate sample_rate )
{
if (_initialised) return _mpu6000_product_id;
_initialised = true;
_spi = hal.spi->device(AP_HAL::SPIDevice_MPU6000);
_spi_sem = _spi->get_semaphore();
/* Pin 70 defined especially to hook
up PE6 to the hal.gpio abstraction.
(It is not a valid pin under Arduino.) */
_drdy_pin = hal.gpio->channel(70);
hal.scheduler->suspend_timer_procs();
uint8_t tries = 0;
do {
bool success = hardware_init(sample_rate);
if (success) {
hal.scheduler->delay(5+2);
if (_data_ready()) {
break;
} else {
hal.console->println_P(
PSTR("MPU6000 startup failed: no data ready"));
}
}
if (tries++ > 5) {
hal.scheduler->panic(PSTR("PANIC: failed to boot MPU6000 5 times"));
}
} while (1);
hal.scheduler->resume_timer_procs();
/* read the first lot of data.
* _read_data_transaction requires the spi semaphore to be taken by
* its caller. */
_last_sample_time_micros = hal.scheduler->micros();
_read_data_transaction();
// start the timer process to read samples
hal.scheduler->register_timer_process(_poll_data);
#if MPU6000_DEBUG
_dump_registers();
#endif
return _mpu6000_product_id;
}
// accumulation in ISR - must be read with interrupts disabled
// the sum of the values since last read
static volatile int32_t _sum[7];
// how many values we've accumulated since last read
static volatile uint16_t _count;
/*================ AP_INERTIALSENSOR PUBLIC INTERFACE ==================== */
void AP_InertialSensor_MPU6000::wait_for_sample()
{
uint32_t tstart = hal.scheduler->micros();
while (num_samples_available() == 0) {
uint32_t now = hal.scheduler->micros();
uint32_t dt = now - tstart;
if (dt > 50000) {
hal.scheduler->panic(
PSTR("PANIC: AP_InertialSensor_MPU6000::update "
"waited 50ms for data from interrupt"));
}
}
}
bool AP_InertialSensor_MPU6000::update( void )
{
int32_t sum[7];
float count_scale;
Vector3f accel_scale = _accel_scale.get();
// wait for at least 1 sample
wait_for_sample();
// disable timer procs for minimum time
hal.scheduler->suspend_timer_procs();
/** ATOMIC SECTION w/r/t TIMER PROCESS */
{
for (int i=0; i<7; i++) {
sum[i] = _sum[i];
_sum[i] = 0;
}
_num_samples = _count;
_count = 0;
}
hal.scheduler->resume_timer_procs();
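// convert the accumulated sums into averages over the samples collected
// since the last update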
count_scale = 1.0f / _num_samples;
_gyro = Vector3f(_gyro_data_sign[0] * sum[_gyro_data_index[0]],
_gyro_data_sign[1] * sum[_gyro_data_index[1]],
_gyro_data_sign[2] * sum[_gyro_data_index[2]]);
_gyro.rotate(_board_orientation);
_gyro *= _gyro_scale * count_scale;
_gyro -= _gyro_offset;
_accel = Vector3f(_accel_data_sign[0] * sum[_accel_data_index[0]],
_accel_data_sign[1] * sum[_accel_data_index[1]],
_accel_data_sign[2] * sum[_accel_data_index[2]]);
_accel.rotate(_board_orientation);
_accel *= count_scale * MPU6000_ACCEL_SCALE_1G;
_accel.x *= accel_scale.x;
_accel.y *= accel_scale.y;
_accel.z *= accel_scale.z;
_accel -= _accel_offset;
_temp = _temp_to_celsius(sum[_temp_data_index] * count_scale);
if (_last_filter_hz != _mpu6000_filter) {
if (_spi_sem->take(10)) {
_set_filter_register(_mpu6000_filter, 0);
_spi_sem->give();
}
}
return true;
}
/*================ HARDWARE FUNCTIONS ==================== */
/**
* Return true if the MPU6000 has new data available for reading.
*
* We use the data ready pin if it is available. Otherwise, read the
* status register.
*/
bool AP_InertialSensor_MPU6000::_data_ready()
{
if (_drdy_pin) {
return _drdy_pin->read() != 0;
}
if (hal.scheduler->in_timerprocess()) {
bool got = _spi_sem->take_nonblocking();
if (got) {
uint8_t status = _register_read(MPUREG_INT_STATUS);
_spi_sem->give();
return (status & BIT_RAW_RDY_INT) != 0;
} else {
return false;
}
} else {
bool got = _spi_sem->take(10);
if (got) {
uint8_t status = _register_read(MPUREG_INT_STATUS);
_spi_sem->give();
return (status & BIT_RAW_RDY_INT) != 0;
} else {
hal.scheduler->panic(
PSTR("PANIC: AP_InertialSensor_MPU6000::_data_ready failed to "
"take SPI semaphore synchronously"));
}
}
return false;
}
/**
* Timer process to poll for new data from the MPU6000.
*/
void AP_InertialSensor_MPU6000::_poll_data(uint32_t now)
{
if (_data_ready()) {
if (hal.scheduler->in_timerprocess()) {
_read_data_from_timerprocess();
} else {
/* Synchronous read - take semaphore */
bool got = _spi_sem->take(10);
if (got) {
_last_sample_time_micros = hal.scheduler->micros();
_read_data_transaction();
_spi_sem->give();
} else {
hal.scheduler->panic(
PSTR("PANIC: AP_InertialSensor_MPU6000::_poll_data "
"failed to take SPI semaphore synchronously"));
}
}
}
}
/*
 * this is called from _poll_data, in the timer process context, when the
 * MPU6000 has new sensor data available. It reads the sensor data and adds
 * it to _sum[]. To keep SPI bus access conflict-free, any other devices on
 * the bus must perform their SPI reads only after being called by the
 * AP_TimerProcess.
 */
void AP_InertialSensor_MPU6000::_read_data_from_timerprocess()
{
static uint8_t semfail_ctr = 0;
bool got = _spi_sem->take_nonblocking();
if (!got) {
semfail_ctr++;
if (semfail_ctr > 100) {
hal.scheduler->panic(PSTR("PANIC: failed to take SPI semaphore "
"100 times in AP_InertialSensor_MPU6000::"
"_read_data_from_timerprocess"));
}
return;
} else {
semfail_ctr = 0;
}
_last_sample_time_micros = hal.scheduler->micros();
_read_data_transaction();
_spi_sem->give();
}
void AP_InertialSensor_MPU6000::_read_data_transaction() {
/* one register address followed by seven 2-byte registers */
uint8_t tx[15];
uint8_t rx[15];
memset(tx,0,15);
tx[0] = MPUREG_ACCEL_XOUT_H | 0x80;
_spi->transaction(tx, rx, 15);
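// rx[0] is the byte clocked out while the register address was being sent;
// the remaining 14 bytes hold seven big-endian int16 values in register
// order: accel X/Y/Z, temperature, gyro X/Y/Z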
for (uint8_t i = 0; i < 7; i++) {
_sum[i] += (int16_t)(((uint16_t)rx[2*i+1] << 8) | rx[2*i+2]);
}
_count++;
if (_count == 0) {
// rollover - v unlikely
memset((void*)_sum, 0, sizeof(_sum));
}
// should also read FIFO data if enabled
if( _dmp_initialised ) {
if( FIFO_ready() ) {
FIFO_getPacket();
}
}
}
uint8_t AP_InertialSensor_MPU6000::_register_read( uint8_t reg )
{
uint8_t addr = reg | 0x80; // Set most significant bit
uint8_t tx[2];
uint8_t rx[2];
tx[0] = addr;
tx[1] = 0;
_spi->transaction(tx, rx, 2);
return rx[1];
}
void AP_InertialSensor_MPU6000::register_write(uint8_t reg, uint8_t val)
{
uint8_t tx[2];
uint8_t rx[2];
tx[0] = reg;
tx[1] = val;
_spi->transaction(tx, rx, 2);
}
/*
set the DLPF filter frequency. Assumes caller has taken semaphore
*/
void AP_InertialSensor_MPU6000::_set_filter_register(uint8_t filter_hz, uint8_t default_filter)
{
uint8_t filter = default_filter;
// choose filtering frequency
switch (filter_hz) {
case 5:
filter = BITS_DLPF_CFG_5HZ;
break;
case 10:
filter = BITS_DLPF_CFG_10HZ;
break;
case 20:
filter = BITS_DLPF_CFG_20HZ;
break;
case 42:
filter = BITS_DLPF_CFG_42HZ;
break;
case 98:
filter = BITS_DLPF_CFG_98HZ;
break;
}
if (filter != 0) {
_last_filter_hz = filter_hz;
register_write(MPUREG_CONFIG, filter);
}
}
bool AP_InertialSensor_MPU6000::hardware_init(Sample_rate sample_rate)
{
if (!_spi_sem->take(100)) {
hal.scheduler->panic(PSTR("MPU6000: Unable to get semaphore"));
}
// Chip reset
uint8_t tries;
for (tries = 0; tries<5; tries++) {
register_write(MPUREG_PWR_MGMT_1, BIT_PWR_MGMT_1_DEVICE_RESET);
hal.scheduler->delay(100);
// Wake up device and select GyroZ clock. Note that the
// MPU6000 starts up in sleep mode, and it can take some time
// for it to come out of sleep
register_write(MPUREG_PWR_MGMT_1, BIT_PWR_MGMT_1_CLK_ZGYRO);
hal.scheduler->delay(5);
// check it has woken up
if (_register_read(MPUREG_PWR_MGMT_1) == BIT_PWR_MGMT_1_CLK_ZGYRO) {
break;
}
#if MPU6000_DEBUG
_dump_registers();
#endif
}
if (tries == 5) {
hal.console->println_P(PSTR("Failed to boot MPU6000 5 times"));
_spi_sem->give();
return false;
}
register_write(MPUREG_PWR_MGMT_2, 0x00); // only used for wake-up in accelerometer only low power mode
hal.scheduler->delay(1);
// Disable I2C bus (recommended by the datasheet)
register_write(MPUREG_USER_CTRL, BIT_USER_CTRL_I2C_IF_DIS);
hal.scheduler->delay(1);
uint8_t default_filter;
// sample rate and filtering
// to minimise the effects of aliasing we choose a filter
// that is less than half of the sample rate
switch (sample_rate) {
case RATE_50HZ:
// this is used for plane and rover, where noise resistance is
// more important than update rate. Tests on an aerobatic plane
// show that 10Hz is fine, and makes it very noise resistant
default_filter = BITS_DLPF_CFG_10HZ;
_sample_shift = 2;
break;
case RATE_100HZ:
default_filter = BITS_DLPF_CFG_20HZ;
_sample_shift = 1;
break;
case RATE_200HZ:
default:
default_filter = BITS_DLPF_CFG_20HZ;
_sample_shift = 0;
break;
}
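// _sample_shift is used by num_samples_available() to divide the 200Hz
// sample count down to the requested application rate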
_set_filter_register(_mpu6000_filter, default_filter);
// set sample rate to 200Hz, and use _sample_shift to give
// the requested rate to the application
register_write(MPUREG_SMPLRT_DIV, MPUREG_SMPLRT_200HZ);
hal.scheduler->delay(1);
register_write(MPUREG_GYRO_CONFIG, BITS_GYRO_FS_2000DPS); // Gyro scale 2000º/s
hal.scheduler->delay(1);
// read the product ID; rev C has 1/2 the sensitivity of rev D
_mpu6000_product_id = _register_read(MPUREG_PRODUCT_ID);
//Serial.printf("Product_ID= 0x%x\n", (unsigned) _mpu6000_product_id);
if ((_mpu6000_product_id == MPU6000ES_REV_C4) || (_mpu6000_product_id == MPU6000ES_REV_C5) ||
(_mpu6000_product_id == MPU6000_REV_C4) || (_mpu6000_product_id == MPU6000_REV_C5)) {
// Accel scale 8g (4096 LSB/g)
// Rev C has different scaling than rev D
register_write(MPUREG_ACCEL_CONFIG,1<<3);
} else {
// Accel scale 8g (4096 LSB/g)
register_write(MPUREG_ACCEL_CONFIG,2<<3);
}
hal.scheduler->delay(1);
// configure interrupt to fire when new data arrives
register_write(MPUREG_INT_ENABLE, BIT_RAW_RDY_EN);
hal.scheduler->delay(1);
// clear interrupt on any read, and hold the data ready pin high
// until we clear the interrupt
register_write(MPUREG_INT_PIN_CFG, BIT_INT_RD_CLEAR | BIT_LATCH_INT_EN);
hal.scheduler->delay(1);
_spi_sem->give();
return true;
}
float AP_InertialSensor_MPU6000::_temp_to_celsius ( uint16_t regval )
{
/* TODO */
return 20.0;
}
// return the MPU6k gyro drift rate in radian/s/s
// note that this is much better than the oilpan gyros
float AP_InertialSensor_MPU6000::get_gyro_drift_rate(void)
{
// 0.5 degrees/second/minute
return ToRad(0.5/60);
}
// get number of samples read from the sensors
uint16_t AP_InertialSensor_MPU6000::num_samples_available()
{
_poll_data(0);
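// _count accumulates at the 200Hz internal rate; shifting right by
// _sample_shift converts it to the application rate (>>0 = 200Hz,
// >>1 = 100Hz, >>2 = 50Hz)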
return _count >> _sample_shift;
}
#if MPU6000_DEBUG
// dump all config registers - used for debug
void AP_InertialSensor_MPU6000::_dump_registers(void)
{
hal.console->println_P(PSTR("MPU6000 registers"));
for (uint8_t reg=MPUREG_PRODUCT_ID; reg<=108; reg++) {
uint8_t v = _register_read(reg);
hal.console->printf_P(PSTR("%02x:%02x "), (unsigned)reg, (unsigned)v);
if ((reg - (MPUREG_PRODUCT_ID-1)) % 16 == 0) {
hal.console->println();
}
}
hal.console->println();
}
#endif
// get_delta_time returns the time period in seconds over which the sensor data was collected
float AP_InertialSensor_MPU6000::get_delta_time()
{
// the sensor runs at 200Hz
return 0.005 * _num_samples;
}
// Update gyro offsets with new values. Offsets are provided as scaled rad/sec values
void AP_InertialSensor_MPU6000::push_gyro_offsets_to_dmp()
{
Vector3f gyro_offsets = _gyro_offset.get();
int16_t offsetX = gyro_offsets.x / _gyro_scale * _gyro_data_sign[0];
int16_t offsetY = gyro_offsets.y / _gyro_scale * _gyro_data_sign[1];
int16_t offsetZ = gyro_offsets.z / _gyro_scale * _gyro_data_sign[2];
set_dmp_gyro_offsets(offsetX, offsetY, offsetZ);
// remove ins level offsets to avoid double counting
gyro_offsets.x = 0;
gyro_offsets.y = 0;
gyro_offsets.z = 0;
_gyro_offset = gyro_offsets;
}
// Update gyro offsets with new values. New offset values are subtracted from the current offset values.
// offset values in gyro LSB units (as read from registers)
void AP_InertialSensor_MPU6000::set_dmp_gyro_offsets(int16_t offsetX, int16_t offsetY, int16_t offsetZ)
{
int16_t aux_int;
if (offsetX != 0) {
// Read actual value
aux_int = (_register_read(MPUREG_XG_OFFS_USRH)<<8) | _register_read(MPUREG_XG_OFFS_USRL);
aux_int -= offsetX<<1; // Adjust to internal units
// Write to MPU registers
register_write(MPUREG_XG_OFFS_USRH, (aux_int>>8)&0xFF);
register_write(MPUREG_XG_OFFS_USRL, aux_int&0xFF);
}
if (offsetY != 0) {
aux_int = (_register_read(MPUREG_YG_OFFS_USRH)<<8) | _register_read(MPUREG_YG_OFFS_USRL);
aux_int -= offsetY<<1; // Adjust to internal units
// Write to MPU registers
register_write(MPUREG_YG_OFFS_USRH, (aux_int>>8)&0xFF);
register_write(MPUREG_YG_OFFS_USRL, aux_int&0xFF);
}
if (offsetZ != 0) {
aux_int = (_register_read(MPUREG_ZG_OFFS_USRH)<<8) | _register_read(MPUREG_ZG_OFFS_USRL);
aux_int -= offsetZ<<1; // Adjust to internal units
// Write to MPU registers
register_write(MPUREG_ZG_OFFS_USRH, (aux_int>>8)&0xFF);
register_write(MPUREG_ZG_OFFS_USRL, aux_int&0xFF);
}
}
// Update accel offsets with new values. Offsets are provided as scaled values (1G)
void AP_InertialSensor_MPU6000::push_accel_offsets_to_dmp()
{
Vector3f accel_offset = _accel_offset.get();
Vector3f accel_scale = _accel_scale.get();
int16_t offsetX = accel_offset.x / (accel_scale.x * _accel_data_sign[0] * MPU6000_ACCEL_SCALE_1G);
int16_t offsetY = accel_offset.y / (accel_scale.y * _accel_data_sign[1] * MPU6000_ACCEL_SCALE_1G);
int16_t offsetZ = accel_offset.z / (accel_scale.z * _accel_data_sign[2] * MPU6000_ACCEL_SCALE_1G);
// strangely x and y are reversed
set_dmp_accel_offsets(offsetY, offsetX, offsetZ);
}
// set_dmp_accel_offsets - adds an offset to accelerometer readings
// This is useful for dynamic acceleration correction (for example centripetal force correction)
// and for the initial offset calibration
// Input, accel offsets for X,Y and Z in LSB units (as read from raw values)
void AP_InertialSensor_MPU6000::set_dmp_accel_offsets(int16_t offsetX, int16_t offsetY, int16_t offsetZ)
{
int aux_int;
uint8_t regs[2];
// Write accel offsets to DMP memory...
// TO-DO: why don't we write to main accel offset registers? i.e. MPUREG_XA_OFFS_H
aux_int = offsetX>>1; // Transform to internal units
regs[0]=(aux_int>>8)&0xFF;
regs[1]=aux_int&0xFF;
dmp_register_write(0x01,0x08,2,regs); // key KEY_D_1_8 Accel X offset
aux_int = offsetY>>1;
regs[0]=(aux_int>>8)&0xFF;
regs[1]=aux_int&0xFF;
dmp_register_write(0x01,0x0A,2,regs); // key KEY_D_1_10 Accel Y offset
aux_int = offsetZ>>1;
regs[0]=(aux_int>>8)&0xFF;
regs[1]=aux_int&0xFF;
dmp_register_write(0x01,0x02,2,regs); // key KEY_D_1_2 Accel Z offset
}
// dmp_register_write - method to write to dmp's registers
// the dmp is logically separated from the main mpu6000. To write a block of memory to the DMP's memory you
// write the "bank" and starting address into two of the main MPU's registers, then write the data one byte
// at a time into the MPUREG_MEM_R_W register
void AP_InertialSensor_MPU6000::dmp_register_write(uint8_t bank, uint8_t address, uint8_t num_bytes, uint8_t data[])
{
register_write(MPUREG_BANK_SEL,bank);
register_write(MPUREG_MEM_START_ADDR,address);
_spi->cs_assert();
_spi->transfer(MPUREG_MEM_R_W);
for (uint8_t i=0; i<num_bytes; i++) {
_spi->transfer(data[i]);
}
_spi->cs_release();
}
// MPU6000 DMP initialization
// this should be called after hardware_init if you wish to enable the dmp
void AP_InertialSensor_MPU6000::dmp_init()
{
uint8_t regs[4]; // for writing to dmp
// ensure we only initialise once
if( _dmp_initialised ) {
return;
}
// load initial values into DMP memory
dmp_load_mem();
dmp_set_gyro_calibration();
dmp_set_accel_calibration();
dmp_apply_endian_accel();
dmp_set_mpu_sensors();
dmp_set_bias_none();
dmp_set_fifo_interrupt();
dmp_send_quaternion(); // By default we only send the quaternion to the FIFO (18 bytes packet size)
dmp_set_fifo_rate(MPU6000_200HZ); // 200Hz DMP output rate
register_write(MPUREG_INT_ENABLE, BIT_RAW_RDY_EN | BIT_DMP_INT_EN ); // configure interrupts to fire only when new data arrives from DMP (in fifo buffer)
// Randy: no idea what this does
register_write(MPUREG_DMP_CFG_1, 0x03); //MPUREG_DMP_CFG_1, 0x03
register_write(MPUREG_DMP_CFG_2, 0x00); //MPUREG_DMP_CFG_2, 0x00
//inv_state_change_fifo
regs[0] = 0xFF;
regs[1] = 0xFF;
dmp_register_write(0x01, 0xB2, 0x02, regs); // D_1_178
// ?? FIFO ??
regs[0] = 0x09;
regs[1] = 0x23;
regs[2] = 0xA1;
regs[3] = 0x35;
dmp_register_write(0x01, 0x90, 0x04, regs); // D_1_144
//register_write(MPUREG_USER_CTRL, BIT_USER_CTRL_FIFO_RESET); //MPUREG_USER_CTRL, BIT_FIFO_RST
FIFO_reset();
FIFO_ready();
//register_write(MPUREG_USER_CTRL, 0x00); // MPUREG_USER_CTRL, 0. TO-DO: is all this setting of USER_CTRL really necessary?
register_write(MPUREG_USER_CTRL, BIT_USER_CTRL_FIFO_RESET); //MPUREG_USER_CTRL, BIT_FIFO_RST. TO-DO: replace this call with FIFO_reset()?
register_write(MPUREG_USER_CTRL, 0x00); // MPUREG_USER_CTRL: 0
register_write(MPUREG_USER_CTRL, BIT_USER_CTRL_DMP_EN | BIT_USER_CTRL_FIFO_EN | BIT_USER_CTRL_DMP_RESET);
// Set the gain of the accel in the sensor fusion
dmp_set_sensor_fusion_accel_gain(DEFAULT_ACCEL_FUSION_GAIN); // default value
// dmp initialisation complete
_dmp_initialised = true;
}
// dmp_reset - reset dmp (required for changes in gains or offsets to take effect)
void AP_InertialSensor_MPU6000::dmp_reset()
{
//uint8_t tmp = register_read(MPUREG_USER_CTRL);
//tmp |= BIT_USER_CTRL_DMP_RESET;
//register_write(MPUREG_USER_CTRL,tmp);
register_write(MPUREG_USER_CTRL, BIT_USER_CTRL_FIFO_RESET); //MPUREG_USER_CTRL, BIT_FIFO_RST. TO-DO: replace this call with FIFO_reset()?
register_write(MPUREG_USER_CTRL, 0x00); // MPUREG_USER_CTRL: 0
register_write(MPUREG_USER_CTRL, BIT_USER_CTRL_DMP_EN | BIT_USER_CTRL_FIFO_EN | BIT_USER_CTRL_DMP_RESET);
}
// New data packet in FIFO?
bool AP_InertialSensor_MPU6000::FIFO_ready()
{
_fifoCountH = _register_read(MPUREG_FIFO_COUNTH);
_fifoCountL = _register_read(MPUREG_FIFO_COUNTL);
if(_fifoCountL == FIFO_PACKET_SIZE) {
return true;
}
else {
// we should not reach this point unless more than one packet (or a partial
// packet) is waiting; that case is not handled yet, so flush the FIFO
FIFO_reset();
return false;
}
}
// FIFO_reset - reset/clear FIFO buffer used to capture attitude information from DMP
void AP_InertialSensor_MPU6000::FIFO_reset()
{
uint8_t temp;
temp = _register_read(MPUREG_USER_CTRL);
temp = temp | BIT_USER_CTRL_FIFO_RESET; // FIFO RESET BIT
register_write(MPUREG_USER_CTRL, temp);
}
// FIFO_getPacket - read an attitude packet from FIFO buffer
// TO-DO: interpret results instead of just dumping into a buffer
void AP_InertialSensor_MPU6000::FIFO_getPacket()
{
uint8_t i;
int16_t q_data[4];
uint8_t addr = MPUREG_FIFO_R_W | 0x80; // Set most significant bit to indicate a read
uint8_t received_packet[DMP_FIFO_BUFFER_SIZE]; // FIFO packet buffer
_spi->cs_assert();
_spi->transfer(addr); // send address we want to read from
for(i = 0; i < _fifoCountL; i++) {
received_packet[i] = _spi->transfer(0); // request value
}
_spi->cs_release();
// we are using 16 bits resolution
q_data[0] = (int16_t) ((((uint16_t) received_packet[0]) << 8) + ((uint16_t) received_packet[1]));
q_data[1] = (int16_t) ((((uint16_t) received_packet[4]) << 8) + ((uint16_t) received_packet[5]));
q_data[2] = (int16_t) ((((uint16_t) received_packet[8]) << 8) + ((uint16_t) received_packet[9]));
q_data[3] = (int16_t) ((((uint16_t) received_packet[12]) << 8) + ((uint16_t) received_packet[13]));
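// the DMP quaternion elements are fixed-point with 1.0 == 2^14 (16384); the
// element re-ordering and the sign flip below map the DMP output onto this
// driver's axis conventions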
quaternion.q1 = ((float)q_data[0]) / 16384.0f; // convert from fixed point to float
quaternion.q2 = ((float)q_data[2]) / 16384.0f; // convert from fixed point to float
quaternion.q3 = ((float)q_data[1]) / 16384.0f; // convert from fixed point to float
quaternion.q4 = ((float)-q_data[3]) / 16384.0f; // convert from fixed point to float
}
// dmp_set_gyro_calibration - apply default gyro calibration FS=2000dps and default orientation
void AP_InertialSensor_MPU6000::dmp_set_gyro_calibration()
{
uint8_t regs[4];
regs[0]=0x4C;
regs[1]=0xCD;
regs[2]=0x6C;
dmp_register_write(0x03, 0x7B, 0x03, regs); //FCFG_1 inv_set_gyro_calibration
regs[0]=0x36;
regs[1]=0x56;
regs[2]=0x76;
dmp_register_write(0x03, 0xAB, 0x03, regs); //FCFG_3 inv_set_gyro_calibration
regs[0]=0x02;
regs[1]=0xCB;
regs[2]=0x47;
regs[3]=0xA2;
dmp_register_write(0x00, 0x68, 0x04, regs); //D_0_104 inv_set_gyro_calibration
regs[0]=0x00;
regs[1]=0x05;
regs[2]=0x8B;
regs[3]=0xC1;
dmp_register_write(0x02, 0x18, 0x04, regs); //D_0_24 inv_set_gyro_calibration
}
// dmp_set_accel_calibration - apply default accel calibration scale=8g and default orientation
void AP_InertialSensor_MPU6000::dmp_set_accel_calibration()
{
uint8_t regs[6];
regs[0]=0x00;
regs[1]=0x00;
regs[2]=0x00;
regs[3]=0x00;
dmp_register_write(0x01, 0x0C, 0x04, regs); //D_1_152 inv_set_accel_calibration
regs[0]=0x0C;
regs[1]=0xC9;
regs[2]=0x2C;
regs[3]=0x97;
regs[4]=0x97;
regs[5]=0x97;
dmp_register_write(0x03, 0x7F, 0x06, regs); //FCFG_2 inv_set_accel_calibration
regs[0]=0x26;
regs[1]=0x46;
regs[2]=0x66;
dmp_register_write(0x03, 0x89, 0x03, regs); //FCFG_7 inv_set_accel_calibration
// accel range: 0x20,0x00 => 2g, 0x10,0x00 => 4g; regs = 1073741824 / (accel_scale * 65536)
//regs[0]=0x20; // 2g
regs[0]=0x08; // 8g
regs[1]=0x00;
dmp_register_write(0x00, 0x6C, 0x02, regs); //D_0_108 inv_set_accel_calibration
}
// dmp_apply_endian_accel - set byte order of accelerometer values?
void AP_InertialSensor_MPU6000::dmp_apply_endian_accel()
{
uint8_t regs[4];
regs[0]=0x00;
regs[1]=0x00;
regs[2]=0x40;
regs[3]=0x00;
dmp_register_write(0x01, 0xEC, 0x04, regs); //D_1_236 inv_apply_endian_accel
}
// dmp_set_mpu_sensors - to configure for SIX_AXIS output
void AP_InertialSensor_MPU6000::dmp_set_mpu_sensors()
{
uint8_t regs[6];
regs[0]=0x0C;
regs[1]=0xC9;
regs[2]=0x2C;
regs[3]=0x97;
regs[4]=0x97;
regs[5]=0x97;
dmp_register_write(0x03, 0x7F, 0x06, regs); //FCFG_2 inv_set_mpu_sensors(INV_SIX_AXIS_GYRO_ACCEL);
}
// dmp_set_bias_from_no_motion - turn on bias from no motion
void AP_InertialSensor_MPU6000::dmp_set_bias_from_no_motion()
{
uint8_t regs[4];
regs[0]=0x0D;
regs[1]=0x35;
regs[2]=0x5D;
dmp_register_write(0x04, 0x02, 0x03, regs); //CFG_MOTION_BIAS inv_turn_on_bias_from_no_motion
regs[0]=0x87;
regs[1]=0x2D;
regs[2]=0x35;
regs[3]=0x3D;
dmp_register_write(0x04, 0x09, 0x04, regs); //FCFG_5 inv_set_bias_update( INV_BIAS_FROM_NO_MOTION );
}
// dmp_set_bias_none - turn off internal bias correction (we use this option and handle gyro bias correction externally)
void AP_InertialSensor_MPU6000::dmp_set_bias_none()
{
uint8_t regs[4];
regs[0]=0x98;
regs[1]=0x98;
regs[2]=0x98;
dmp_register_write(0x04, 0x02, 0x03, regs); //CFG_MOTION_BIAS inv_turn_off_bias_from_no_motion
regs[0]=0x87;
regs[1]=0x2D;
regs[2]=0x35;
regs[3]=0x3D;
dmp_register_write(0x04, 0x09, 0x04, regs); //FCFG_5 inv_set_bias_update( INV_BIAS_FROM_NO_MOTION );
}
// dmp_set_fifo_interrupt
void AP_InertialSensor_MPU6000::dmp_set_fifo_interrupt()
{
uint8_t regs[1];
regs[0]=0xFE;
dmp_register_write(0x07, 0x86, 0x01, regs); //CFG_6 inv_set_fifo_interupt
}
// dmp_send_quaternion - send quaternion data to FIFO
void AP_InertialSensor_MPU6000::dmp_send_quaternion()
{
uint8_t regs[5];
regs[0]=0xF1;
regs[1]=0x20;
regs[2]=0x28;
regs[3]=0x30;
regs[4]=0x38;
dmp_register_write(0x07, 0x41, 0x05, regs); //CFG_8 inv_send_quaternion
regs[0]=0x30;
dmp_register_write(0x07, 0x7E, 0x01, regs); //CFG_16 inv_set_footer
}
// dmp_send_gyro - send gyro data to FIFO
void AP_InertialSensor_MPU6000::dmp_send_gyro()
{
uint8_t regs[4];
regs[0]=0xF1;
regs[1]=0x28;
regs[2]=0x30;
regs[3]=0x38;
dmp_register_write(0x07, 0x47, 0x04, regs); //CFG_9 inv_send_gyro
}
// dmp_send_accel - send accel data to FIFO
void AP_InertialSensor_MPU6000::dmp_send_accel()
{
uint8_t regs[54];
regs[0]=0xF1;
regs[1]=0x28;
regs[2]=0x30;
regs[3]=0x38;
dmp_register_write(0x07, 0x6C, 0x04, regs); //CFG_12 inv_send_accel
}
// This function defines the rate at which attitude data is sent to the FIFO
// Rate: 0 => SAMPLE_RATE(ex:200Hz), 1=> SAMPLE_RATE/2 (ex:100Hz), 2=> SAMPLE_RATE/3 (ex:66Hz)
// rate constant definitions in MPU6000.h
void AP_InertialSensor_MPU6000::dmp_set_fifo_rate(uint8_t rate)
{
uint8_t regs[2];
regs[0]=0x00;
regs[1]=rate;
dmp_register_write(0x02, 0x16, 0x02, regs); //D_0_22 inv_set_fifo_rate
}
// This function defines the weight of the accel on the sensor fusion
// default value is 0x80
// The official invensense name is inv_key_0_96 (??)
void AP_InertialSensor_MPU6000::dmp_set_sensor_fusion_accel_gain(uint8_t gain)
{
//inv_key_0_96
register_write(MPUREG_BANK_SEL,0x00);
register_write(MPUREG_MEM_START_ADDR, 0x60);
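// four bytes are written starting at bank 0, address 0x60; the gain is the
// second byte and the remaining three bytes are written as zero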
_spi->cs_assert();
_spi->transfer(MPUREG_MEM_R_W);
_spi->transfer(0x00);
_spi->transfer(gain); // Original: 0x80. To test: 0x40, 0x20 (too low)
_spi->transfer(0x00);
_spi->transfer(0x00);
_spi->cs_release();
}
// Load initial memory values into DMP memory banks
void AP_InertialSensor_MPU6000::dmp_load_mem()
{
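// the DMP image is loaded 16 bytes at a time: banks 0-6 are full 256-byte
// banks (16 chunks of 16 bytes); bank 7 holds 8 full chunks plus a final
// 9-byte chunk at address 0x80 (matching the dmpMem table below)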
for(int i = 0; i < 7; i++) {
register_write(MPUREG_BANK_SEL,i); //MPUREG_BANK_SEL
for(uint8_t j = 0; j < 16; j++) {
uint8_t start_addy = j * 0x10;
register_write(MPUREG_MEM_START_ADDR,start_addy);
_spi->cs_assert();
_spi->transfer(MPUREG_MEM_R_W);
for(int k = 0; k < 16; k++) {
uint8_t byteToSend = pgm_read_byte((const prog_char *)&(dmpMem[i][j][k]));
_spi->transfer((uint8_t) byteToSend);
}
_spi->cs_release();
}
}
register_write(MPUREG_BANK_SEL,7); //MPUREG_BANK_SEL
for(uint8_t j = 0; j < 8; j++) {
uint8_t start_addy = j * 0x10;
register_write(MPUREG_MEM_START_ADDR,start_addy);
_spi->cs_assert();
_spi->transfer(MPUREG_MEM_R_W);
for(int k = 0; k < 16; k++) {
uint8_t byteToSend = pgm_read_byte((const prog_char *)&(dmpMem[7][j][k]));
_spi->transfer((uint8_t) byteToSend);
}
_spi->cs_release();
}
register_write(MPUREG_MEM_START_ADDR,0x80);
_spi->cs_assert();
_spi->transfer(MPUREG_MEM_R_W);
for(int k = 0; k < 9; k++) {
uint8_t byteToSend = pgm_read_byte((const prog_char *)&(dmpMem[7][8][k]));
_spi->transfer((uint8_t) byteToSend);
}
_spi->cs_release();
}
// ========= DMP MEMORY ================================
const uint8_t dmpMem[8][16][16] PROGMEM = {
{
{
0xFB, 0x00, 0x00, 0x3E, 0x00, 0x0B, 0x00, 0x36, 0x00, 0x01, 0x00, 0x02, 0x00, 0x03, 0x00, 0x00
}
,
{
0x00, 0x65, 0x00, 0x54, 0xFF, 0xEF, 0x00, 0x00, 0xFA, 0x80, 0x00, 0x0B, 0x12, 0x82, 0x00, 0x01
}
,
{
0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x28, 0x00, 0x00, 0xFF, 0xFF, 0x45, 0x81, 0xFF, 0xFF, 0xFA, 0x72, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x03, 0xE8, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x7F, 0xFF, 0xFF, 0xFE, 0x80, 0x01
}
,
{
0x00, 0x1B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x3E, 0x03, 0x30, 0x40, 0x00, 0x00, 0x00, 0x02, 0xCA, 0xE3, 0x09, 0x3E, 0x80, 0x00, 0x00
}
,
{
0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00
}
,
{
0x41, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0B, 0x2A, 0x00, 0x00, 0x16, 0x55, 0x00, 0x00, 0x21, 0x82
}
,
{
0xFD, 0x87, 0x26, 0x50, 0xFD, 0x80, 0x00, 0x00, 0x00, 0x1F, 0x00, 0x00, 0x00, 0x05, 0x80, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00
}
,
{
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x6F, 0x00, 0x02, 0x65, 0x32, 0x00, 0x00, 0x5E, 0xC0
}
,
{
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0xFB, 0x8C, 0x6F, 0x5D, 0xFD, 0x5D, 0x08, 0xD9, 0x00, 0x7C, 0x73, 0x3B, 0x00, 0x6C, 0x12, 0xCC
}
,
{
0x32, 0x00, 0x13, 0x9D, 0x32, 0x00, 0xD0, 0xD6, 0x32, 0x00, 0x08, 0x00, 0x40, 0x00, 0x01, 0xF4
}
,
{
0xFF, 0xE6, 0x80, 0x79, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0xD0, 0xD6, 0x00, 0x00, 0x27, 0x10
}
}
,
{
{
0xFB, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0xFA, 0x36, 0xFF, 0xBC, 0x30, 0x8E, 0x00, 0x05, 0xFB, 0xF0, 0xFF, 0xD9, 0x5B, 0xC8
}
,
{
0xFF, 0xD0, 0x9A, 0xBE, 0x00, 0x00, 0x10, 0xA9, 0xFF, 0xF4, 0x1E, 0xB2, 0x00, 0xCE, 0xBB, 0xF7
}
,
{
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x02, 0x00, 0x02, 0x02, 0x00, 0x00, 0x0C
}
,
{
0xFF, 0xC2, 0x80, 0x00, 0x00, 0x01, 0x80, 0x00, 0x00, 0xCF, 0x80, 0x00, 0x40, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x14
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x03, 0x3F, 0x68, 0xB6, 0x79, 0x35, 0x28, 0xBC, 0xC6, 0x7E, 0xD1, 0x6C
}
,
{
0x80, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0xB2, 0x6A, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, 0xF0, 0x00, 0x00, 0x00, 0x30
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x25, 0x4D, 0x00, 0x2F, 0x70, 0x6D, 0x00, 0x00, 0x05, 0xAE, 0x00, 0x0C, 0x02, 0xD0
}
}
,
{
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x65, 0x00, 0x54, 0xFF, 0xEF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x01, 0x00, 0x00, 0x44, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x01, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x65, 0x00, 0x00, 0x00, 0x54, 0x00, 0x00, 0xFF, 0xEF, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x1B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00
}
,
{
0x00, 0x1B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
,
{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
}
}
,
{
{
0xD8, 0xDC, 0xBA, 0xA2, 0xF1, 0xDE, 0xB2, 0xB8, 0xB4, 0xA8, 0x81, 0x91, 0xF7, 0x4A, 0x90, 0x7F
}
,
{
0x91, 0x6A, 0xF3, 0xF9, 0xDB, 0xA8, 0xF9, 0xB0, 0xBA, 0xA0, 0x80, 0xF2, 0xCE, 0x81, 0xF3, 0xC2
}
,
{
0xF1, 0xC1, 0xF2, 0xC3, 0xF3, 0xCC, 0xA2, 0xB2, 0x80, 0xF1, 0xC6, 0xD8, 0x80, 0xBA, 0xA7, 0xDF
}
,
{
0xDF, 0xDF, 0xF2, 0xA7, 0xC3, 0xCB, 0xC5, 0xB6, 0xF0, 0x87, 0xA2, 0x94, 0x24, 0x48, 0x70, 0x3C
}
,
{
0x95, 0x40, 0x68, 0x34, 0x58, 0x9B, 0x78, 0xA2, 0xF1, 0x83, 0x92, 0x2D, 0x55, 0x7D, 0xD8, 0xB1
}
,
{
0xB4, 0xB8, 0xA1, 0xD0, 0x91, 0x80, 0xF2, 0x70, 0xF3, 0x70, 0xF2, 0x7C, 0x80, 0xA8, 0xF1, 0x01
}
,
{
0xB0, 0x98, 0x87, 0xD9, 0x43, 0xD8, 0x86, 0xC9, 0x88, 0xBA, 0xA1, 0xF2, 0x0E, 0xB8, 0x97, 0x80
}
,
{
0xF1, 0xA9, 0xDF, 0xDF, 0xDF, 0xAA, 0xDF, 0xDF, 0xDF, 0xF2, 0xAA, 0xC5, 0xCD, 0xC7, 0xA9, 0x0C
}
,
{
0xC9, 0x2C, 0x97, 0x97, 0x97, 0x97, 0xF1, 0xA9, 0x89, 0x26, 0x46, 0x66, 0xB0, 0xB4, 0xBA, 0x80
}
,
{
0xAC, 0xDE, 0xF2, 0xCA, 0xF1, 0xB2, 0x8C, 0x02, 0xA9, 0xB6, 0x98, 0x00, 0x89, 0x0E, 0x16, 0x1E
}
,
{
0xB8, 0xA9, 0xB4, 0x99, 0x2C, 0x54, 0x7C, 0xB0, 0x8A, 0xA8, 0x96, 0x36, 0x56, 0x76, 0xF1, 0xB9
}
,
{
0xAF, 0xB4, 0xB0, 0x83, 0xC0, 0xB8, 0xA8, 0x97, 0x11, 0xB1, 0x8F, 0x98, 0xB9, 0xAF, 0xF0, 0x24
}
,
{
0x08, 0x44, 0x10, 0x64, 0x18, 0xF1, 0xA3, 0x29, 0x55, 0x7D, 0xAF, 0x83, 0xB5, 0x93, 0xAF, 0xF0
}
,
{
0x00, 0x28, 0x50, 0xF1, 0xA3, 0x86, 0x9F, 0x61, 0xA6, 0xDA, 0xDE, 0xDF, 0xD9, 0xFA, 0xA3, 0x86
}
,
{
0x96, 0xDB, 0x31, 0xA6, 0xD9, 0xF8, 0xDF, 0xBA, 0xA6, 0x8F, 0xC2, 0xC5, 0xC7, 0xB2, 0x8C, 0xC1
}
,
{
0xB8, 0xA2, 0xDF, 0xDF, 0xDF, 0xA3, 0xDF, 0xDF, 0xDF, 0xD8, 0xD8, 0xF1, 0xB8, 0xA8, 0xB2, 0x86
}
}
,
{
{
0xB4, 0x98, 0x0D, 0x35, 0x5D, 0xB8, 0xAA, 0x98, 0xB0, 0x87, 0x2D, 0x35, 0x3D, 0xB2, 0xB6, 0xBA
}
,
{
0xAF, 0x8C, 0x96, 0x19, 0x8F, 0x9F, 0xA7, 0x0E, 0x16, 0x1E, 0xB4, 0x9A, 0xB8, 0xAA, 0x87, 0x2C
}
,
{
0x54, 0x7C, 0xB9, 0xA3, 0xDE, 0xDF, 0xDF, 0xA3, 0xB1, 0x80, 0xF2, 0xC4, 0xCD, 0xC9, 0xF1, 0xB8
}
,
{
0xA9, 0xB4, 0x99, 0x83, 0x0D, 0x35, 0x5D, 0x89, 0xB9, 0xA3, 0x2D, 0x55, 0x7D, 0xB5, 0x93, 0xA3
}
,
{
0x0E, 0x16, 0x1E, 0xA9, 0x2C, 0x54, 0x7C, 0xB8, 0xB4, 0xB0, 0xF1, 0x97, 0x83, 0xA8, 0x11, 0x84
}
,
{
0xA5, 0x09, 0x98, 0xA3, 0x83, 0xF0, 0xDA, 0x24, 0x08, 0x44, 0x10, 0x64, 0x18, 0xD8, 0xF1, 0xA5
}
,
{
0x29, 0x55, 0x7D, 0xA5, 0x85, 0x95, 0x02, 0x1A, 0x2E, 0x3A, 0x56, 0x5A, 0x40, 0x48, 0xF9, 0xF3
}
,
{
0xA3, 0xD9, 0xF8, 0xF0, 0x98, 0x83, 0x24, 0x08, 0x44, 0x10, 0x64, 0x18, 0x97, 0x82, 0xA8, 0xF1
}
,
{
0x11, 0xF0, 0x98, 0xA2, 0x24, 0x08, 0x44, 0x10, 0x64, 0x18, 0xDA, 0xF3, 0xDE, 0xD8, 0x83, 0xA5
}
,
{
0x94, 0x01, 0xD9, 0xA3, 0x02, 0xF1, 0xA2, 0xC3, 0xC5, 0xC7, 0xD8, 0xF1, 0x84, 0x92, 0xA2, 0x4D
}
,
{
0xDA, 0x2A, 0xD8, 0x48, 0x69, 0xD9, 0x2A, 0xD8, 0x68, 0x55, 0xDA, 0x32, 0xD8, 0x50, 0x71, 0xD9
}
,
{
0x32, 0xD8, 0x70, 0x5D, 0xDA, 0x3A, 0xD8, 0x58, 0x79, 0xD9, 0x3A, 0xD8, 0x78, 0x93, 0xA3, 0x4D
}
,
{
0xDA, 0x2A, 0xD8, 0x48, 0x69, 0xD9, 0x2A, 0xD8, 0x68, 0x55, 0xDA, 0x32, 0xD8, 0x50, 0x71, 0xD9
}
,
{
0x32, 0xD8, 0x70, 0x5D, 0xDA, 0x3A, 0xD8, 0x58, 0x79, 0xD9, 0x3A, 0xD8, 0x78, 0xA8, 0x8A, 0x9A
}
,
{
0xF0, 0x28, 0x50, 0x78, 0x9E, 0xF3, 0x88, 0x18, 0xF1, 0x9F, 0x1D, 0x98, 0xA8, 0xD9, 0x08, 0xD8
}
,
{
0xC8, 0x9F, 0x12, 0x9E, 0xF3, 0x15, 0xA8, 0xDA, 0x12, 0x10, 0xD8, 0xF1, 0xAF, 0xC8, 0x97, 0x87
}
}
,
{
{
0x34, 0xB5, 0xB9, 0x94, 0xA4, 0x21, 0xF3, 0xD9, 0x22, 0xD8, 0xF2, 0x2D, 0xF3, 0xD9, 0x2A, 0xD8
}
,
{
0xF2, 0x35, 0xF3, 0xD9, 0x32, 0xD8, 0x81, 0xA4, 0x60, 0x60, 0x61, 0xD9, 0x61, 0xD8, 0x6C, 0x68
}
,
{
0x69, 0xD9, 0x69, 0xD8, 0x74, 0x70, 0x71, 0xD9, 0x71, 0xD8, 0xB1, 0xA3, 0x84, 0x19, 0x3D, 0x5D
}
,
{
0xA3, 0x83, 0x1A, 0x3E, 0x5E, 0x93, 0x10, 0x30, 0x81, 0x10, 0x11, 0xB8, 0xB0, 0xAF, 0x8F, 0x94
}
,
{
0xF2, 0xDA, 0x3E, 0xD8, 0xB4, 0x9A, 0xA8, 0x87, 0x29, 0xDA, 0xF8, 0xD8, 0x87, 0x9A, 0x35, 0xDA
}
,
{
0xF8, 0xD8, 0x87, 0x9A, 0x3D, 0xDA, 0xF8, 0xD8, 0xB1, 0xB9, 0xA4, 0x98, 0x85, 0x02, 0x2E, 0x56
}
,
{
0xA5, 0x81, 0x00, 0x0C, 0x14, 0xA3, 0x97, 0xB0, 0x8A, 0xF1, 0x2D, 0xD9, 0x28, 0xD8, 0x4D, 0xD9
}
,
{
0x48, 0xD8, 0x6D, 0xD9, 0x68, 0xD8, 0xB1, 0x84, 0x0D, 0xDA, 0x0E, 0xD8, 0xA3, 0x29, 0x83, 0xDA
}
,
{
0x2C, 0x0E, 0xD8, 0xA3, 0x84, 0x49, 0x83, 0xDA, 0x2C, 0x4C, 0x0E, 0xD8, 0xB8, 0xB0, 0xA8, 0x8A
}
,
{
0x9A, 0xF5, 0x20, 0xAA, 0xDA, 0xDF, 0xD8, 0xA8, 0x40, 0xAA, 0xD0, 0xDA, 0xDE, 0xD8, 0xA8, 0x60
}
,
{
0xAA, 0xDA, 0xD0, 0xDF, 0xD8, 0xF1, 0x97, 0x86, 0xA8, 0x31, 0x9B, 0x06, 0x99, 0x07, 0xAB, 0x97
}
,
{
0x28, 0x88, 0x9B, 0xF0, 0x0C, 0x20, 0x14, 0x40, 0xB8, 0xB0, 0xB4, 0xA8, 0x8C, 0x9C, 0xF0, 0x04
}
,
{
0x28, 0x51, 0x79, 0x1D, 0x30, 0x14, 0x38, 0xB2, 0x82, 0xAB, 0xD0, 0x98, 0x2C, 0x50, 0x50, 0x78
}
,
{
0x78, 0x9B, 0xF1, 0x1A, 0xB0, 0xF0, 0x8A, 0x9C, 0xA8, 0x29, 0x51, 0x79, 0x8B, 0x29, 0x51, 0x79
}
,
{
0x8A, 0x24, 0x70, 0x59, 0x8B, 0x20, 0x58, 0x71, 0x8A, 0x44, 0x69, 0x38, 0x8B, 0x39, 0x40, 0x68
}
,
{
0x8A, 0x64, 0x48, 0x31, 0x8B, 0x30, 0x49, 0x60, 0xA5, 0x88, 0x20, 0x09, 0x71, 0x58, 0x44, 0x68
}
}
,
{
{
0x11, 0x39, 0x64, 0x49, 0x30, 0x19, 0xF1, 0xAC, 0x00, 0x2C, 0x54, 0x7C, 0xF0, 0x8C, 0xA8, 0x04
}
,
{
0x28, 0x50, 0x78, 0xF1, 0x88, 0x97, 0x26, 0xA8, 0x59, 0x98, 0xAC, 0x8C, 0x02, 0x26, 0x46, 0x66
}
,
{
0xF0, 0x89, 0x9C, 0xA8, 0x29, 0x51, 0x79, 0x24, 0x70, 0x59, 0x44, 0x69, 0x38, 0x64, 0x48, 0x31
}
,
{
0xA9, 0x88, 0x09, 0x20, 0x59, 0x70, 0xAB, 0x11, 0x38, 0x40, 0x69, 0xA8, 0x19, 0x31, 0x48, 0x60
}
,
{
0x8C, 0xA8, 0x3C, 0x41, 0x5C, 0x20, 0x7C, 0x00, 0xF1, 0x87, 0x98, 0x19, 0x86, 0xA8, 0x6E, 0x76
}
,
{
0x7E, 0xA9, 0x99, 0x88, 0x2D, 0x55, 0x7D, 0x9E, 0xB9, 0xA3, 0x8A, 0x22, 0x8A, 0x6E, 0x8A, 0x56
}
,
{
0x8A, 0x5E, 0x9F, 0xB1, 0x83, 0x06, 0x26, 0x46, 0x66, 0x0E, 0x2E, 0x4E, 0x6E, 0x9D, 0xB8, 0xAD
}
,
{
0x00, 0x2C, 0x54, 0x7C, 0xF2, 0xB1, 0x8C, 0xB4, 0x99, 0xB9, 0xA3, 0x2D, 0x55, 0x7D, 0x81, 0x91
}
,
{
0xAC, 0x38, 0xAD, 0x3A, 0xB5, 0x83, 0x91, 0xAC, 0x2D, 0xD9, 0x28, 0xD8, 0x4D, 0xD9, 0x48, 0xD8
}
,
{
0x6D, 0xD9, 0x68, 0xD8, 0x8C, 0x9D, 0xAE, 0x29, 0xD9, 0x04, 0xAE, 0xD8, 0x51, 0xD9, 0x04, 0xAE
}
,
{
0xD8, 0x79, 0xD9, 0x04, 0xD8, 0x81, 0xF3, 0x9D, 0xAD, 0x00, 0x8D, 0xAE, 0x19, 0x81, 0xAD, 0xD9
}
,
{
0x01, 0xD8, 0xF2, 0xAE, 0xDA, 0x26, 0xD8, 0x8E, 0x91, 0x29, 0x83, 0xA7, 0xD9, 0xAD, 0xAD, 0xAD
}
,
{
0xAD, 0xF3, 0x2A, 0xD8, 0xD8, 0xF1, 0xB0, 0xAC, 0x89, 0x91, 0x3E, 0x5E, 0x76, 0xF3, 0xAC, 0x2E
}
,
{
0x2E, 0xF1, 0xB1, 0x8C, 0x5A, 0x9C, 0xAC, 0x2C, 0x28, 0x28, 0x28, 0x9C, 0xAC, 0x30, 0x18, 0xA8
}
,
{
0x98, 0x81, 0x28, 0x34, 0x3C, 0x97, 0x24, 0xA7, 0x28, 0x34, 0x3C, 0x9C, 0x24, 0xF2, 0xB0, 0x89
}
,
{
0xAC, 0x91, 0x2C, 0x4C, 0x6C, 0x8A, 0x9B, 0x2D, 0xD9, 0xD8, 0xD8, 0x51, 0xD9, 0xD8, 0xD8, 0x79
}
}
,
{
{
0xD9, 0xD8, 0xD8, 0xF1, 0x9E, 0x88, 0xA3, 0x31, 0xDA, 0xD8, 0xD8, 0x91, 0x2D, 0xD9, 0x28, 0xD8
}
,
{
0x4D, 0xD9, 0x48, 0xD8, 0x6D, 0xD9, 0x68, 0xD8, 0xB1, 0x83, 0x93, 0x35, 0x3D, 0x80, 0x25, 0xDA
}
,
{
0xD8, 0xD8, 0x85, 0x69, 0xDA, 0xD8, 0xD8, 0xB4, 0x93, 0x81, 0xA3, 0x28, 0x34, 0x3C, 0xF3, 0xAB
}
,
{
0x8B, 0xF8, 0xA3, 0x91, 0xB6, 0x09, 0xB4, 0xD9, 0xAB, 0xDE, 0xFA, 0xB0, 0x87, 0x9C, 0xB9, 0xA3
}
,
{
0xDD, 0xF1, 0xA3, 0xA3, 0xA3, 0xA3, 0x95, 0xF1, 0xA3, 0xA3, 0xA3, 0x9D, 0xF1, 0xA3, 0xA3, 0xA3
}
,
{
0xA3, 0xF2, 0xA3, 0xB4, 0x90, 0x80, 0xF2, 0xA3, 0xA3, 0xA3, 0xA3, 0xA3, 0xA3, 0xA3, 0xA3, 0xA3
}
,
{
0xA3, 0xB2, 0xA3, 0xA3, 0xA3, 0xA3, 0xA3, 0xA3, 0xB0, 0x87, 0xB5, 0x99, 0xF1, 0xA3, 0xA3, 0xA3
}
,
{
0x98, 0xF1, 0xA3, 0xA3, 0xA3, 0xA3, 0x97, 0xA3, 0xA3, 0xA3, 0xA3, 0xF3, 0x9B, 0xA3, 0xA3, 0xDC
}
,
{
0xB9, 0xA7, 0xF1, 0x26, 0x26, 0x26, 0xD8, 0xD8, 0xFF
}
}
};
| andybarry/ardupilot | libraries/AP_InertialSensor/AP_InertialSensor_MPU6000.cpp | C++ | gpl-3.0 | 60,871 |
<?php
/**
* @package Mautic
* @copyright 2014 Mautic Contributors. All rights reserved.
* @author Mautic
* @link http://mautic.org
* @license GNU/GPLv3 http://www.gnu.org/licenses/gpl-3.0.html
*/
namespace MauticPlugin\MauticCrmBundle;
use Mautic\PluginBundle\Bundle\PluginBundleBase;
/**
* Class MauticCrmBundle
*
* @package MauticPlugin\MauticCrmBundle
*/
class MauticCrmBundle extends PluginBundleBase
{
} | viniciusferreira/mautic | plugins/MauticCrmBundle/MauticCrmBundle.php | PHP | gpl-3.0 | 444 |
<?php
namespace Neos\ContentRepository\Tests\Functional\Domain\Fixtures;
/*
* This file is part of the Neos.ContentRepository package.
*
* (c) Contributors of the Neos Project - www.neos.io
*
* This package is Open Source Software. For the full copyright and license
* information, please view the LICENSE file which was distributed with this
* source code.
*/
use Neos\ContentRepository\Domain\Model\Node;
/**
* A happier node than the default node that can clap hands to show it!
*/
class HappyNode extends Node
{
/**
* @return string
*/
public function clapsHands()
{
return $this->getName() . ' claps hands!';
}
}
| neos/typo3cr | Tests/Functional/Domain/Fixtures/HappyNode.php | PHP | gpl-3.0 | 665 |
/*=====================================================================
QGroundControl Open Source Ground Control Station
(c) 2009 - 2014 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
This file is part of the QGROUNDCONTROL project
QGROUNDCONTROL is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
QGROUNDCONTROL is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with QGROUNDCONTROL. If not, see <http://www.gnu.org/licenses/>.
======================================================================*/
/// @file
/// @brief Command line option parser
///
/// @author Don Gagne <don@thegagnes.com>
#include "CmdLineOptParser.h"
#include <QString>
/// @brief Implements a simple command line parser which sets booleans to true if the option is found.
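///
/// Example usage (illustrative sketch only; it assumes the CmdLineOpt_t members
/// are declared in the order optionStr, optionFound, optionArg as used below):
/// @code
///     bool quietMode = false;
///     CmdLineOpt_t rgOpts[] = {
///         { "--quiet", &quietMode, QString() },
///     };
///     ParseCmdLineOptions(argc, argv, rgOpts, sizeof(rgOpts)/sizeof(rgOpts[0]), true);
///     // quietMode is now true if "--quiet" (or "--quiet:<value>") was passed
/// @endcode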
void ParseCmdLineOptions(int& argc, ///< count of arguments in argv
char* argv[], ///< command line arguments
CmdLineOpt_t* prgOpts, ///< command line options
size_t cOpts, ///< count of command line options
bool removeParsedOptions) ///< true: remove parsed option from argc/argv
{
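// an option matches either exactly ("--foo") or with a colon-separated
// argument ("--foo:value"); comparisons are case-insensitive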
// Start with all options off
for (size_t iOption=0; iOption<cOpts; iOption++) {
*prgOpts[iOption].optionFound = false;
}
for (int iArg=1; iArg<argc; iArg++) {
for (size_t iOption=0; iOption<cOpts; iOption++) {
bool found = false;
QString arg(argv[iArg]);
QString optionStr(prgOpts[iOption].optionStr);
if (arg.startsWith(QString("%1:").arg(optionStr), Qt::CaseInsensitive)) {
found = true;
prgOpts[iOption].optionArg = arg.right(arg.length() - (optionStr.length() + 1));
} else if (arg.compare(optionStr, Qt::CaseInsensitive) == 0) {
found = true;
}
if (found) {
*prgOpts[iOption].optionFound = true;
if (removeParsedOptions) {
for (int iShift=iArg; iShift<argc-1; iShift++) {
argv[iShift] = argv[iShift+1];
}
argc--;
iArg--;
}
}
}
}
}
| caoxiongkun/qgroundcontrol | src/CmdLineOptParser.cc | C++ | agpl-3.0 | 2,813 |