content_type
stringclasses 8
values | main_lang
stringclasses 7
values | message
stringlengths 1
50
| sha
stringlengths 40
40
| patch
stringlengths 52
962k
| file_count
int64 1
300
|
---|---|---|---|---|---|
PHP | PHP | add dsn parsing to staticconfigtrait | 2ee6448a32f31e3c47b8044c50d7d8af151d6069 | <ide><path>src/Core/StaticConfigTrait.php
<ide> public static function config($key, $config = null) {
<ide> if (isset(static::$_config[$key])) {
<ide> throw new BadMethodCallException(sprintf('Cannot reconfigure existing key "%s"', $key));
<ide> }
<del> if (is_object($config)) {
<add> if (is_array($config)) {
<add> $config = static::parseDsn($config);
<add> } elseif ($config === null && is_array($key)) {
<add> foreach ($key as $name => $settings) {
<add> $key[$name] = static::parseDsn($settings);
<add> }
<add> } elseif (is_object($config)) {
<ide> $config = ['className' => $config];
<ide> }
<ide> if (isset($config['engine']) && empty($config['className'])) {
<ide> public static function configured() {
<ide> return array_keys(static::$_config);
<ide> }
<ide>
<add>/**
<add> * Parses a dsn into a valid connection configuration
<add> *
<add> * This method allows setting a dsn using PEAR::DB formatting, with added support for drivers
<add> * in the SQLAlchemy format. The following is an example of it's usage:
<add> *
<add> * {{{
<add> * $dsn = 'mysql+Cake\Database\Driver\Mysql://user:password@localhost:3306/database_name';
<add> * $config = ConnectionManager::parseDsn($dsn);
<add> * }}
<add> *
<add> * If an array is given, the parsed dsn will be merged into this array. Note that querystring
<add> * arguments are also parsed and set as values in the returned configuration.
<add> *
<add> * @param array $key An array with a `dsn` key mapping to a string dsn
<add> * @return mixed null when adding configuration and an array of configuration data when reading.
<add> */
<add> public static function parseDsn($config) {
<add> if (!is_array($config) || !isset($config['dsn'])) {
<add> return $config;
<add> }
<add>
<add> $driver = null;
<add> $dsn = $config['dsn'];
<add> unset($config['dsn']);
<add>
<add> if (preg_match("/^([\w]+)\+([\w\\\]+)/", $dsn, $matches)) {
<add> $scheme = $matches[1];
<add> $driver = $matches[2];
<add> $dsn = preg_replace("/^([\w]+)\+([\w\\\]+)/", $scheme, $dsn);
<add> }
<add>
<add> $parsed = parse_url($dsn);
<add> $query = '';
<add>
<add> if (isset($parsed['query'])) {
<add> $query = $parsed['query'];
<add> unset($parsed['query']);
<add> }
<add>
<add> parse_str($query, $queryArgs);
<add>
<add> if ($driver !== null) {
<add> $queryArgs['driver'] = $driver;
<add> }
<add>
<add> $config = array_merge($queryArgs, $parsed, $config);
<add>
<add> foreach ($config as $key => $value) {
<add> if ($value === 'true') {
<add> $config[$key] = true;
<add> } elseif ($value === 'false') {
<add> $config[$key] = false;
<add> }
<add> }
<add>
<add> return $config;
<add> }
<add>
<ide> } | 1 |
Go | Go | remove unused imports | 276d2bbf1d400415bec8d4652ac61e570b9206e3 | <ide><path>term/term.go
<ide> package term
<ide>
<ide> import (
<del> "fmt"
<del> "io"
<ide> "os"
<ide> "os/signal"
<ide> "syscall" | 1 |
Go | Go | log stderr on failures | 63f9c7784b7c6a726c8c668d68f9c8cb13e19ffb | <ide><path>builder/dockerfile/evaluator_test.go
<ide> package dockerfile // import "github.com/docker/docker/builder/dockerfile"
<ide>
<ide> import (
<ide> "os"
<add> "runtime"
<ide> "testing"
<ide>
<ide> "github.com/docker/docker/builder/remotecontext"
<ide> func initDispatchTestCases() []dispatchTestCase {
<ide> }
<ide>
<ide> func TestDispatch(t *testing.T) {
<del> skip.If(t, os.Getuid() != 0, "skipping test that requires root")
<add> if runtime.GOOS != "windows" {
<add> skip.If(t, os.Getuid() != 0, "skipping test that requires root")
<add> }
<ide> testCases := initDispatchTestCases()
<ide>
<ide> for _, testCase := range testCases {
<ide><path>builder/dockerfile/internals_test.go
<ide> func TestDockerfileOutsideTheBuildContext(t *testing.T) {
<ide> defer cleanup()
<ide>
<ide> expectedError := "Forbidden path outside the build context: ../../Dockerfile ()"
<add> if runtime.GOOS == "windows" {
<add> expectedError = "failed to resolve scoped path ../../Dockerfile ()"
<add> }
<ide>
<ide> readAndCheckDockerfile(t, "DockerfileOutsideTheBuildContext", contextDir, "../../Dockerfile", expectedError)
<ide> }
<ide> func TestNonExistingDockerfile(t *testing.T) {
<ide> }
<ide>
<ide> func readAndCheckDockerfile(t *testing.T, testName, contextDir, dockerfilePath, expectedError string) {
<del> skip.If(t, os.Getuid() != 0, "skipping test that requires root")
<add> if runtime.GOOS != "windows" {
<add> skip.If(t, os.Getuid() != 0, "skipping test that requires root")
<add> }
<ide> tarStream, err := archive.Tar(contextDir, archive.Uncompressed)
<ide> assert.NilError(t, err)
<ide>
<ide> func readAndCheckDockerfile(t *testing.T, testName, contextDir, dockerfilePath,
<ide> Source: tarStream,
<ide> }
<ide> _, _, err = remotecontext.Detect(config)
<del> assert.Check(t, is.Error(err, expectedError))
<add> assert.Check(t, is.ErrorContains(err, expectedError))
<ide> }
<ide>
<ide> func TestCopyRunConfig(t *testing.T) {
<ide><path>daemon/graphdriver/lcow/lcow_svm.go
<ide> package lcow // import "github.com/docker/docker/daemon/graphdriver/lcow"
<ide>
<ide> import (
<ide> "bytes"
<del> "errors"
<ide> "fmt"
<ide> "io"
<ide> "strings"
<ide> import (
<ide>
<ide> "github.com/Microsoft/hcsshim"
<ide> "github.com/Microsoft/opengcs/client"
<add> "github.com/pkg/errors"
<ide> "github.com/sirupsen/logrus"
<ide> )
<ide>
<ide> func (svm *serviceVM) createUnionMount(mountName string, mvds ...hcsshim.MappedV
<ide> }
<ide>
<ide> logrus.Debugf("Doing the overlay mount with union directory=%s", mountName)
<del> if err = svm.runProcess(fmt.Sprintf("mkdir -p %s", mountName), nil, nil, nil); err != nil {
<del> return err
<add> errOut := &bytes.Buffer{}
<add> if err = svm.runProcess(fmt.Sprintf("mkdir -p %s", mountName), nil, nil, errOut); err != nil {
<add> return errors.Wrapf(err, "mkdir -p %s failed (%s)", mountName, errOut.String())
<ide> }
<ide>
<ide> var cmd string
<ide> func (svm *serviceVM) createUnionMount(mountName string, mvds ...hcsshim.MappedV
<ide> upper := fmt.Sprintf("%s/upper", svm.getShortContainerPath(&mvds[0]))
<ide> work := fmt.Sprintf("%s/work", svm.getShortContainerPath(&mvds[0]))
<ide>
<del> if err = svm.runProcess(fmt.Sprintf("mkdir -p %s %s", upper, work), nil, nil, nil); err != nil {
<del> return err
<add> errOut := &bytes.Buffer{}
<add> if err = svm.runProcess(fmt.Sprintf("mkdir -p %s %s", upper, work), nil, nil, errOut); err != nil {
<add> return errors.Wrapf(err, "mkdir -p %s failed (%s)", mountName, errOut.String())
<ide> }
<ide>
<ide> cmd = fmt.Sprintf("mount -t overlay overlay -olowerdir=%s,upperdir=%s,workdir=%s %s",
<ide> func (svm *serviceVM) createUnionMount(mountName string, mvds ...hcsshim.MappedV
<ide> }
<ide>
<ide> logrus.Debugf("createUnionMount: Executing mount=%s", cmd)
<del> if err = svm.runProcess(cmd, nil, nil, nil); err != nil {
<del> return err
<add> errOut = &bytes.Buffer{}
<add> if err = svm.runProcess(cmd, nil, nil, errOut); err != nil {
<add> return errors.Wrapf(err, "%s failed (%s)", cmd, errOut.String())
<ide> }
<ide>
<ide> svm.unionMounts[mountName] = 1 | 3 |
Ruby | Ruby | use test dsl | b1241bb3a1971bb0cdba2e0f1fef6ad046b5f109 | <ide><path>Library/Homebrew/cmd/create.rb
<ide> def install
<ide> system "make install" # if this fails, try separate make/make install steps
<ide> end
<ide>
<del> def test
<add> test do
<add> # `test do` will create, run in and delete a temporary directory.
<add> #
<ide> # This test will fail and we won't accept that! It's enough to just replace
<ide> # "false" with the main program this formula installs, but it'd be nice if you
<ide> # were more thorough. Run the test with `brew test #{name}`. | 1 |
Javascript | Javascript | remove some redundant imports | 993b78de0bab6024a8ac48ff6502e7e409b10710 | <ide><path>examples/with-three-js/pages/_app.js
<del>import React from 'react'
<ide> import './index.css'
<ide>
<ide> function MyApp({ Component, pageProps }) {
<ide><path>examples/with-three-js/pages/birds.js
<del>import React, { useRef, useState, useEffect, Suspense } from 'react'
<add>import { useRef, useState, useEffect, Suspense } from 'react'
<ide> import * as THREE from 'three'
<ide> import { Canvas, useFrame, useLoader } from 'react-three-fiber'
<ide>
<ide><path>examples/with-three-js/pages/boxes.js
<del>import React, { useRef, useState, Suspense } from 'react'
<add>import { useRef, useState, Suspense } from 'react'
<ide> import { Canvas, useFrame } from 'react-three-fiber'
<ide>
<ide> const Box = (props) => {
<ide><path>examples/with-three-js/pages/index.js
<del>import React from 'react'
<ide> import Link from 'next/link'
<ide>
<ide> const Index = () => {
<ide><path>examples/with-typestyle/pages/index.js
<del>import React from 'react'
<ide> import { style } from 'typestyle'
<ide>
<ide> const className = style({ color: 'red' })
<ide><path>examples/with-universal-configuration-runtime/pages/index.js
<del>import React from 'react'
<ide> import getConfig from 'next/config'
<ide>
<ide> const { publicRuntimeConfig } = getConfig()
<ide><path>examples/with-videojs/components/Player.js
<del>import React, { Component } from 'react'
<add>import { Component } from 'react'
<ide> import videojs from 'video.js'
<ide> import 'videojs-youtube'
<ide>
<ide><path>examples/with-videojs/pages/index.js
<del>import React from 'react'
<add>import { Component } from 'react'
<ide> import Player from '../components/Player'
<ide>
<del>export default class Index extends React.Component {
<add>export default class Index extends Component {
<ide> render() {
<ide> const videoJsOptions = {
<ide> techOrder: ['youtube'],
<ide><path>examples/with-why-did-you-render/components/header.js
<del>import React, { useState, useEffect } from 'react'
<add>import { useState, useEffect } from 'react'
<ide>
<ide> const Header = () => {
<ide> const [objState, setObjState] = useState({ name: 'World' })
<ide><path>examples/with-yarn-workspaces/packages/bar/index.js
<del>import React from 'react'
<del>
<ide> const Bar = () => <strong>bar</strong>
<ide>
<ide> export default Bar
<ide><path>examples/with-zeit-fetch/pages/preact.js
<del>import React from 'react'
<ide> import Link from 'next/link'
<ide> import fetch from '../fetch'
<ide> | 11 |
Javascript | Javascript | add test for setloop (looponce,looprepeat) | cc8d54c1a81e350f89d094a42e342508ec476fd0 | <ide><path>test/unit/src/animation/AnimationAction.tests.js
<ide> import { AnimationMixer } from '../../../../src/animation/AnimationMixer';
<ide> import { AnimationClip } from '../../../../src/animation/AnimationClip';
<ide> import { NumberKeyframeTrack } from '../../../../src/animation/tracks/NumberKeyframeTrack';
<ide> import { Object3D } from '../../../../src/core/Object3D';
<add>import { LoopOnce, LoopRepeat, LoopPingPong } from '../../../../src/constants';
<ide>
<ide>
<ide> function createAnimation(){
<ide> export default QUnit.module( 'Animation', () => {
<ide>
<ide> } );
<ide>
<del> QUnit.todo( "setLoop", ( assert ) => {
<del>
<del> assert.ok( false, "everything's gonna be alright" );
<add> QUnit.test( "setLoop LoopOnce", ( assert ) => {
<ide>
<add> var {mixer,animationAction} = createAnimation();
<add> animationAction.setLoop(LoopOnce);
<add> animationAction.play();
<add> assert.ok( animationAction.isRunning(), "When an animation is started, it is running." );
<add> mixer.update(500);
<add> assert.ok( animationAction.isRunning(), "When an animation is in the first loop, it is running." );
<add> mixer.update(500);
<add> assert.notOk( animationAction.isRunning(), "When an animation is ended, it is not running." );
<add> mixer.update(500);
<add> assert.notOk( animationAction.isRunning(), "When an animation is ended, it is not running." );
<add>
<add> } );
<add>
<add> QUnit.test( "setLoop LoopRepeat", ( assert ) => {
<add>
<add> var {mixer,animationAction} = createAnimation();
<add> animationAction.setLoop(LoopRepeat,3);
<add> animationAction.play();
<add> assert.ok( animationAction.isRunning(), "When an animation is started, it is running." );
<add> mixer.update(500);
<add> assert.ok( animationAction.isRunning(), "When an animation is in the first loop, it is running." );
<add> mixer.update(1000);
<add> assert.ok( animationAction.isRunning(), "When an animation is in second loop when in looprepeat 3 times, it is running." );
<add> mixer.update(1000);
<add> assert.ok( animationAction.isRunning(), "When an animation is in third loop when in looprepeat 3 times, it is running." );
<add> mixer.update(1000);
<add> assert.notOk( animationAction.isRunning(), "When an animation ended his third loop when in looprepeat 3 times, it is not running anymore." );
<add> mixer.update(1000);
<add> assert.notOk( animationAction.isRunning(), "When an animation ended his third loop when in looprepeat 3 times, it stays not running anymore." );
<add>
<ide> } );
<ide>
<ide> QUnit.todo( "setEffectiveWeight", ( assert ) => { | 1 |
Python | Python | add unit tests for oracleoperator | 3235670b058681c896ac107e13b5218e9ca930c7 | <ide><path>tests/providers/oracle/operators/__init__.py
<add># Licensed to the Apache Software Foundation (ASF) under one
<add># or more contributor license agreements. See the NOTICE file
<add># distributed with this work for additional information
<add># regarding copyright ownership. The ASF licenses this file
<add># to you under the Apache License, Version 2.0 (the
<add># "License"); you may not use this file except in compliance
<add># with the License. You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing,
<add># software distributed under the License is distributed on an
<add># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
<add># KIND, either express or implied. See the License for the
<add># specific language governing permissions and limitations
<add># under the License.
<ide><path>tests/providers/oracle/operators/test_oracle.py
<add># Licensed to the Apache Software Foundation (ASF) under one
<add># or more contributor license agreements. See the NOTICE file
<add># distributed with this work for additional information
<add># regarding copyright ownership. The ASF licenses this file
<add># to you under the Apache License, Version 2.0 (the
<add># "License"); you may not use this file except in compliance
<add># with the License. You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing,
<add># software distributed under the License is distributed on an
<add># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
<add># KIND, either express or implied. See the License for the
<add># specific language governing permissions and limitations
<add># under the License.
<add>
<add>import unittest
<add>
<add>import mock
<add>
<add>from airflow.providers.oracle.hooks.oracle import OracleHook
<add>from airflow.providers.oracle.operators.oracle import OracleOperator
<add>
<add>
<add>class TestOracleOperator(unittest.TestCase):
<add> @mock.patch.object(OracleHook, 'run')
<add> def test_execute(self, mock_run):
<add> sql = 'SELECT * FROM test_table'
<add> oracle_conn_id = 'oracle_default'
<add> parameters = {'parameter': 'value'}
<add> autocommit = False
<add> context = "test_context"
<add> task_id = "test_task_id"
<add>
<add> operator = OracleOperator(sql=sql, oracle_conn_id=oracle_conn_id, parameters=parameters,
<add> autocommit=autocommit, task_id=task_id)
<add> operator.execute(context=context)
<add>
<add> mock_run.assert_called_once_with(sql, autocommit=autocommit, parameters=parameters)
<ide><path>tests/test_project_structure.py
<ide> 'tests/providers/jenkins/hooks/test_jenkins.py',
<ide> 'tests/providers/microsoft/azure/sensors/test_azure_cosmos.py',
<ide> 'tests/providers/microsoft/mssql/hooks/test_mssql.py',
<del> 'tests/providers/oracle/operators/test_oracle.py',
<ide> 'tests/providers/qubole/hooks/test_qubole.py',
<ide> 'tests/providers/samba/hooks/test_samba.py',
<ide> 'tests/providers/yandex/hooks/test_yandex.py' | 3 |
Javascript | Javascript | fix |cleanup| regression in the viewer | 5f8373919037321aee05ce0fbaf1aa3fa30358a3 | <ide><path>web/pdf_viewer.js
<ide> var PDFViewer = (function pdfViewer() {
<ide> this.scroll.down);
<ide> if (pageView) {
<ide> this.renderingQueue.renderView(pageView);
<del> return;
<add> return true;
<ide> }
<add> return false;
<ide> },
<ide>
<ide> getPageTextContent: function (pageIndex) { | 1 |
Text | Text | add http working group | 2b1ecfe78c75cbd0818a21d7cf7570f7c1044498 | <ide><path>WORKING_GROUPS.md
<ide> back in to the TSC.
<ide> * [Addon API](#addon-api)
<ide> * [Benchmarking](#benchmarking)
<ide> * [Post-mortem](#post-mortem)
<add>* [Intl](#intl)
<add>* [HTTP](#http)
<add>
<add>#### Process:
<add>
<ide> * [Starting a Working Group](#starting-a-wg)
<ide> * [Bootstrap Governance](#bootstrap-governance)
<del>* [Intl](#Intl)
<ide>
<ide> ### [Website](https://github.com/nodejs/website)
<ide>
<ide> Their responsibilities are:
<ide> * Publishing regular update summaries and other promotional
<ide> content.
<ide>
<add>### [HTTP](https://github.com/nodejs/http)
<add>
<add>The HTTP working group is chartered for the support and improvement of the
<add>HTTP implementation in Node. It's responsibilities are:
<add>
<add>* Addressing HTTP issues on the Node.js issue tracker.
<add>* Authoring and editing HTTP documentation within the Node.js project.
<add>* Reviewing changes to HTTP functionality within the Node.js project.
<add>* Working with the ecosystem of HTTP related module developers to evolve the
<add> HTTP implementation and APIs in core.
<add>* Advising the CTC on all HTTP related issues and discussions.
<add>* Messaging about the future of HTTP to give the community advance notice of
<add> changes.
<ide>
<ide> ### [Roadmap](https://github.com/nodejs/roadmap)
<ide> | 1 |
Text | Text | add mscdex as collaborator | 01296923db7715fac43c5a8fbad6aeafa74c1a4a | <ide><path>README.md
<ide> information about the governance of the io.js project, see
<ide> * **Shigeki Ohtsu** ([@shigeki](https://github.com/shigeki)) <ohtsu@iij.ad.jp>
<ide> * **Sam Roberts** ([@sam-github](https://github.com/sam-github)) <vieuxtech@gmail.com>
<ide> * **Wyatt Preul** ([@geek](https://github.com/geek)) <wpreul@gmail.com>
<add>* **Brian White** ([@mscdex](https://github.com/mscdex)) <mscdex@mscdex.net>
<ide>
<ide> Collaborators follow the [COLLABORATOR_GUIDE.md](./COLLABORATOR_GUIDE.md) in
<ide> maintaining the io.js project. | 1 |
PHP | PHP | add test for auth.redirect session var clearing | 7becd58237ecea627c0cfe97cb36cea4b94ad319 | <ide><path>lib/Cake/Test/Case/Controller/Component/AuthComponentTest.php
<ide> public function setUp() {
<ide> $this->Auth = new TestAuthComponent($collection);
<ide> $this->Auth->request = $request;
<ide> $this->Auth->response = $this->getMock('CakeResponse');
<add> AuthComponent::$sessionKey = 'Auth.User';
<ide>
<ide> $this->Controller->Components->init($this->Controller);
<ide>
<ide> public function testLoginActionNotSettingAuthRedirect() {
<ide> $this->assertNull($redirect);
<ide> }
<ide>
<add>/**
<add> * testRedirectVarClearing method
<add> *
<add> * @return void
<add> */
<add> public function testRedirectVarClearing() {
<add> $this->Controller->request['controller'] = 'auth_test';
<add> $this->Controller->request['action'] = 'admin_add';
<add> $this->Controller->here = '/auth_test/admin_add';
<add> $this->assertNull($this->Auth->Session->read('Auth.redirect'));
<add>
<add> $this->Auth->authenticate = array('Form');
<add> $this->Auth->startup($this->Controller);
<add> $this->assertEquals('/auth_test/admin_add', $this->Auth->Session->read('Auth.redirect'));
<add>
<add> $this->Auth->Session->write('Auth.User', array('username' => 'admad'));
<add> $this->Auth->startup($this->Controller);
<add> $this->assertNull($this->Auth->Session->read('Auth.redirect'));
<add> }
<add>
<ide> /**
<ide> * testAuthorizeFalse method
<ide> * | 1 |
Go | Go | fix typo in deprecation message | bf14bacac3f1cae15ce7b32b5341122c305e630e | <ide><path>api/client/commands.go
<ide> func (cli *DockerCli) CmdImport(args ...string) error {
<ide> v.Set("repo", repository)
<ide>
<ide> if cmd.NArg() == 3 {
<del> fmt.Fprintf(cli.err, "[DEPRECATED] The format 'URL|- [REPOSITORY [TAG]]' as been deprecated. Please use URL|- [REPOSITORY[:TAG]]\n")
<add> fmt.Fprintf(cli.err, "[DEPRECATED] The format 'URL|- [REPOSITORY [TAG]]' has been deprecated. Please use URL|- [REPOSITORY[:TAG]]\n")
<ide> v.Set("tag", cmd.Arg(2))
<ide> }
<ide> | 1 |
Java | Java | improve constantfieldfeature compatibility | b64edebadc6e26abaaf670ef6eec18cf38aea1d3 | <ide><path>spring-core/graalvm/src/main/java/org/springframework/aot/graalvm/ConstantFieldFeature.java
<ide> private void duringSetup(DuringSetupAccessImpl access) {
<ide> DebugContext debug = access.getDebugContext();
<ide> try (DebugContext.Scope scope = debug.scope("ConstantFieldFeature.duringSetup")) {
<ide> debug.log("Installing constant field substitution processor : " + scope);
<del> ClassLoader applicationClassLoader = access.getApplicationClassLoader();
<add> ClassLoader classLoader = ConstantFieldFeature.class.getClassLoader();
<ide> ConstantFieldSubstitutionProcessor substitutionProcessor =
<del> new ConstantFieldSubstitutionProcessor(debug, applicationClassLoader);
<add> new ConstantFieldSubstitutionProcessor(debug, classLoader);
<ide> access.registerSubstitutionProcessor(substitutionProcessor);
<ide> }
<ide> }
<ide><path>spring-core/graalvm/src/main/java/org/springframework/aot/graalvm/ConstantReadableJavaField.java
<ide>
<ide> import java.lang.annotation.Annotation;
<ide>
<add>import com.oracle.graal.pointsto.infrastructure.WrappedElement;
<ide> import com.oracle.svm.core.meta.ReadableJavaField;
<ide> import jdk.vm.ci.meta.JavaConstant;
<ide> import jdk.vm.ci.meta.JavaType;
<ide> * @author Phillip Webb
<ide> * @since 6.0
<ide> */
<del>class ConstantReadableJavaField implements ReadableJavaField {
<add>class ConstantReadableJavaField implements ReadableJavaField, WrappedElement {
<ide>
<ide> private final ResolvedJavaField original;
<ide>
<ide> public boolean injectFinalForRuntimeCompilation() {
<ide> return true;
<ide> }
<ide>
<add> @Override
<add> public Object getWrapped() {
<add> return this.original;
<add> }
<ide> } | 2 |
Java | Java | add support for extra messageproducer method | c2da8467320660a98897e5f3a2b7cf1c637146b3 | <ide><path>spring-jms/src/main/java/org/springframework/jms/connection/CachedMessageProducer.java
<ide> public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl
<ide> else if (args.length == 3) {
<ide> return sendWithDestinationAndCompletionListenerMethod.invoke(
<ide> target, args[0], args[1], deliveryMode, priority, timeToLive, args[2]);
<add> } else if (args.length == 5) {
<add> return sendWithCompletionListenerMethod.invoke(
<add> target, args[0], args[1], args[2], args[3], args[4]);
<add> } else if (args.length == 6) {
<add> return sendWithDestinationAndCompletionListenerMethod.invoke(
<add> target, args[0], args[1], args[2], args[3], args[4], args[5]);
<ide> }
<ide> }
<ide> return method.invoke(CachedMessageProducer.this, args); | 1 |
Javascript | Javascript | improve ci stability | 87846747df5de34f1a49abfe587d7a2daf9ecfea | <ide><path>test/Compiler.test.js
<ide> describe("Compiler", () => {
<ide> throw stats.errors[0];
<ide> }
<ide> stats.logs = logs;
<del> callback(stats, files, compilation);
<add> c.close(err => {
<add> if (err) return callback(err);
<add> callback(stats, files, compilation);
<add> });
<ide> });
<ide> }
<ide>
<add> let compiler;
<add> afterEach(callback => {
<add> if (compiler) {
<add> compiler.close(callback);
<add> compiler = undefined;
<add> } else {
<add> callback();
<add> }
<add> });
<add>
<ide> it("should compile a single file to deep output", done => {
<ide> compile(
<ide> "./c",
<ide> describe("Compiler", () => {
<ide> }
<ide> });
<ide> });
<add> afterEach(callback => {
<add> if (compiler) {
<add> compiler.close(callback);
<add> compiler = undefined;
<add> } else {
<add> callback();
<add> }
<add> });
<ide> describe("purgeInputFileSystem", () => {
<ide> it("invokes purge() if inputFileSystem.purge", done => {
<ide> const mockPurge = jest.fn();
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should not emit on errors", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./missing",
<ide> describe("Compiler", () => {
<ide> resolve(stats);
<ide> }
<ide> });
<add> return c;
<ide> });
<ide> };
<del> const compiler = await createCompiler({
<add> compiler = await createCompiler({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./missing-file",
<ide> describe("Compiler", () => {
<ide> bail: true
<ide> });
<ide> done();
<del> return compiler;
<ide> } catch (err) {
<ide> expect(err.toString()).toMatch(
<ide> "ModuleNotFoundError: Module not found: Error: Can't resolve './missing-file'"
<ide> describe("Compiler", () => {
<ide> });
<ide> it("should not emit compilation errors in async (watch)", async done => {
<ide> try {
<del> const createCompiler = options => {
<add> const createStats = options => {
<ide> return new Promise((resolve, reject) => {
<ide> const c = webpack(options);
<ide> c.outputFileSystem = createFsFromVolume(new Volume());
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> };
<del> const compiler = await createCompiler({
<add> const stats = await createStats({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./missing-file",
<ide> describe("Compiler", () => {
<ide> filename: "bundle.js"
<ide> }
<ide> });
<del> expect(compiler).toBeInstanceOf(Stats);
<add> expect(stats).toBeInstanceOf(Stats);
<ide> done();
<ide> } catch (err) {
<ide> done(err);
<ide> }
<ide> });
<ide>
<ide> it("should not emit on errors (watch)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./missing",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should not be running twice at a time (run)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should not be running twice at a time (watch)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should not be running twice at a time (run - watch)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should not be running twice at a time (watch - run)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should not be running twice at a time (instance cb)", done => {
<del> const compiler = webpack(
<add> compiler = webpack(
<ide> {
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should run again correctly after first compilation", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should watch again correctly after first compilation", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> compiler.run((err, stats) => {
<ide> if (err) return done(err);
<ide>
<del> compiler.watch({}, (err, stats) => {
<add> const watching = compiler.watch({}, (err, stats) => {
<ide> if (err) return done(err);
<del> done();
<add> watching.close(done);
<ide> });
<ide> });
<ide> });
<ide> it("should run again correctly after first closed watch", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should set compiler.watching correctly", function (done) {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> compiler.outputFileSystem = createFsFromVolume(new Volume());
<ide> const watching = compiler.watch({}, (err, stats) => {
<ide> if (err) return done(err);
<del> done();
<add> watching.close(done);
<ide> });
<ide> expect(compiler.watching).toBe(watching);
<ide> });
<ide> it("should watch again correctly after first closed watch", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should run again correctly inside afterDone hook", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should call afterDone hook after other callbacks (run)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> it("should call afterDone hook after other callbacks (instance cb)", done => {
<ide> const instanceCb = jest.fn();
<del> const compiler = webpack(
<add> compiler = webpack(
<ide> {
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should call afterDone hook after other callbacks (watch)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> expect(doneHookCb).toHaveBeenCalled();
<ide> expect(watchCb).toHaveBeenCalled();
<ide> expect(invalidateCb).toHaveBeenCalled();
<del> done();
<add> watching.close(done);
<ide> });
<del> const watch = compiler.watch({}, (err, stats) => {
<add> const watching = compiler.watch({}, (err, stats) => {
<ide> if (err) return done(err);
<ide> watchCb();
<ide> });
<ide> process.nextTick(() => {
<del> watch.invalidate(invalidateCb);
<add> watching.invalidate(invalidateCb);
<ide> });
<ide> });
<ide> it("should call afterDone hook after other callbacks (watch close)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should flag watchMode as true in watch", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> entry: "./c",
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should use cache on second run call", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: __dirname,
<ide> mode: "development",
<ide> devtool: false,
<ide> describe("Compiler", () => {
<ide> });
<ide> it("should call the failed-hook on error", done => {
<ide> const failedSpy = jest.fn();
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> bail: true,
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> }
<ide> }
<ide> it("should log to the console (verbose)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: path.join(__dirname, "fixtures"),
<ide> entry: "./a",
<ide> output: {
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should log to the console (debug mode)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: path.join(__dirname, "fixtures"),
<ide> entry: "./a",
<ide> output: {
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should log to the console (none)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: path.join(__dirname, "fixtures"),
<ide> entry: "./a",
<ide> output: {
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should log to the console with colors (verbose)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: path.join(__dirname, "fixtures"),
<ide> entry: "./a",
<ide> output: {
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> it("should log to the console with colors (debug mode)", done => {
<del> const compiler = webpack({
<add> compiler = webpack({
<ide> context: path.join(__dirname, "fixtures"),
<ide> entry: "./a",
<ide> output: { | 1 |
PHP | PHP | fix two typos in cacheengine.php | f3262f768bba13fadc1b9dbe867a0c25da61c9ee | <ide><path>lib/Cake/Cache/CacheEngine.php
<ide> abstract public function clear($check);
<ide>
<ide> /**
<ide> * Clears all values belonging to a group. Is upt to the implementing engine
<del> * to decide whether actually deete the keys or just simulate it to acheive
<add> * to decide whether actually delete the keys or just simulate it to achieve
<ide> * the same result.
<ide> *
<ide> * @param string $groups name of the group to be cleared | 1 |
Python | Python | pass kwargs to configuration | b623ddc0002aebe32e2b7a1203a6acbed61bf9a8 | <ide><path>src/transformers/configuration_utils.py
<ide> def __init__(self, **kwargs):
<ide> logger.error("Can't set {} with value {} for {}".format(key, value, self))
<ide> raise err
<ide>
<add> @property
<add> def num_labels(self):
<add> return self._num_labels
<add>
<add> @num_labels.setter
<add> def num_labels(self, num_labels):
<add> self._num_labels = num_labels
<add> self.id2label = {i: "LABEL_{}".format(i) for i in range(self.num_labels)}
<add> self.id2label = dict((int(key), value) for key, value in self.id2label.items())
<add> self.label2id = dict(zip(self.id2label.values(), self.id2label.keys()))
<add> self.label2id = dict((key, int(value)) for key, value in self.label2id.items())
<add>
<ide> def save_pretrained(self, save_directory):
<ide> """
<ide> Save a configuration object to the directory `save_directory`, so that it
<ide><path>tests/test_configuration_common.py
<ide> def create_and_test_config_from_and_save_pretrained(self):
<ide>
<ide> self.parent.assertEqual(config_second.to_dict(), config_first.to_dict())
<ide>
<add> def create_and_test_config_with_num_labels(self):
<add> config = self.config_class(**self.inputs_dict, num_labels=5)
<add> self.parent.assertEqual(len(config.id2label), 5)
<add> self.parent.assertEqual(len(config.label2id), 5)
<add>
<add> config.num_labels = 3
<add> self.parent.assertEqual(len(config.id2label), 3)
<add> self.parent.assertEqual(len(config.label2id), 3)
<add>
<ide> def run_common_tests(self):
<ide> self.create_and_test_config_common_properties()
<ide> self.create_and_test_config_to_json_string()
<ide> self.create_and_test_config_to_json_file()
<ide> self.create_and_test_config_from_and_save_pretrained()
<add> self.create_and_test_config_with_num_labels() | 2 |
Text | Text | fix markdown escape in updating.md | a28c9c64f6623bdea0089aeb404bad98f9dd8a7d | <ide><path>UPDATING.md
<ide> Some commands have been grouped to improve UX of CLI. New commands are available
<ide>
<ide> For Airflow short option, use exactly one single character, New commands are available according to the following table:
<ide>
<del>| Old command | New command |
<del>| :------------------------------------------------- | :------------------------------------------------ |
<del>| ``airflow (dags|tasks|scheduler) [-sd, --subdir]`` | ``airflow (dags|tasks|scheduler) [-S, --subdir]`` |
<del>| ``airflow tasks test [-dr, --dry_run]`` | ``airflow tasks test [-n, --dry-run]`` |
<del>| ``airflow dags backfill [-dr, --dry_run]`` | ``airflow dags backfill [-n, --dry-run]`` |
<del>| ``airflow tasks clear [-dx, --dag_regex]`` | ``airflow tasks clear [-R, --dag-regex]`` |
<del>| ``airflow kerberos [-kt, --keytab]`` | ``airflow kerberos [-k, --keytab]`` |
<del>| ``airflow tasks run [-int, --interactive]`` | ``airflow tasks run [-N, --interactive]`` |
<del>| ``airflow webserver [-hn, --hostname]`` | ``airflow webserver [-H, --hostname]`` |
<del>| ``airflow celery worker [-cn, --celery_hostname]`` | ``airflow celery worker [-H, --celery-hostname]`` |
<del>| ``airflow celery flower [-hn, --hostname]`` | ``airflow celery flower [-H, --hostname]`` |
<del>| ``airflow celery flower [-fc, --flower_conf]`` | ``airflow celery flower [-c, --flower-conf]`` |
<del>| ``airflow celery flower [-ba, --basic_auth]`` | ``airflow celery flower [-A, --basic-auth]`` |
<del>| ``airflow celery flower [-tp, --task_params]`` | ``airflow celery flower [-t, --task-params]`` |
<del>| ``airflow celery flower [-pm, --post_mortem]`` | ``airflow celery flower [-m, --post-mortem]`` |
<add>| Old command | New command |
<add>| :----------------------------------------------------| :---------------------------------------------------|
<add>| ``airflow (dags\|tasks\|scheduler) [-sd, --subdir]`` | ``airflow (dags\|tasks\|scheduler) [-S, --subdir]`` |
<add>| ``airflow tasks test [-dr, --dry_run]`` | ``airflow tasks test [-n, --dry-run]`` |
<add>| ``airflow dags backfill [-dr, --dry_run]`` | ``airflow dags backfill [-n, --dry-run]`` |
<add>| ``airflow tasks clear [-dx, --dag_regex]`` | ``airflow tasks clear [-R, --dag-regex]`` |
<add>| ``airflow kerberos [-kt, --keytab]`` | ``airflow kerberos [-k, --keytab]`` |
<add>| ``airflow tasks run [-int, --interactive]`` | ``airflow tasks run [-N, --interactive]`` |
<add>| ``airflow webserver [-hn, --hostname]`` | ``airflow webserver [-H, --hostname]`` |
<add>| ``airflow celery worker [-cn, --celery_hostname]`` | ``airflow celery worker [-H, --celery-hostname]`` |
<add>| ``airflow celery flower [-hn, --hostname]`` | ``airflow celery flower [-H, --hostname]`` |
<add>| ``airflow celery flower [-fc, --flower_conf]`` | ``airflow celery flower [-c, --flower-conf]`` |
<add>| ``airflow celery flower [-ba, --basic_auth]`` | ``airflow celery flower [-A, --basic-auth]`` |
<add>| ``airflow celery flower [-tp, --task_params]`` | ``airflow celery flower [-t, --task-params]`` |
<add>| ``airflow celery flower [-pm, --post_mortem]`` | ``airflow celery flower [-m, --post-mortem]`` |
<ide>
<ide> For Airflow long option, use [kebab-case](https://en.wikipedia.org/wiki/Letter_case) instead of [snake_case](https://en.wikipedia.org/wiki/Snake_case)
<ide> | 1 |
Text | Text | fix bullet spacing for website | 8efe148f0731189aed10e2828dc6cffee94c6397 | <ide><path>docs/NativeComponentsIOS.md
<ide> Let's say we want to add an interactive Map to our app - might as well use [`MKM
<ide> Native views are created and manipulated by subclasses of `RCTViewManager`. These subclasses are similar in function to view controllers, but are essentially singletons - only one instance of each is created by the bridge. They vend native views to the `RCTUIManager`, which delegates back to them to set and update the properties of the views as necessary. The `RCTViewManager`s are also typically the delegates for the views, sending events back to JavaScript via the bridge.
<ide>
<ide> Vending a view is simple:
<add>
<ide> - Create the basic subclass.
<ide> - Add the `RCT_EXPORT_MODULE()` marker macro.
<ide> - Implement the `-(UIView *)view` method | 1 |
Javascript | Javascript | lint the tests | 81a1cd8caad06f4c637dcbde7fa9d60609d8295c | <ide><path>test/Compiler.test.js
<ide> describe("Compiler", () => {
<ide> });
<ide> });
<ide> });
<del> it("should not emit on errors", function(done) {
<add> it("should not emit on errors", function(done) {
<ide> const compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> return done(new Error("Bundle should not be created on error"));
<ide> done();
<ide> });
<del> });
<del> it("should not be run twice at a time (run)", function(done) {
<add> });
<add> it("should not be run twice at a time (run)", function(done) {
<ide> const compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> if (err) return done();
<ide> });
<ide> });
<del> it("should not be run twice at a time (watch)", function(done) {
<add> it("should not be run twice at a time (watch)", function(done) {
<ide> const compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> compiler.watch({}, (err, stats) => {
<ide> if (err) return done();
<ide> });
<del> });
<del> it("should not be run twice at a time (run - watch)", function(done) {
<add> });
<add> it("should not be run twice at a time (run - watch)", function(done) {
<ide> const compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> compiler.run((err, stats) => {
<ide> if (err) return done(err);
<ide> });
<del> compiler.watch({}, (err, stats) => {
<add> compiler.watch({}, (err, stats) => {
<ide> if (err) return done();
<ide> });
<ide> });
<del> it("should not be run twice at a time (watch - run)", function(done) {
<add> it("should not be run twice at a time (watch - run)", function(done) {
<ide> const compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> }
<ide> });
<ide> compiler.outputFileSystem = new MemoryFs();
<del> compiler.watch({}, (err, stats) => {
<add> compiler.watch({}, (err, stats) => {
<ide> if (err) return done(err);
<ide> });
<del> compiler.run((err, stats) => {
<add> compiler.run((err, stats) => {
<ide> if (err) return done();
<ide> });
<ide> });
<del> it("should not be run twice at a time (instance cb)", function(done) {
<del> const compiler = webpack({
<del> context: __dirname,
<del> mode: "production",
<del> entry: "./c",
<del> output: {
<del> path: "/",
<del> filename: "bundle.js"
<del> }
<del> }, () => {});
<add> it("should not be run twice at a time (instance cb)", function(done) {
<add> const compiler = webpack(
<add> {
<add> context: __dirname,
<add> mode: "production",
<add> entry: "./c",
<add> output: {
<add> path: "/",
<add> filename: "bundle.js"
<add> }
<add> },
<add> () => {}
<add> );
<ide> compiler.outputFileSystem = new MemoryFs();
<del> compiler.run((err, stats) => {
<add> compiler.run((err, stats) => {
<ide> if (err) return done();
<ide> });
<ide> });
<del> it("should run again correctly after first compilation", function(done) {
<add> it("should run again correctly after first compilation", function(done) {
<ide> const compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> }
<ide> });
<ide> compiler.outputFileSystem = new MemoryFs();
<del> compiler.run((err, stats) => {
<del> if (err) return done(err);
<add> compiler.run((err, stats) => {
<add> if (err) return done(err);
<ide>
<del> compiler.run((err, stats) => {
<del> if (err) return done(err);
<del> done()
<del> });
<add> compiler.run((err, stats) => {
<add> if (err) return done(err);
<add> done();
<add> });
<ide> });
<ide> });
<del> it("should watch again correctly after first compilation", function(done) {
<add> it("should watch again correctly after first compilation", function(done) {
<ide> const compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> }
<ide> });
<ide> compiler.outputFileSystem = new MemoryFs();
<del> compiler.run((err, stats) => {
<del> if (err) return done(err);
<add> compiler.run((err, stats) => {
<add> if (err) return done(err);
<ide>
<del> compiler.watch({}, (err, stats) => {
<del> if (err) return done(err);
<del> done()
<del> });
<add> compiler.watch({}, (err, stats) => {
<add> if (err) return done(err);
<add> done();
<add> });
<ide> });
<ide> });
<del> it("should run again correctly after first closed watch", function(done) {
<add> it("should run again correctly after first closed watch", function(done) {
<ide> const compiler = webpack({
<ide> context: __dirname,
<ide> mode: "production",
<ide> describe("Compiler", () => {
<ide> }
<ide> });
<ide> compiler.outputFileSystem = new MemoryFs();
<del> const watching = compiler.watch({}, (err, stats) => {
<del> if (err) return done(err);
<del> done()
<del> });
<del> watching.close(() => {
<del> compiler.run((err, stats) => {
<del> if (err) return done(err);
<del> done()
<del> });
<del> })
<add> const watching = compiler.watch({}, (err, stats) => {
<add> if (err) return done(err);
<add> done();
<add> });
<add> watching.close(() => {
<add> compiler.run((err, stats) => {
<add> if (err) return done(err);
<add> done();
<add> });
<add> });
<ide> });
<ide> }); | 1 |
PHP | PHP | catch more things | ed3154d8af8f1b9dbb034e2dc92100a0f8f5d4b2 | <ide><path>src/Illuminate/View/View.php
<ide> public function render(callable $callback = null)
<ide> } catch (Exception $e) {
<ide> $this->factory->flushSections();
<ide>
<add> throw $e;
<add> } catch (Throwable $e) {
<add> $this->factory->flushSections();
<add>
<ide> throw $e;
<ide> }
<ide> } | 1 |
Go | Go | enable resource limitation | 409bbdc3212a37e7a21a70eeae0b44e96509f54d | <ide><path>pkg/sysinfo/sysinfo_linux.go
<ide> func New(quiet bool) *SysInfo {
<ide> w := o(sysInfo, cgMounts)
<ide> warnings = append(warnings, w...)
<ide> }
<add> if cgroups.IsCgroup2UnifiedMode() {
<add> warnings = append(warnings, "Your system is running cgroup v2 (unsupported)")
<add> }
<ide> if !quiet {
<ide> for _, w := range warnings {
<ide> logrus.Warn(w)
<ide> func New(quiet bool) *SysInfo {
<ide>
<ide> // applyMemoryCgroupInfo reads the memory information from the memory cgroup mount point.
<ide> func applyMemoryCgroupInfo(info *SysInfo, cgMounts map[string]string) []string {
<add> if cgroups.IsCgroup2UnifiedMode() {
<add> // TODO: check cgroup2 info correctly
<add> info.MemoryLimit = true
<add> info.SwapLimit = true
<add> info.MemoryReservation = true
<add> info.OomKillDisable = true
<add> info.MemorySwappiness = true
<add> return nil
<add> }
<ide> var warnings []string
<ide> mountPoint, ok := cgMounts["memory"]
<ide> if !ok {
<ide> func applyMemoryCgroupInfo(info *SysInfo, cgMounts map[string]string) []string {
<ide>
<ide> // applyCPUCgroupInfo reads the cpu information from the cpu cgroup mount point.
<ide> func applyCPUCgroupInfo(info *SysInfo, cgMounts map[string]string) []string {
<add> if cgroups.IsCgroup2UnifiedMode() {
<add> // TODO: check cgroup2 info correctly
<add> info.CPUShares = true
<add> info.CPUCfsPeriod = true
<add> info.CPUCfsQuota = true
<add> info.CPURealtimePeriod = true
<add> info.CPURealtimeRuntime = true
<add> return nil
<add> }
<ide> var warnings []string
<ide> mountPoint, ok := cgMounts["cpu"]
<ide> if !ok {
<ide> func applyCPUCgroupInfo(info *SysInfo, cgMounts map[string]string) []string {
<ide>
<ide> // applyBlkioCgroupInfo reads the blkio information from the blkio cgroup mount point.
<ide> func applyBlkioCgroupInfo(info *SysInfo, cgMounts map[string]string) []string {
<add> if cgroups.IsCgroup2UnifiedMode() {
<add> // TODO: check cgroup2 info correctly
<add> info.BlkioWeight = true
<add> info.BlkioReadBpsDevice = true
<add> info.BlkioWriteBpsDevice = true
<add> info.BlkioReadIOpsDevice = true
<add> info.BlkioWriteIOpsDevice = true
<add> return nil
<add> }
<ide> var warnings []string
<ide> mountPoint, ok := cgMounts["blkio"]
<ide> if !ok {
<ide> func applyBlkioCgroupInfo(info *SysInfo, cgMounts map[string]string) []string {
<ide>
<ide> // applyCPUSetCgroupInfo reads the cpuset information from the cpuset cgroup mount point.
<ide> func applyCPUSetCgroupInfo(info *SysInfo, cgMounts map[string]string) []string {
<add> if cgroups.IsCgroup2UnifiedMode() {
<add> // TODO: check cgroup2 info correctly
<add> info.Cpuset = true
<add> return nil
<add> }
<ide> var warnings []string
<ide> mountPoint, ok := cgMounts["cpuset"]
<ide> if !ok {
<ide> func applyCPUSetCgroupInfo(info *SysInfo, cgMounts map[string]string) []string {
<ide>
<ide> // applyPIDSCgroupInfo reads the pids information from the pids cgroup mount point.
<ide> func applyPIDSCgroupInfo(info *SysInfo, _ map[string]string) []string {
<add> if cgroups.IsCgroup2UnifiedMode() {
<add> // TODO: check cgroup2 info correctly
<add> info.PidsLimit = true
<add> return nil
<add> }
<ide> var warnings []string
<ide> _, err := cgroups.FindCgroupMountpoint("", "pids")
<ide> if err != nil {
<ide> func applyPIDSCgroupInfo(info *SysInfo, _ map[string]string) []string {
<ide>
<ide> // applyDevicesCgroupInfo reads the pids information from the devices cgroup mount point.
<ide> func applyDevicesCgroupInfo(info *SysInfo, cgMounts map[string]string) []string {
<add> if cgroups.IsCgroup2UnifiedMode() {
<add> // TODO: check cgroup2 info correctly
<add> info.CgroupDevicesEnabled = true
<add> return nil
<add> }
<ide> var warnings []string
<ide> _, ok := cgMounts["devices"]
<ide> info.CgroupDevicesEnabled = ok | 1 |
Ruby | Ruby | move reloader middleware in actiondispatch | d7396b5ca9c066cb16158c02b976dab01f522344 | <ide><path>actionpack/lib/action_controller.rb
<ide> def self.load_all!
<ide> autoload :PolymorphicRoutes, 'action_controller/routing/generation/polymorphic_routes'
<ide> autoload :RecordIdentifier, 'action_controller/record_identifier'
<ide> autoload :Redirector, 'action_controller/base/redirect'
<del> autoload :Reloader, 'action_controller/reloader'
<ide> autoload :Renderer, 'action_controller/base/render'
<ide> autoload :RequestForgeryProtection, 'action_controller/base/request_forgery_protection'
<ide> autoload :Rescue, 'action_controller/dispatch/rescue'
<ide><path>actionpack/lib/action_controller/dispatch/dispatcher.rb
<ide> class Dispatcher
<ide> class << self
<ide> def define_dispatcher_callbacks(cache_classes)
<ide> unless cache_classes
<del> unless self.middleware.include?(Reloader)
<del> self.middleware.insert_after(ActionDispatch::Failsafe, Reloader)
<add> unless self.middleware.include?(ActionDispatch::Reloader)
<add> self.middleware.insert_after(ActionDispatch::Failsafe, ActionDispatch::Reloader)
<ide> end
<ide>
<ide> ActionView::Helpers::AssetTagHelper.cache_asset_timestamps = false
<ide><path>actionpack/lib/action_dispatch.rb
<ide> module ActionDispatch
<ide>
<ide> autoload :Failsafe, 'action_dispatch/middleware/failsafe'
<ide> autoload :ParamsParser, 'action_dispatch/middleware/params_parser'
<add> autoload :Reloader, 'action_dispatch/middleware/reloader'
<ide> autoload :RewindableInput, 'action_dispatch/middleware/rewindable_input'
<ide> autoload :MiddlewareStack, 'action_dispatch/middleware/stack'
<ide>
<add><path>actionpack/lib/action_dispatch/middleware/reloader.rb
<del><path>actionpack/lib/action_controller/reloader.rb
<del>module ActionController
<add>module ActionDispatch
<ide> class Reloader
<ide> def initialize(app)
<ide> @app = app
<ide> end
<ide>
<ide> def call(env)
<del> Dispatcher.reload_application
<add> ActionController::Dispatcher.reload_application
<ide> @app.call(env)
<ide> ensure
<del> Dispatcher.cleanup_application
<add> ActionController::Dispatcher.cleanup_application
<ide> end
<ide> end
<ide> end | 4 |
Ruby | Ruby | fix undefined method crash | ac7a59373087e9d49097ab7f0ddb691e64159959 | <ide><path>Library/Homebrew/utils/inreplace.rb
<ide> module Utils
<ide> class InreplaceError < RuntimeError
<ide> def initialize(errors)
<del> super errors.inject("inreplace failed\n") do |s, (path, errs)|
<add> formatted_errors = errors.inject("inreplace failed\n") do |s, (path, errs)|
<ide> s << "#{path}:\n" << errs.map { |e| " #{e}\n" }.join
<ide> end
<add> super formatted_errors
<ide> end
<ide> end
<ide> | 1 |
Text | Text | fix typo in error.capturestacktrace | 409418a209558263aebf0ba60c3ebd390b015508 | <ide><path>doc/api/errors.md
<ide> function MyError() {
<ide> }
<ide>
<ide> // Without passing MyError to captureStackTrace, the MyError
<del>// frame would should up in the .stack property. by passing
<add>// frame would show up in the .stack property. By passing
<ide> // the constructor, we omit that frame and all frames above it.
<ide> new MyError().stack
<ide> ``` | 1 |
PHP | PHP | use static instead of event | 2ca07e8ac7c3b5af8a75c4bb19f49b60988d7e22 | <ide><path>src/Illuminate/Support/Facades/Event.php
<ide> public static function fake($eventsToFake = [])
<ide> */
<ide> public static function fakeFor(callable $callable, array $eventsToFake = [])
<ide> {
<del> $initialDispatcher = Event::getFacadeRoot();
<add> $initialDispatcher = static::getFacadeRoot();
<ide>
<del> Event::fake($eventsToFake);
<add> static::fake($eventsToFake);
<ide>
<ide> return tap($callable(), function () use ($initialDispatcher) {
<ide> Model::setEventDispatcher($initialDispatcher);
<ide>
<del> Event::swap($initialDispatcher);
<add> static::swap($initialDispatcher);
<ide> });
<ide> }
<ide> | 1 |
PHP | PHP | fix doc blocks | c369d6745e07cda661cf8f5ffcef40d0cb7b0132 | <ide><path>src/Illuminate/Database/Eloquent/Model.php
<ide> abstract class Model implements ArrayAccess, ArrayableInterface, JsonableInterfa
<ide> protected $fillable = array();
<ide>
<ide> /**
<del> * The attribute that aren't mass assignable.
<add> * The attributes that aren't mass assignable.
<ide> *
<ide> * @var array
<ide> */
<ide><path>src/Illuminate/Database/Eloquent/Relations/Pivot.php
<ide> class Pivot extends Model {
<ide> protected $otherKey;
<ide>
<ide> /**
<del> * The attribute that aren't mass assignable.
<add> * The attributes that aren't mass assignable.
<ide> *
<ide> * @var array
<ide> */ | 2 |
Javascript | Javascript | update the css rule with data-ng-cloak | 90ba9aadc6693e01487e6e14e7d1065658572e0f | <ide><path>src/ng/directive/ngCloak.js
<ide> * `angular.min.js` files. Following is the css rule:
<ide> *
<ide> * <pre>
<del> * [ng\:cloak], [ng-cloak], .ng-cloak {
<add> * [ng\:cloak], [ng-cloak], [data-ng-cloak], [x-ng-cloak], .ng-cloak, .x-ng-cloak {
<ide> * display: none;
<ide> * }
<ide> * </pre> | 1 |
Ruby | Ruby | extract macos module to separate file | 0bb95960e642782c85eb7e227eba844607fc2d00 | <ide><path>Library/Homebrew/macos.rb
<add>module MacOS extend self
<add> def version
<add> MACOS_VERSION
<add> end
<add>
<add> def cat
<add> if mountain_lion?
<add> :mountainlion
<add> elsif lion?
<add> :lion
<add> elsif snow_leopard?
<add> :snowleopard
<add> elsif leopard?
<add> :leopard
<add> else
<add> nil
<add> end
<add> end
<add>
<add> def clt_installed?
<add> # If the command line tools are installed, most unix standard
<add> # tools, libs and headers are in /usr.
<add> # Returns true, also for older Xcode/OSX versions that had everything in /usr
<add> # Beginning with Xcode 4.3, the dev tools are no longer installed
<add> # in /usr and SDKs no longer in /Developer by default.
<add> # But Apple provides an optional "Command Line Tools for Xcode" package.
<add> not clt_version.empty? or dev_tools_path == Pathname.new("/usr/bin")
<add> end
<add>
<add> def clt_version
<add> # Version string (a pretty damn long one) of the CLT package.
<add> # Note, that different ways to install the CLTs lead to different
<add> # version numbers.
<add> @clt_version ||= begin
<add> # CLT installed via stand-alone website download
<add> clt_pkginfo_stand_alone = `pkgutil --pkg-info com.apple.pkg.DeveloperToolsCLILeo 2>/dev/null`.strip
<add> # CLT installed via preferences from within Xcode
<add> clt_pkginfo_from_xcode = `pkgutil --pkg-info com.apple.pkg.DeveloperToolsCLI 2>/dev/null`.strip
<add> if not clt_pkginfo_stand_alone.empty?
<add> clt_pkginfo_stand_alone =~ /version: (.*)$/
<add> $1
<add> elsif not clt_pkginfo_from_xcode.empty?
<add> clt_pkginfo_from_xcode =~ /version: (.*)$/
<add> $1
<add> else
<add> # We return "" instead of nil because we want clt_installed? to be true on older Macs.
<add> # So clt_version.empty? does not mean there are no unix tools in /usr, it just means
<add> # that the "Command Line Tools for Xcode" package is not installed
<add> "" # No CLT or recipe available to pkgutil.
<add> end
<add> end
<add> end
<add>
<add> # Locate the "current Xcode folder" via xcode-select. See:
<add> # man xcode-select
<add> def xcode_folder
<add> @xcode_folder ||= `xcode-select -print-path 2>/dev/null`.strip
<add> end
<add>
<add> # Xcode 4.3 tools hang if "/" is set
<add> def xctools_fucked?
<add> xcode_folder == "/"
<add> end
<add>
<add> def locate tool
<add> # Don't call tools (cc, make, strip, etc.) directly!
<add> # Give the name of the binary you look for as a string to this method
<add> # in order to get the full path back as a Pathname.
<add> tool = tool.to_s
<add>
<add> @locate_cache ||= {}
<add> return @locate_cache[tool] if @locate_cache.has_key? tool
<add>
<add> if File.executable? "/usr/bin/#{tool}"
<add> path = Pathname.new "/usr/bin/#{tool}"
<add> else
<add> # Xcrun was provided first with Xcode 4.3 and allows us to proxy
<add> # tool usage thus avoiding various bugs.
<add> p = `/usr/bin/xcrun -find #{tool} 2>/dev/null`.chomp unless MacOS.xctools_fucked?
<add> if !p.nil? and !p.empty? and File.executable? p
<add> path = Pathname.new p
<add> else
<add> # This is for the use-case where xcode-select is not set up correctly
<add> # with Xcode 4.3+. The tools in Xcode 4.3+ are split over two locations,
<add> # usually xcrun would figure that out for us, but it won't work if
<add> # xcode-select is not configured properly.
<add> p = "#{MacOS.dev_tools_path}/#{tool}"
<add> if File.executable? p
<add> path = Pathname.new p
<add> else
<add> # Otherwise lets look in the second location.
<add> p = "#{MacOS.xctoolchain_path}/usr/bin/#{tool}"
<add> if File.executable? p
<add> path = Pathname.new p
<add> else
<add> # We digged so deep but all is lost now.
<add> path = nil
<add> end
<add> end
<add> end
<add> end
<add> @locate_cache[tool] = path
<add> return path
<add> end
<add>
<add> def dev_tools_path
<add> @dev_tools_path ||= if File.exist? "/usr/bin/cc" and File.exist? "/usr/bin/make"
<add> # probably a safe enough assumption (the unix way)
<add> Pathname.new "/usr/bin"
<add> elsif not xctools_fucked? and system "/usr/bin/xcrun -find make 1>/dev/null 2>&1"
<add> # Wherever "make" is there are the dev tools.
<add> Pathname.new(`/usr/bin/xcrun -find make`.chomp).dirname
<add> elsif File.exist? "#{xcode_prefix}/usr/bin/make"
<add> # cc stopped existing with Xcode 4.3, there are c89 and c99 options though
<add> Pathname.new "#{xcode_prefix}/usr/bin"
<add> else
<add> # Since we are pretty unrelenting in finding Xcode no matter where
<add> # it hides, we can now throw in the towel.
<add> opoo "You really should consult the `brew doctor`!"
<add> ""
<add> end
<add> end
<add>
<add> def xctoolchain_path
<add> # Beginning with Xcode 4.3, clang and some other tools are located in a xctoolchain dir.
<add> @xctoolchain_path ||= begin
<add> path = Pathname.new("#{MacOS.xcode_prefix}/Toolchains/XcodeDefault.xctoolchain")
<add> if path.exist?
<add> path
<add> else
<add> # ok, there are no Toolchains in xcode_prefix
<add> # and that's ok as long as everything is in dev_tools_path="/usr/bin" (i.e. clt_installed?)
<add> nil
<add> end
<add> end
<add> end
<add>
<add> def sdk_path(v=MacOS.version)
<add> # The path of the MacOSX SDK.
<add> if !MacOS.xctools_fucked? and File.executable? "#{xcode_folder}/usr/bin/make"
<add> path = `#{locate('xcodebuild')} -version -sdk macosx#{v} Path 2>/dev/null`.strip
<add> elsif File.directory? '/Developer/SDKs/MacOS#{v}.sdk'
<add> # the old default (or wild wild west style)
<add> path = "/Developer/SDKs/MacOS#{v}.sdk"
<add> elsif File.directory? "#{xcode_prefix}/Platforms/MacOSX.platform/Developer/SDKs/MacOSX#{v}.sdk"
<add> # xcode_prefix is pretty smart, so lets look inside to find the sdk
<add> path = "#{xcode_prefix}/Platforms/MacOSX.platform/Developer/SDKs/MacOSX#{v}.sdk"
<add> end
<add> if path.nil? or path.empty? or not File.directory? path
<add> nil
<add> else
<add> Pathname.new path
<add> end
<add> end
<add>
<add> def default_cc
<add> cc = locate 'cc'
<add> Pathname.new(cc).realpath.basename.to_s rescue nil
<add> end
<add>
<add> def default_compiler
<add> case default_cc
<add> when /^gcc/ then :gcc
<add> when /^llvm/ then :llvm
<add> when "clang" then :clang
<add> else
<add> # guess :(
<add> if xcode_version >= "4.3"
<add> :clang
<add> elsif xcode_version >= "4.2"
<add> :llvm
<add> else
<add> :gcc
<add> end
<add> end
<add> end
<add>
<add> def gcc_42_build_version
<add> @gcc_42_build_version ||= if File.exist? "#{dev_tools_path}/gcc-4.2" \
<add> and not Pathname.new("#{dev_tools_path}/gcc-4.2").realpath.basename.to_s =~ /^llvm/
<add> `#{dev_tools_path}/gcc-4.2 --version` =~ /build (\d{4,})/
<add> $1.to_i
<add> end
<add> end
<add>
<add> def gcc_40_build_version
<add> @gcc_40_build_version ||= if File.exist? "#{dev_tools_path}/gcc-4.0"
<add> `#{dev_tools_path}/gcc-4.0 --version` =~ /build (\d{4,})/
<add> $1.to_i
<add> end
<add> end
<add>
<add> def xcode_prefix
<add> @xcode_prefix ||= begin
<add> path = Pathname.new xcode_folder
<add> if $?.success? and path.absolute? and File.executable? "#{path}/usr/bin/make"
<add> path
<add> elsif File.executable? '/Developer/usr/bin/make'
<add> # we do this to support cowboys who insist on installing
<add> # only a subset of Xcode
<add> Pathname.new '/Developer'
<add> elsif File.executable? '/Applications/Xcode.app/Contents/Developer/usr/bin/make'
<add> # fallback for broken Xcode 4.3 installs
<add> Pathname.new '/Applications/Xcode.app/Contents/Developer'
<add> else
<add> # Ask Spotlight where Xcode is. If the user didn't install the
<add> # helper tools and installed Xcode in a non-conventional place, this
<add> # is our only option. See: http://superuser.com/questions/390757
<add> path = `mdfind "kMDItemCFBundleIdentifier == 'com.apple.dt.Xcode'"`.strip
<add> if path.empty?
<add> # Xcode 3 had a different identifier
<add> path = `mdfind "kMDItemCFBundleIdentifier == 'com.apple.Xcode'"`.strip
<add> end
<add> path = "#{path}/Contents/Developer"
<add> if !path.empty? and File.executable? "#{path}/usr/bin/make"
<add> Pathname.new path
<add> else
<add> nil
<add> end
<add> end
<add> end
<add> end
<add>
<add> def xcode_installed?
<add> # Telling us whether the Xcode.app is installed or not.
<add> @xcode_installed ||= begin
<add> if File.directory? '/Applications/Xcode.app'
<add> true
<add> elsif File.directory? '/Developer/Applications/Xcode.app' # old style
<add> true
<add> elsif not `mdfind "kMDItemCFBundleIdentifier == 'com.apple.dt.Xcode'"`.strip.empty?
<add> # Xcode 4
<add> true
<add> elsif not `mdfind "kMDItemCFBundleIdentifier == 'com.apple.Xcode'"`.strip.empty?
<add> # Xcode 3
<add> true
<add> else
<add> false
<add> end
<add> end
<add> end
<add>
<add> def xcode_version
<add> # may return a version string
<add> # that is guessed based on the compiler, so do not
<add> # use it in order to check if Xcode is installed.
<add> @xcode_version ||= begin
<add> return "0" unless MACOS
<add>
<add> # this shortcut makes xcode_version work for people who don't realise you
<add> # need to install the CLI tools
<add> xcode43build = "/Applications/Xcode.app/Contents/Developer/usr/bin/xcodebuild"
<add> if File.file? xcode43build
<add> `#{xcode43build} -version 2>/dev/null` =~ /Xcode (\d(\.\d)*)/
<add> return $1 if $1
<add> end
<add>
<add> # Xcode 4.3 xc* tools hang indefinately if xcode-select path is set thus
<add> raise if xctools_fucked?
<add>
<add> raise unless which "xcodebuild"
<add> `xcodebuild -version 2>/dev/null` =~ /Xcode (\d(\.\d)*)/
<add> raise if $1.nil? or not $?.success?
<add> $1
<add> rescue
<add> # For people who's xcode-select is unset, or who have installed
<add> # xcode-gcc-installer or whatever other combinations we can try and
<add> # supprt. See https://github.com/mxcl/homebrew/wiki/Xcode
<add> case llvm_build_version.to_i
<add> when 1..2063 then "3.1.0"
<add> when 2064..2065 then "3.1.4"
<add> when 2366..2325
<add> # we have no data for this range so we are guessing
<add> "3.2.0"
<add> when 2326
<add> # also applies to "3.2.3"
<add> "3.2.4"
<add> when 2327..2333 then "3.2.5"
<add> when 2335
<add> # this build number applies to 3.2.6, 4.0 and 4.1
<add> # https://github.com/mxcl/homebrew/wiki/Xcode
<add> "4.0"
<add> else
<add> case (clang_version.to_f * 10).to_i
<add> when 0
<add> "dunno"
<add> when 1..14
<add> "3.2.2"
<add> when 15
<add> "3.2.4"
<add> when 16
<add> "3.2.5"
<add> when 17..20
<add> "4.0"
<add> when 21
<add> "4.1"
<add> when 22..30
<add> "4.2"
<add> when 31
<add> "4.3"
<add> else
<add> "4.3"
<add> end
<add> end
<add> end
<add> end
<add>
<add> def llvm_build_version
<add> # for Xcode 3 on OS X 10.5 this will not exist
<add> # NOTE may not be true anymore but we can't test
<add> @llvm_build_version ||= if locate("llvm-gcc")
<add> `#{locate("llvm-gcc")} --version` =~ /LLVM build (\d{4,})/
<add> $1.to_i
<add> end
<add> end
<add>
<add> def clang_version
<add> @clang_version ||= if locate("clang")
<add> `#{locate("clang")} --version` =~ /clang version (\d\.\d)/
<add> $1
<add> end
<add> end
<add>
<add> def clang_build_version
<add> @clang_build_version ||= if locate("clang")
<add> `#{locate("clang")} --version` =~ %r[tags/Apple/clang-(\d{2,})]
<add> $1.to_i
<add> end
<add> end
<add>
<add> def x11_installed?
<add> # Even if only Xcode (without CLT) is installed, this dylib is there.
<add> Pathname.new('/usr/X11/lib/libpng.dylib').exist?
<add> end
<add>
<add> def macports_or_fink_installed?
<add> # See these issues for some history:
<add> # http://github.com/mxcl/homebrew/issues/#issue/13
<add> # http://github.com/mxcl/homebrew/issues/#issue/41
<add> # http://github.com/mxcl/homebrew/issues/#issue/48
<add> return false unless MACOS
<add>
<add> %w[port fink].each do |ponk|
<add> path = which(ponk)
<add> return ponk unless path.nil?
<add> end
<add>
<add> # we do the above check because macports can be relocated and fink may be
<add> # able to be relocated in the future. This following check is because if
<add> # fink and macports are not in the PATH but are still installed it can
<add> # *still* break the build -- because some build scripts hardcode these paths:
<add> %w[/sw/bin/fink /opt/local/bin/port].each do |ponk|
<add> return ponk if File.exist? ponk
<add> end
<add>
<add> # finally, sometimes people make their MacPorts or Fink read-only so they
<add> # can quickly test Homebrew out, but still in theory obey the README's
<add> # advise to rename the root directory. This doesn't work, many build scripts
<add> # error out when they try to read from these now unreadable directories.
<add> %w[/sw /opt/local].each do |path|
<add> path = Pathname.new(path)
<add> return path if path.exist? and not path.readable?
<add> end
<add>
<add> false
<add> end
<add>
<add> def leopard?
<add> 10.5 == MACOS_VERSION
<add> end
<add>
<add> def snow_leopard?
<add> 10.6 <= MACOS_VERSION # Actually Snow Leopard or newer
<add> end
<add>
<add> def lion?
<add> 10.7 <= MACOS_VERSION # Actually Lion or newer
<add> end
<add>
<add> def mountain_lion?
<add> 10.8 <= MACOS_VERSION # Actually Mountain Lion or newer
<add> end
<add>
<add> def prefer_64_bit?
<add> Hardware.is_64_bit? and not leopard?
<add> end
<add>
<add> StandardCompilers = {
<add> "3.1.4" => {:gcc_40_build_version=>5493, :gcc_42_build_version=>5577},
<add> "3.2.6" => {:gcc_40_build_version=>5494, :gcc_42_build_version=>5666, :llvm_build_version=>2335, :clang_version=>"1.7", :clang_build_version=>77},
<add> "4.0" => {:gcc_40_build_version=>5494, :gcc_42_build_version=>5666, :llvm_build_version=>2335, :clang_version=>"2.0", :clang_build_version=>137},
<add> "4.0.1" => {:gcc_40_build_version=>5494, :gcc_42_build_version=>5666, :llvm_build_version=>2335, :clang_version=>"2.0", :clang_build_version=>137},
<add> "4.0.2" => {:gcc_40_build_version=>5494, :gcc_42_build_version=>5666, :llvm_build_version=>2335, :clang_version=>"2.0", :clang_build_version=>137},
<add> "4.2" => {:llvm_build_version=>2336, :clang_version=>"3.0", :clang_build_version=>211},
<add> "4.3" => {:llvm_build_version=>2336, :clang_version=>"3.1", :clang_build_version=>318},
<add> "4.3.1" => {:llvm_build_version=>2336, :clang_version=>"3.1", :clang_build_version=>318},
<add> "4.3.2" => {:llvm_build_version=>2336, :clang_version=>"3.1", :clang_build_version=>318},
<add> "4.3.3" => {:llvm_build_version=>2336, :clang_version=>"3.1", :clang_build_version=>318}
<add> }
<add>
<add> def compilers_standard?
<add> xcode = MacOS.xcode_version
<add> # Assume compilers are okay if Xcode version not in hash
<add> return true unless StandardCompilers.keys.include? xcode
<add>
<add> StandardCompilers[xcode].all? {|k,v| MacOS.send(k) == v}
<add> end
<add>end
<ide><path>Library/Homebrew/utils.rb
<ide> require 'pathname'
<ide> require 'exceptions'
<add>require 'macos'
<ide>
<ide> class Tty
<ide> class <<self
<ide> def nostdout
<ide> end
<ide> end
<ide>
<del>module MacOS extend self
<del> def version
<del> MACOS_VERSION
<del> end
<del>
<del> def cat
<del> if mountain_lion?
<del> :mountainlion
<del> elsif lion?
<del> :lion
<del> elsif snow_leopard?
<del> :snowleopard
<del> elsif leopard?
<del> :leopard
<del> else
<del> nil
<del> end
<del> end
<del>
<del> def clt_installed?
<del> # If the command line tools are installed, most unix standard
<del> # tools, libs and headers are in /usr.
<del> # Returns true, also for older Xcode/OSX versions that had everything in /usr
<del> # Beginning with Xcode 4.3, the dev tools are no longer installed
<del> # in /usr and SDKs no longer in /Developer by default.
<del> # But Apple provides an optional "Command Line Tools for Xcode" package.
<del> not clt_version.empty? or dev_tools_path == Pathname.new("/usr/bin")
<del> end
<del>
<del> def clt_version
<del> # Version string (a pretty damn long one) of the CLT package.
<del> # Note, that different ways to install the CLTs lead to different
<del> # version numbers.
<del> @clt_version ||= begin
<del> # CLT installed via stand-alone website download
<del> clt_pkginfo_stand_alone = `pkgutil --pkg-info com.apple.pkg.DeveloperToolsCLILeo 2>/dev/null`.strip
<del> # CLT installed via preferences from within Xcode
<del> clt_pkginfo_from_xcode = `pkgutil --pkg-info com.apple.pkg.DeveloperToolsCLI 2>/dev/null`.strip
<del> if not clt_pkginfo_stand_alone.empty?
<del> clt_pkginfo_stand_alone =~ /version: (.*)$/
<del> $1
<del> elsif not clt_pkginfo_from_xcode.empty?
<del> clt_pkginfo_from_xcode =~ /version: (.*)$/
<del> $1
<del> else
<del> # We return "" instead of nil because we want clt_installed? to be true on older Macs.
<del> # So clt_version.empty? does not mean there are no unix tools in /usr, it just means
<del> # that the "Command Line Tools for Xcode" package is not installed
<del> "" # No CLT or recipe available to pkgutil.
<del> end
<del> end
<del> end
<del>
<del> # Locate the "current Xcode folder" via xcode-select. See:
<del> # man xcode-select
<del> def xcode_folder
<del> @xcode_folder ||= `xcode-select -print-path 2>/dev/null`.strip
<del> end
<del>
<del> # Xcode 4.3 tools hang if "/" is set
<del> def xctools_fucked?
<del> xcode_folder == "/"
<del> end
<del>
<del> def locate tool
<del> # Don't call tools (cc, make, strip, etc.) directly!
<del> # Give the name of the binary you look for as a string to this method
<del> # in order to get the full path back as a Pathname.
<del> tool = tool.to_s
<del>
<del> @locate_cache ||= {}
<del> return @locate_cache[tool] if @locate_cache.has_key? tool
<del>
<del> if File.executable? "/usr/bin/#{tool}"
<del> path = Pathname.new "/usr/bin/#{tool}"
<del> else
<del> # Xcrun was provided first with Xcode 4.3 and allows us to proxy
<del> # tool usage thus avoiding various bugs.
<del> p = `/usr/bin/xcrun -find #{tool} 2>/dev/null`.chomp unless MacOS.xctools_fucked?
<del> if !p.nil? and !p.empty? and File.executable? p
<del> path = Pathname.new p
<del> else
<del> # This is for the use-case where xcode-select is not set up correctly
<del> # with Xcode 4.3+. The tools in Xcode 4.3+ are split over two locations,
<del> # usually xcrun would figure that out for us, but it won't work if
<del> # xcode-select is not configured properly.
<del> p = "#{MacOS.dev_tools_path}/#{tool}"
<del> if File.executable? p
<del> path = Pathname.new p
<del> else
<del> # Otherwise lets look in the second location.
<del> p = "#{MacOS.xctoolchain_path}/usr/bin/#{tool}"
<del> if File.executable? p
<del> path = Pathname.new p
<del> else
<del> # We digged so deep but all is lost now.
<del> path = nil
<del> end
<del> end
<del> end
<del> end
<del> @locate_cache[tool] = path
<del> return path
<del> end
<del>
<del> def dev_tools_path
<del> @dev_tools_path ||= if File.exist? "/usr/bin/cc" and File.exist? "/usr/bin/make"
<del> # probably a safe enough assumption (the unix way)
<del> Pathname.new "/usr/bin"
<del> elsif not xctools_fucked? and system "/usr/bin/xcrun -find make 1>/dev/null 2>&1"
<del> # Wherever "make" is there are the dev tools.
<del> Pathname.new(`/usr/bin/xcrun -find make`.chomp).dirname
<del> elsif File.exist? "#{xcode_prefix}/usr/bin/make"
<del> # cc stopped existing with Xcode 4.3, there are c89 and c99 options though
<del> Pathname.new "#{xcode_prefix}/usr/bin"
<del> else
<del> # Since we are pretty unrelenting in finding Xcode no matter where
<del> # it hides, we can now throw in the towel.
<del> opoo "You really should consult the `brew doctor`!"
<del> ""
<del> end
<del> end
<del>
<del> def xctoolchain_path
<del> # Beginning with Xcode 4.3, clang and some other tools are located in a xctoolchain dir.
<del> @xctoolchain_path ||= begin
<del> path = Pathname.new("#{MacOS.xcode_prefix}/Toolchains/XcodeDefault.xctoolchain")
<del> if path.exist?
<del> path
<del> else
<del> # ok, there are no Toolchains in xcode_prefix
<del> # and that's ok as long as everything is in dev_tools_path="/usr/bin" (i.e. clt_installed?)
<del> nil
<del> end
<del> end
<del> end
<del>
<del> def sdk_path(v=MacOS.version)
<del> # The path of the MacOSX SDK.
<del> if !MacOS.xctools_fucked? and File.executable? "#{xcode_folder}/usr/bin/make"
<del> path = `#{locate('xcodebuild')} -version -sdk macosx#{v} Path 2>/dev/null`.strip
<del> elsif File.directory? '/Developer/SDKs/MacOS#{v}.sdk'
<del> # the old default (or wild wild west style)
<del> path = "/Developer/SDKs/MacOS#{v}.sdk"
<del> elsif File.directory? "#{xcode_prefix}/Platforms/MacOSX.platform/Developer/SDKs/MacOSX#{v}.sdk"
<del> # xcode_prefix is pretty smart, so lets look inside to find the sdk
<del> path = "#{xcode_prefix}/Platforms/MacOSX.platform/Developer/SDKs/MacOSX#{v}.sdk"
<del> end
<del> if path.nil? or path.empty? or not File.directory? path
<del> nil
<del> else
<del> Pathname.new path
<del> end
<del> end
<del>
<del> def default_cc
<del> cc = locate 'cc'
<del> Pathname.new(cc).realpath.basename.to_s rescue nil
<del> end
<del>
<del> def default_compiler
<del> case default_cc
<del> when /^gcc/ then :gcc
<del> when /^llvm/ then :llvm
<del> when "clang" then :clang
<del> else
<del> # guess :(
<del> if xcode_version >= "4.3"
<del> :clang
<del> elsif xcode_version >= "4.2"
<del> :llvm
<del> else
<del> :gcc
<del> end
<del> end
<del> end
<del>
<del> def gcc_42_build_version
<del> @gcc_42_build_version ||= if File.exist? "#{dev_tools_path}/gcc-4.2" \
<del> and not Pathname.new("#{dev_tools_path}/gcc-4.2").realpath.basename.to_s =~ /^llvm/
<del> `#{dev_tools_path}/gcc-4.2 --version` =~ /build (\d{4,})/
<del> $1.to_i
<del> end
<del> end
<del>
<del> def gcc_40_build_version
<del> @gcc_40_build_version ||= if File.exist? "#{dev_tools_path}/gcc-4.0"
<del> `#{dev_tools_path}/gcc-4.0 --version` =~ /build (\d{4,})/
<del> $1.to_i
<del> end
<del> end
<del>
<del> def xcode_prefix
<del> @xcode_prefix ||= begin
<del> path = Pathname.new xcode_folder
<del> if $?.success? and path.absolute? and File.executable? "#{path}/usr/bin/make"
<del> path
<del> elsif File.executable? '/Developer/usr/bin/make'
<del> # we do this to support cowboys who insist on installing
<del> # only a subset of Xcode
<del> Pathname.new '/Developer'
<del> elsif File.executable? '/Applications/Xcode.app/Contents/Developer/usr/bin/make'
<del> # fallback for broken Xcode 4.3 installs
<del> Pathname.new '/Applications/Xcode.app/Contents/Developer'
<del> else
<del> # Ask Spotlight where Xcode is. If the user didn't install the
<del> # helper tools and installed Xcode in a non-conventional place, this
<del> # is our only option. See: http://superuser.com/questions/390757
<del> path = `mdfind "kMDItemCFBundleIdentifier == 'com.apple.dt.Xcode'"`.strip
<del> if path.empty?
<del> # Xcode 3 had a different identifier
<del> path = `mdfind "kMDItemCFBundleIdentifier == 'com.apple.Xcode'"`.strip
<del> end
<del> path = "#{path}/Contents/Developer"
<del> if !path.empty? and File.executable? "#{path}/usr/bin/make"
<del> Pathname.new path
<del> else
<del> nil
<del> end
<del> end
<del> end
<del> end
<del>
<del> def xcode_installed?
<del> # Telling us whether the Xcode.app is installed or not.
<del> @xcode_installed ||= begin
<del> if File.directory? '/Applications/Xcode.app'
<del> true
<del> elsif File.directory? '/Developer/Applications/Xcode.app' # old style
<del> true
<del> elsif not `mdfind "kMDItemCFBundleIdentifier == 'com.apple.dt.Xcode'"`.strip.empty?
<del> # Xcode 4
<del> true
<del> elsif not `mdfind "kMDItemCFBundleIdentifier == 'com.apple.Xcode'"`.strip.empty?
<del> # Xcode 3
<del> true
<del> else
<del> false
<del> end
<del> end
<del> end
<del>
<del> def xcode_version
<del> # may return a version string
<del> # that is guessed based on the compiler, so do not
<del> # use it in order to check if Xcode is installed.
<del> @xcode_version ||= begin
<del> return "0" unless MACOS
<del>
<del> # this shortcut makes xcode_version work for people who don't realise you
<del> # need to install the CLI tools
<del> xcode43build = "/Applications/Xcode.app/Contents/Developer/usr/bin/xcodebuild"
<del> if File.file? xcode43build
<del> `#{xcode43build} -version 2>/dev/null` =~ /Xcode (\d(\.\d)*)/
<del> return $1 if $1
<del> end
<del>
<del> # Xcode 4.3 xc* tools hang indefinately if xcode-select path is set thus
<del> raise if xctools_fucked?
<del>
<del> raise unless which "xcodebuild"
<del> `xcodebuild -version 2>/dev/null` =~ /Xcode (\d(\.\d)*)/
<del> raise if $1.nil? or not $?.success?
<del> $1
<del> rescue
<del> # For people who's xcode-select is unset, or who have installed
<del> # xcode-gcc-installer or whatever other combinations we can try and
<del> # supprt. See https://github.com/mxcl/homebrew/wiki/Xcode
<del> case llvm_build_version.to_i
<del> when 1..2063 then "3.1.0"
<del> when 2064..2065 then "3.1.4"
<del> when 2366..2325
<del> # we have no data for this range so we are guessing
<del> "3.2.0"
<del> when 2326
<del> # also applies to "3.2.3"
<del> "3.2.4"
<del> when 2327..2333 then "3.2.5"
<del> when 2335
<del> # this build number applies to 3.2.6, 4.0 and 4.1
<del> # https://github.com/mxcl/homebrew/wiki/Xcode
<del> "4.0"
<del> else
<del> case (clang_version.to_f * 10).to_i
<del> when 0
<del> "dunno"
<del> when 1..14
<del> "3.2.2"
<del> when 15
<del> "3.2.4"
<del> when 16
<del> "3.2.5"
<del> when 17..20
<del> "4.0"
<del> when 21
<del> "4.1"
<del> when 22..30
<del> "4.2"
<del> when 31
<del> "4.3"
<del> else
<del> "4.3"
<del> end
<del> end
<del> end
<del> end
<del>
<del> def llvm_build_version
<del> # for Xcode 3 on OS X 10.5 this will not exist
<del> # NOTE may not be true anymore but we can't test
<del> @llvm_build_version ||= if locate("llvm-gcc")
<del> `#{locate("llvm-gcc")} --version` =~ /LLVM build (\d{4,})/
<del> $1.to_i
<del> end
<del> end
<del>
<del> def clang_version
<del> @clang_version ||= if locate("clang")
<del> `#{locate("clang")} --version` =~ /clang version (\d\.\d)/
<del> $1
<del> end
<del> end
<del>
<del> def clang_build_version
<del> @clang_build_version ||= if locate("clang")
<del> `#{locate("clang")} --version` =~ %r[tags/Apple/clang-(\d{2,})]
<del> $1.to_i
<del> end
<del> end
<del>
<del> def x11_installed?
<del> # Even if only Xcode (without CLT) is installed, this dylib is there.
<del> Pathname.new('/usr/X11/lib/libpng.dylib').exist?
<del> end
<del>
<del> def macports_or_fink_installed?
<del> # See these issues for some history:
<del> # http://github.com/mxcl/homebrew/issues/#issue/13
<del> # http://github.com/mxcl/homebrew/issues/#issue/41
<del> # http://github.com/mxcl/homebrew/issues/#issue/48
<del> return false unless MACOS
<del>
<del> %w[port fink].each do |ponk|
<del> path = which(ponk)
<del> return ponk unless path.nil?
<del> end
<del>
<del> # we do the above check because macports can be relocated and fink may be
<del> # able to be relocated in the future. This following check is because if
<del> # fink and macports are not in the PATH but are still installed it can
<del> # *still* break the build -- because some build scripts hardcode these paths:
<del> %w[/sw/bin/fink /opt/local/bin/port].each do |ponk|
<del> return ponk if File.exist? ponk
<del> end
<del>
<del> # finally, sometimes people make their MacPorts or Fink read-only so they
<del> # can quickly test Homebrew out, but still in theory obey the README's
<del> # advise to rename the root directory. This doesn't work, many build scripts
<del> # error out when they try to read from these now unreadable directories.
<del> %w[/sw /opt/local].each do |path|
<del> path = Pathname.new(path)
<del> return path if path.exist? and not path.readable?
<del> end
<del>
<del> false
<del> end
<del>
<del> def leopard?
<del> 10.5 == MACOS_VERSION
<del> end
<del>
<del> def snow_leopard?
<del> 10.6 <= MACOS_VERSION # Actually Snow Leopard or newer
<del> end
<del>
<del> def lion?
<del> 10.7 <= MACOS_VERSION # Actually Lion or newer
<del> end
<del>
<del> def mountain_lion?
<del> 10.8 <= MACOS_VERSION # Actually Mountain Lion or newer
<del> end
<del>
<del> def prefer_64_bit?
<del> Hardware.is_64_bit? and not leopard?
<del> end
<del>
<del> StandardCompilers = {
<del> "3.1.4" => {:gcc_40_build_version=>5493, :gcc_42_build_version=>5577},
<del> "3.2.6" => {:gcc_40_build_version=>5494, :gcc_42_build_version=>5666, :llvm_build_version=>2335, :clang_version=>"1.7", :clang_build_version=>77},
<del> "4.0" => {:gcc_40_build_version=>5494, :gcc_42_build_version=>5666, :llvm_build_version=>2335, :clang_version=>"2.0", :clang_build_version=>137},
<del> "4.0.1" => {:gcc_40_build_version=>5494, :gcc_42_build_version=>5666, :llvm_build_version=>2335, :clang_version=>"2.0", :clang_build_version=>137},
<del> "4.0.2" => {:gcc_40_build_version=>5494, :gcc_42_build_version=>5666, :llvm_build_version=>2335, :clang_version=>"2.0", :clang_build_version=>137},
<del> "4.2" => {:llvm_build_version=>2336, :clang_version=>"3.0", :clang_build_version=>211},
<del> "4.3" => {:llvm_build_version=>2336, :clang_version=>"3.1", :clang_build_version=>318},
<del> "4.3.1" => {:llvm_build_version=>2336, :clang_version=>"3.1", :clang_build_version=>318},
<del> "4.3.2" => {:llvm_build_version=>2336, :clang_version=>"3.1", :clang_build_version=>318},
<del> "4.3.3" => {:llvm_build_version=>2336, :clang_version=>"3.1", :clang_build_version=>318}
<del> }
<del>
<del> def compilers_standard?
<del> xcode = MacOS.xcode_version
<del> # Assume compilers are okay if Xcode version not in hash
<del> return true unless StandardCompilers.keys.include? xcode
<del>
<del> StandardCompilers[xcode].all? {|k,v| MacOS.send(k) == v}
<del> end
<del>end
<del>
<ide> module GitHub extend self
<ide> def issues_for_formula name
<ide> # bit basic as depends on the issue at github having the exact name of the | 2 |
Python | Python | create dataparallel model if several gpus | 5f432480c0f9f89e56fec9bf428108637b600f98 | <ide><path>extract_features_pytorch.py
<ide> def main():
<ide> if args.init_checkpoint is not None:
<ide> model.load_state_dict(torch.load(args.init_checkpoint, map_location='cpu'))
<ide> model.to(device)
<add>
<add> if n_gpu > 1:
<add> model = nn.DataParallel(model)
<ide>
<ide> all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
<ide> all_input_mask = torch.tensor([f.input_mask for f in features], dtype=torch.long)
<ide><path>run_classifier_pytorch.py
<ide> def main():
<ide> if args.init_checkpoint is not None:
<ide> model.bert.load_state_dict(torch.load(args.init_checkpoint, map_location='cpu'))
<ide> model.to(device)
<add>
<add> if n_gpu > 1:
<add> model = torch.nn.DataParallel(model)
<ide>
<ide> optimizer = BERTAdam([{'params': [p for n, p in model.named_parameters() if n != 'bias'], 'l2': 0.01},
<ide> {'params': [p for n, p in model.named_parameters() if n == 'bias'], 'l2': 0.}
<ide><path>run_squad_pytorch.py
<ide> def main():
<ide> if args.init_checkpoint is not None:
<ide> model.bert.load_state_dict(torch.load(args.init_checkpoint, map_location='cpu'))
<ide> model.to(device)
<add>
<add> if n_gpu > 1:
<add> model = torch.nn.DataParallel(model)
<ide>
<ide> optimizer = BERTAdam([{'params': [p for n, p in model.named_parameters() if n != 'bias'], 'l2': 0.01},
<ide> {'params': [p for n, p in model.named_parameters() if n == 'bias'], 'l2': 0.} | 3 |
Javascript | Javascript | use strict assertions in module loader test | 443e218544b82c02643a97666b505ffb0f18751f | <ide><path>test/sequential/test-module-loading.js
<ide> var fs = require('fs');
<ide> console.error('load test-module-loading.js');
<ide>
<ide> // assert that this is the main module.
<del>assert.equal(require.main.id, '.', 'main module should have id of \'.\'');
<del>assert.equal(require.main, module, 'require.main should === module');
<del>assert.equal(process.mainModule, module,
<del> 'process.mainModule should === module');
<add>assert.strictEqual(require.main.id, '.', 'main module should have id of \'.\'');
<add>assert.strictEqual(require.main, module, 'require.main should === module');
<add>assert.strictEqual(process.mainModule, module,
<add> 'process.mainModule should === module');
<ide> // assert that it's *not* the main module in the required module.
<ide> require('../fixtures/not-main-module.js');
<ide>
<ide> // require a file with a request that includes the extension
<ide> var a_js = require('../fixtures/a.js');
<del>assert.equal(42, a_js.number);
<add>assert.strictEqual(42, a_js.number);
<ide>
<ide> // require a file without any extensions
<ide> var foo_no_ext = require('../fixtures/foo');
<del>assert.equal('ok', foo_no_ext.foo);
<add>assert.strictEqual('ok', foo_no_ext.foo);
<ide>
<ide> var a = require('../fixtures/a');
<ide> var c = require('../fixtures/b/c');
<ide> var d3 = require(path.join(__dirname, '../fixtures/b/d'));
<ide> // Relative
<ide> var d4 = require('../fixtures/b/d');
<ide>
<del>assert.equal(false, false, 'testing the test program.');
<add>assert.strictEqual(false, false, 'testing the test program.');
<ide>
<ide> assert.ok(a.A instanceof Function);
<del>assert.equal('A', a.A());
<add>assert.strictEqual('A', a.A());
<ide>
<ide> assert.ok(a.C instanceof Function);
<del>assert.equal('C', a.C());
<add>assert.strictEqual('C', a.C());
<ide>
<ide> assert.ok(a.D instanceof Function);
<del>assert.equal('D', a.D());
<add>assert.strictEqual('D', a.D());
<ide>
<ide> assert.ok(d.D instanceof Function);
<del>assert.equal('D', d.D());
<add>assert.strictEqual('D', d.D());
<ide>
<ide> assert.ok(d2.D instanceof Function);
<del>assert.equal('D', d2.D());
<add>assert.strictEqual('D', d2.D());
<ide>
<ide> assert.ok(d3.D instanceof Function);
<del>assert.equal('D', d3.D());
<add>assert.strictEqual('D', d3.D());
<ide>
<ide> assert.ok(d4.D instanceof Function);
<del>assert.equal('D', d4.D());
<add>assert.strictEqual('D', d4.D());
<ide>
<ide> assert.ok((new a.SomeClass()) instanceof c.SomeClass);
<ide>
<ide> console.error('test index.js modules ids and relative loading');
<ide> const one = require('../fixtures/nested-index/one');
<ide> const two = require('../fixtures/nested-index/two');
<del>assert.notEqual(one.hello, two.hello);
<add>assert.notStrictEqual(one.hello, two.hello);
<ide>
<ide> console.error('test index.js in a folder with a trailing slash');
<ide> const three = require('../fixtures/nested-index/three');
<ide> const threeFolder = require('../fixtures/nested-index/three/');
<ide> const threeIndex = require('../fixtures/nested-index/three/index.js');
<del>assert.equal(threeFolder, threeIndex);
<del>assert.notEqual(threeFolder, three);
<add>assert.strictEqual(threeFolder, threeIndex);
<add>assert.notStrictEqual(threeFolder, three);
<ide>
<ide> console.error('test package.json require() loading');
<del>assert.equal(require('../fixtures/packages/index').ok, 'ok',
<del> 'Failed loading package');
<del>assert.equal(require('../fixtures/packages/main').ok, 'ok',
<del> 'Failed loading package');
<del>assert.equal(require('../fixtures/packages/main-index').ok, 'ok',
<del> 'Failed loading package with index.js in main subdir');
<add>assert.strictEqual(require('../fixtures/packages/index').ok, 'ok',
<add> 'Failed loading package');
<add>assert.strictEqual(require('../fixtures/packages/main').ok, 'ok',
<add> 'Failed loading package');
<add>assert.strictEqual(require('../fixtures/packages/main-index').ok, 'ok',
<add> 'Failed loading package with index.js in main subdir');
<ide>
<ide> console.error('test cycles containing a .. path');
<ide> const root = require('../fixtures/cycles/root');
<ide> const foo = require('../fixtures/cycles/folder/foo');
<del>assert.equal(root.foo, foo);
<del>assert.equal(root.sayHello(), root.hello);
<add>assert.strictEqual(root.foo, foo);
<add>assert.strictEqual(root.sayHello(), root.hello);
<ide>
<ide> console.error('test node_modules folders');
<ide> // asserts are in the fixtures files themselves,
<ide> try {
<ide> require('../fixtures/throws_error');
<ide> } catch (e) {
<ide> errorThrown = true;
<del> assert.equal('blah', e.message);
<add> assert.strictEqual('blah', e.message);
<ide> }
<ide>
<del>assert.equal(require('path').dirname(__filename), __dirname);
<add>assert.strictEqual(require('path').dirname(__filename), __dirname);
<ide>
<ide> console.error('load custom file types with extensions');
<ide> require.extensions['.test'] = function(module, filename) {
<ide> var content = fs.readFileSync(filename).toString();
<del> assert.equal('this is custom source\n', content);
<add> assert.strictEqual('this is custom source\n', content);
<ide> content = content.replace('this is custom source',
<ide> 'exports.test = \'passed\'');
<ide> module._compile(content, filename);
<ide> };
<ide>
<del>assert.equal(require('../fixtures/registerExt').test, 'passed');
<add>assert.strictEqual(require('../fixtures/registerExt').test, 'passed');
<ide> // unknown extension, load as .js
<del>assert.equal(require('../fixtures/registerExt.hello.world').test, 'passed');
<add>assert.strictEqual(require('../fixtures/registerExt.hello.world').test,
<add> 'passed');
<ide>
<ide> console.error('load custom file types that return non-strings');
<ide> require.extensions['.test'] = function(module, filename) {
<ide> require.extensions['.test'] = function(module, filename) {
<ide> };
<ide> };
<ide>
<del>assert.equal(require('../fixtures/registerExt2').custom, 'passed');
<add>assert.strictEqual(require('../fixtures/registerExt2').custom, 'passed');
<ide>
<del>assert.equal(require('../fixtures/foo').foo, 'ok',
<del> 'require module with no extension');
<add>assert.strictEqual(require('../fixtures/foo').foo, 'ok',
<add> 'require module with no extension');
<ide>
<ide> // Should not attempt to load a directory
<ide> try {
<ide> require('../fixtures/empty');
<ide> } catch (err) {
<del> assert.equal(err.message, 'Cannot find module \'../fixtures/empty\'');
<add> assert.strictEqual(err.message, 'Cannot find module \'../fixtures/empty\'');
<ide> }
<ide>
<ide> // Check load order is as expected
<ide> const msg = 'Load order incorrect.';
<ide> require.extensions['.reg'] = require.extensions['.js'];
<ide> require.extensions['.reg2'] = require.extensions['.js'];
<ide>
<del>assert.equal(require(loadOrder + 'file1').file1, 'file1', msg);
<del>assert.equal(require(loadOrder + 'file2').file2, 'file2.js', msg);
<add>assert.strictEqual(require(loadOrder + 'file1').file1, 'file1', msg);
<add>assert.strictEqual(require(loadOrder + 'file2').file2, 'file2.js', msg);
<ide> try {
<ide> require(loadOrder + 'file3');
<ide> } catch (e) {
<ide> // Not a real .node module, but we know we require'd the right thing.
<ide> assert.ok(e.message.replace(/\\/g, '/').match(/file3\.node/));
<ide> }
<del>assert.equal(require(loadOrder + 'file4').file4, 'file4.reg', msg);
<del>assert.equal(require(loadOrder + 'file5').file5, 'file5.reg2', msg);
<del>assert.equal(require(loadOrder + 'file6').file6, 'file6/index.js', msg);
<add>assert.strictEqual(require(loadOrder + 'file4').file4, 'file4.reg', msg);
<add>assert.strictEqual(require(loadOrder + 'file5').file5, 'file5.reg2', msg);
<add>assert.strictEqual(require(loadOrder + 'file6').file6, 'file6/index.js', msg);
<ide> try {
<ide> require(loadOrder + 'file7');
<ide> } catch (e) {
<ide> assert.ok(e.message.replace(/\\/g, '/').match(/file7\/index\.node/));
<ide> }
<del>assert.equal(require(loadOrder + 'file8').file8, 'file8/index.reg', msg);
<del>assert.equal(require(loadOrder + 'file9').file9, 'file9/index.reg2', msg);
<add>assert.strictEqual(require(loadOrder + 'file8').file8, 'file8/index.reg', msg);
<add>assert.strictEqual(require(loadOrder + 'file9').file9, 'file9/index.reg2', msg);
<ide>
<ide>
<ide> // make sure that module.require() is the same as
<ide> // doing require() inside of that module.
<ide> var parent = require('../fixtures/module-require/parent/');
<ide> var child = require('../fixtures/module-require/child/');
<del>assert.equal(child.loaded, parent.loaded);
<add>assert.strictEqual(child.loaded, parent.loaded);
<ide>
<ide>
<ide> // #1357 Loading JSON files with require()
<ide> assert.throws(function() {
<ide>
<ide> process.on('exit', function() {
<ide> assert.ok(a.A instanceof Function);
<del> assert.equal('A done', a.A());
<add> assert.strictEqual('A done', a.A());
<ide>
<ide> assert.ok(a.C instanceof Function);
<del> assert.equal('C done', a.C());
<add> assert.strictEqual('C done', a.C());
<ide>
<ide> assert.ok(a.D instanceof Function);
<del> assert.equal('D done', a.D());
<add> assert.strictEqual('D done', a.D());
<ide>
<ide> assert.ok(d.D instanceof Function);
<del> assert.equal('D done', d.D());
<add> assert.strictEqual('D done', d.D());
<ide>
<ide> assert.ok(d2.D instanceof Function);
<del> assert.equal('D done', d2.D());
<add> assert.strictEqual('D done', d2.D());
<ide>
<del> assert.equal(true, errorThrown);
<add> assert.strictEqual(true, errorThrown);
<ide>
<ide> console.log('exit');
<ide> });
<ide>
<ide>
<ide> // #1440 Loading files with a byte order marker.
<del>assert.equal(42, require('../fixtures/utf8-bom.js'));
<del>assert.equal(42, require('../fixtures/utf8-bom.json'));
<add>assert.strictEqual(42, require('../fixtures/utf8-bom.js'));
<add>assert.strictEqual(42, require('../fixtures/utf8-bom.json'));
<ide>
<ide> // Error on the first line of a module should
<ide> // have the correct line number | 1 |
Javascript | Javascript | raise error when null bytes detected in paths | 33fa7405778444ca66470ab0729e6fa9fe43d2a6 | <ide><path>lib/fs.js
<ide> function assertEncoding(encoding) {
<ide> }
<ide> }
<ide>
<add>function nullCheck(path, callback) {
<add> if (('' + path).indexOf('\u0000') !== -1) {
<add> var er = new Error('Path must be a string without null bytes.');
<add> if (!callback)
<add> throw er;
<add> process.nextTick(function() {
<add> callback(er);
<add> });
<add> return false;
<add> }
<add> return true;
<add>}
<ide>
<ide> fs.Stats = binding.Stats;
<ide>
<ide> fs.Stats.prototype.isSocket = function() {
<ide> };
<ide>
<ide> fs.exists = function(path, callback) {
<del> binding.stat(pathModule._makeLong(path), function(err, stats) {
<add> if (!nullCheck(path, cb)) return;
<add> binding.stat(pathModule._makeLong(path), cb);
<add> function cb(err, stats) {
<ide> if (callback) callback(err ? false : true);
<del> });
<add> }
<ide> };
<ide>
<ide> fs.existsSync = function(path) {
<ide> try {
<add> nullCheck(path);
<ide> binding.stat(pathModule._makeLong(path));
<ide> return true;
<ide> } catch (e) {
<ide> fs.open = function(path, flags, mode, callback) {
<ide> callback = makeCallback(arguments[arguments.length - 1]);
<ide> mode = modeNum(mode, 438 /*=0666*/);
<ide>
<add> if (!nullCheck(path, callback)) return;
<ide> binding.open(pathModule._makeLong(path),
<ide> stringToFlags(flags),
<ide> mode,
<ide> fs.open = function(path, flags, mode, callback) {
<ide>
<ide> fs.openSync = function(path, flags, mode) {
<ide> mode = modeNum(mode, 438 /*=0666*/);
<add> nullCheck(path);
<ide> return binding.open(pathModule._makeLong(path), stringToFlags(flags), mode);
<ide> };
<ide>
<ide> fs.writeSync = function(fd, buffer, offset, length, position) {
<ide> };
<ide>
<ide> fs.rename = function(oldPath, newPath, callback) {
<add> callback = makeCallback(callback);
<add> if (!nullCheck(oldPath, callback)) return;
<add> if (!nullCheck(newPath, callback)) return;
<ide> binding.rename(pathModule._makeLong(oldPath),
<ide> pathModule._makeLong(newPath),
<del> makeCallback(callback));
<add> callback);
<ide> };
<ide>
<ide> fs.renameSync = function(oldPath, newPath) {
<add> nullCheck(oldPath);
<add> nullCheck(newPath);
<ide> return binding.rename(pathModule._makeLong(oldPath),
<ide> pathModule._makeLong(newPath));
<ide> };
<ide> fs.ftruncateSync = function(fd, len) {
<ide> };
<ide>
<ide> fs.rmdir = function(path, callback) {
<del> binding.rmdir(pathModule._makeLong(path), makeCallback(callback));
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<add> binding.rmdir(pathModule._makeLong(path), callback);
<ide> };
<ide>
<ide> fs.rmdirSync = function(path) {
<add> nullCheck(path);
<ide> return binding.rmdir(pathModule._makeLong(path));
<ide> };
<ide>
<ide> fs.fsyncSync = function(fd) {
<ide>
<ide> fs.mkdir = function(path, mode, callback) {
<ide> if (typeof mode === 'function') callback = mode;
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<ide> binding.mkdir(pathModule._makeLong(path),
<ide> modeNum(mode, 511 /*=0777*/),
<del> makeCallback(callback));
<add> callback);
<ide> };
<ide>
<ide> fs.mkdirSync = function(path, mode) {
<add> nullCheck(path);
<ide> return binding.mkdir(pathModule._makeLong(path),
<ide> modeNum(mode, 511 /*=0777*/));
<ide> };
<ide> fs.sendfileSync = function(outFd, inFd, inOffset, length) {
<ide> };
<ide>
<ide> fs.readdir = function(path, callback) {
<del> binding.readdir(pathModule._makeLong(path), makeCallback(callback));
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<add> binding.readdir(pathModule._makeLong(path), callback);
<ide> };
<ide>
<ide> fs.readdirSync = function(path) {
<add> nullCheck(path);
<ide> return binding.readdir(pathModule._makeLong(path));
<ide> };
<ide>
<ide> fs.fstat = function(fd, callback) {
<ide> };
<ide>
<ide> fs.lstat = function(path, callback) {
<del> binding.lstat(pathModule._makeLong(path), makeCallback(callback));
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<add> binding.lstat(pathModule._makeLong(path), callback);
<ide> };
<ide>
<ide> fs.stat = function(path, callback) {
<del> binding.stat(pathModule._makeLong(path), makeCallback(callback));
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<add> binding.stat(pathModule._makeLong(path), callback);
<ide> };
<ide>
<ide> fs.fstatSync = function(fd) {
<ide> return binding.fstat(fd);
<ide> };
<ide>
<ide> fs.lstatSync = function(path) {
<add> nullCheck(path);
<ide> return binding.lstat(pathModule._makeLong(path));
<ide> };
<ide>
<ide> fs.statSync = function(path) {
<add> nullCheck(path);
<ide> return binding.stat(pathModule._makeLong(path));
<ide> };
<ide>
<ide> fs.readlink = function(path, callback) {
<del> binding.readlink(pathModule._makeLong(path), makeCallback(callback));
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<add> binding.readlink(pathModule._makeLong(path), callback);
<ide> };
<ide>
<ide> fs.readlinkSync = function(path) {
<add> nullCheck(path);
<ide> return binding.readlink(pathModule._makeLong(path));
<ide> };
<ide>
<ide> fs.symlink = function(destination, path, type_, callback) {
<ide> var type = (typeof type_ === 'string' ? type_ : null);
<ide> var callback = makeCallback(arguments[arguments.length - 1]);
<ide>
<add> if (!nullCheck(destination, callback)) return;
<add> if (!nullCheck(path, callback)) return;
<add>
<ide> binding.symlink(preprocessSymlinkDestination(destination, type),
<ide> pathModule._makeLong(path),
<ide> type,
<ide> fs.symlink = function(destination, path, type_, callback) {
<ide> fs.symlinkSync = function(destination, path, type) {
<ide> type = (typeof type === 'string' ? type : null);
<ide>
<add> nullCheck(destination);
<add> nullCheck(path);
<add>
<ide> return binding.symlink(preprocessSymlinkDestination(destination, type),
<ide> pathModule._makeLong(path),
<ide> type);
<ide> };
<ide>
<ide> fs.link = function(srcpath, dstpath, callback) {
<add> callback = makeCallback(callback);
<add> if (!nullCheck(srcpath, callback)) return;
<add> if (!nullCheck(dstpath, callback)) return;
<add>
<ide> binding.link(pathModule._makeLong(srcpath),
<ide> pathModule._makeLong(dstpath),
<del> makeCallback(callback));
<add> callback);
<ide> };
<ide>
<ide> fs.linkSync = function(srcpath, dstpath) {
<add> nullCheck(srcpath);
<add> nullCheck(dstpath);
<ide> return binding.link(pathModule._makeLong(srcpath),
<ide> pathModule._makeLong(dstpath));
<ide> };
<ide>
<ide> fs.unlink = function(path, callback) {
<del> binding.unlink(pathModule._makeLong(path), makeCallback(callback));
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<add> binding.unlink(pathModule._makeLong(path), callback);
<ide> };
<ide>
<ide> fs.unlinkSync = function(path) {
<add> nullCheck(path);
<ide> return binding.unlink(pathModule._makeLong(path));
<ide> };
<ide>
<ide> if (constants.hasOwnProperty('O_SYMLINK')) {
<ide>
<ide>
<ide> fs.chmod = function(path, mode, callback) {
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<ide> binding.chmod(pathModule._makeLong(path),
<ide> modeNum(mode),
<del> makeCallback(callback));
<add> callback);
<ide> };
<ide>
<ide> fs.chmodSync = function(path, mode) {
<add> nullCheck(path);
<ide> return binding.chmod(pathModule._makeLong(path), modeNum(mode));
<ide> };
<ide>
<ide> fs.fchownSync = function(fd, uid, gid) {
<ide> };
<ide>
<ide> fs.chown = function(path, uid, gid, callback) {
<del> binding.chown(pathModule._makeLong(path), uid, gid, makeCallback(callback));
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<add> binding.chown(pathModule._makeLong(path), uid, gid, callback);
<ide> };
<ide>
<ide> fs.chownSync = function(path, uid, gid) {
<add> nullCheck(path);
<ide> return binding.chown(pathModule._makeLong(path), uid, gid);
<ide> };
<ide>
<ide> function toUnixTimestamp(time) {
<ide> fs._toUnixTimestamp = toUnixTimestamp;
<ide>
<ide> fs.utimes = function(path, atime, mtime, callback) {
<add> callback = makeCallback(callback);
<add> if (!nullCheck(path, callback)) return;
<ide> binding.utimes(pathModule._makeLong(path),
<ide> toUnixTimestamp(atime),
<ide> toUnixTimestamp(mtime),
<del> makeCallback(callback));
<add> callback);
<ide> };
<ide>
<ide> fs.utimesSync = function(path, atime, mtime) {
<add> nullCheck(path);
<ide> atime = toUnixTimestamp(atime);
<ide> mtime = toUnixTimestamp(mtime);
<ide> binding.utimes(pathModule._makeLong(path), atime, mtime);
<ide> function FSWatcher() {
<ide> util.inherits(FSWatcher, EventEmitter);
<ide>
<ide> FSWatcher.prototype.start = function(filename, persistent) {
<add> nullCheck(filename);
<ide> var r = this._handle.start(pathModule._makeLong(filename), persistent);
<ide>
<ide> if (r) {
<ide> FSWatcher.prototype.close = function() {
<ide> };
<ide>
<ide> fs.watch = function(filename) {
<add> nullCheck(filename);
<ide> var watcher;
<ide> var options;
<ide> var listener;
<ide> util.inherits(StatWatcher, EventEmitter);
<ide>
<ide>
<ide> StatWatcher.prototype.start = function(filename, persistent, interval) {
<add> nullCheck(filename);
<ide> this._handle.start(pathModule._makeLong(filename), persistent, interval);
<ide> };
<ide>
<ide> function inStatWatchers(filename) {
<ide>
<ide>
<ide> fs.watchFile = function(filename) {
<add> nullCheck(filename);
<ide> var stat;
<ide> var listener;
<ide>
<ide> fs.watchFile = function(filename) {
<ide> };
<ide>
<ide> fs.unwatchFile = function(filename, listener) {
<add> nullCheck(filename);
<ide> if (!inStatWatchers(filename)) return;
<ide>
<ide> var stat = statWatchers[filename];
<ide><path>test/simple/test-fs-null-bytes.js
<add>// Copyright Joyent, Inc. and other Node contributors.
<add>//
<add>// Permission is hereby granted, free of charge, to any person obtaining a
<add>// copy of this software and associated documentation files (the
<add>// "Software"), to deal in the Software without restriction, including
<add>// without limitation the rights to use, copy, modify, merge, publish,
<add>// distribute, sublicense, and/or sell copies of the Software, and to permit
<add>// persons to whom the Software is furnished to do so, subject to the
<add>// following conditions:
<add>//
<add>// The above copyright notice and this permission notice shall be included
<add>// in all copies or substantial portions of the Software.
<add>//
<add>// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
<add>// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
<add>// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
<add>// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
<add>// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
<add>// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
<add>// USE OR OTHER DEALINGS IN THE SOFTWARE.
<add>
<add>var common = require('../common');
<add>var assert = require('assert');
<add>var fs = require('fs');
<add>
<add>function check(async, sync) {
<add> var expected = /Path must be a string without null bytes./;
<add> var argsSync = Array.prototype.slice.call(arguments, 2);
<add> var argsAsync = argsSync.concat(function(er) {
<add> assert(er && er.message.match(expected));
<add> });
<add>
<add> if (sync)
<add> assert.throws(function() {
<add> console.error(sync.name, argsSync);
<add> sync.apply(null, argsSync);
<add> }, expected);
<add>
<add> if (async)
<add> async.apply(null, argsAsync);
<add>}
<add>
<add>check(fs.appendFile, fs.appendFileSync, 'foo\u0000bar');
<add>check(fs.chmod, fs.chmodSync, 'foo\u0000bar', '0644');
<add>check(fs.chown, fs.chownSync, 'foo\u0000bar', 12, 34);
<add>check(fs.link, fs.linkSync, 'foo\u0000bar', 'foobar');
<add>check(fs.link, fs.linkSync, 'foobar', 'foo\u0000bar');
<add>check(fs.lstat, fs.lstatSync, 'foo\u0000bar');
<add>check(fs.mkdir, fs.mkdirSync, 'foo\u0000bar', '0755');
<add>check(fs.open, fs.openSync, 'foo\u0000bar', 'r');
<add>check(fs.readFile, fs.readFileSync, 'foo\u0000bar');
<add>check(fs.readdir, fs.readdirSync, 'foo\u0000bar');
<add>check(fs.readlink, fs.readlinkSync, 'foo\u0000bar');
<add>check(fs.realpath, fs.realpathSync, 'foo\u0000bar');
<add>check(fs.rename, fs.renameSync, 'foo\u0000bar', 'foobar');
<add>check(fs.rename, fs.renameSync, 'foobar', 'foo\u0000bar');
<add>check(fs.rmdir, fs.rmdirSync, 'foo\u0000bar');
<add>check(fs.stat, fs.statSync, 'foo\u0000bar');
<add>check(fs.symlink, fs.symlinkSync, 'foo\u0000bar', 'foobar');
<add>check(fs.symlink, fs.symlinkSync, 'foobar', 'foo\u0000bar');
<add>check(fs.truncate, fs.truncateSync, 'foo\u0000bar');
<add>check(fs.unlink, fs.unlinkSync, 'foo\u0000bar');
<add>check(null, fs.unwatchFile, 'foo\u0000bar', assert.fail);
<add>check(fs.utimes, fs.utimesSync, 'foo\u0000bar', 0, 0);
<add>check(null, fs.watch, 'foo\u0000bar', assert.fail);
<add>check(null, fs.watchFile, 'foo\u0000bar', assert.fail);
<add>check(fs.writeFile, fs.writeFileSync, 'foo\u0000bar');
<add>
<add>// an 'error' for exists means that it doesn't exist.
<add>// one of many reasons why this file is the absolute worst.
<add>fs.exists('foo\u0000bar', function(exists) {
<add> assert(!exists);
<add>});
<add>assert(!fs.existsSync('foo\u0000bar'));
<add> | 2 |
Mixed | Javascript | fix docs and add missed breaking change | 15efbbdc1fe42f914e6442b32ffc032a4cc8e792 | <ide><path>CHANGELOG.md
<ide> Angular with
<ide> [Unicode Technical Standard #35](http://unicode.org/reports/tr35/#Date_Format_Patterns) used by
<ide> Closure, as well as, future DOM apis currently being proposed to w3c.
<add>- `$xhr.error`'s `request` argument has no `callback` property anymore, use `success` instead
<ide>
<ide>
<ide>
<ide><path>src/service/xhr.error.js
<ide> * - `method` – `{string}` – The http request method.
<ide> * - `url` – `{string}` – The request destination.
<ide> * - `data` – `{(string|Object)=} – An optional request body.
<del> * - `callback` – `{function()}` – The callback function
<add> * - `success` – `{function()}` – The success callback function
<ide> *
<ide> * @param {Object} response Response object.
<ide> * | 2 |
Javascript | Javascript | fix issue with metamorph replace | 14d3f7920141600030c60d921c84f5385a28148b | <ide><path>packages/ember-handlebars/lib/views/metamorph_view.js
<ide> var DOMManager = {
<ide> view.transitionTo('preRender');
<ide>
<ide> Ember.run.schedule('render', this, function() {
<del> if (get(view, 'isDestroyed')) { return; }
<add> if (view.isDestroying) { return; }
<ide>
<ide> view.clearRenderedChildren();
<ide> var buffer = view.renderToBuffer();
<ide><path>packages/ember-handlebars/tests/handlebars_test.js
<ide> test("should update the block when object passed to #if helper changes and an in
<ide> });
<ide> });
<ide>
<add>test("edge case: child conditional should not render children if parent conditional becomes false", function() {
<add> var childCreated = false;
<add>
<add> view = Ember.View.create({
<add> cond1: true,
<add> cond2: false,
<add> viewClass: Ember.View.extend({
<add> init: function() {
<add> this._super();
<add> childCreated = true;
<add> }
<add> }),
<add> template: Ember.Handlebars.compile('{{#if view.cond1}}{{#if view.cond2}}{{#view view.viewClass}}test{{/view}}{{/if}}{{/if}}')
<add> });
<add>
<add> appendView();
<add>
<add> Ember.run(function() {
<add> // The order of these sets is important for the test
<add> view.set('cond2', true);
<add> view.set('cond1', false);
<add> });
<add>
<add> ok(!childCreated, 'child should not be created');
<add>});
<add>
<ide> // test("Should insert a localized string if the {{loc}} helper is used", function() {
<ide> // Ember.stringsFor('en', {
<ide> // 'Brazil': 'Brasilia' | 2 |
Text | Text | fix typo in script component docs | ced204066198a695cc633d71358676fdc24c14b3 | <ide><path>docs/basic-features/script.md
<ide> npm run dev
<ide> # ...
<ide> ```
<ide>
<del>Once setup is complete, defining `strategy="worker` will automatically instantiate Partytown in your application and off-load the script to a web worker.
<add>Once setup is complete, defining `strategy="worker"` will automatically instantiate Partytown in your application and off-load the script to a web worker.
<ide>
<ide> ```jsx
<ide> <Script src="https://example.com/analytics.js" strategy="worker" /> | 1 |
Ruby | Ruby | add xcode 4.5.1 to compiler map | 736717cf24247df492dc6939f8279982c5ebcb4b | <ide><path>Library/Homebrew/macos.rb
<ide> def prefer_64_bit?
<ide> "4.3.3" => {:llvm_build_version=>2336, :clang_version=>"3.1", :clang_build_version=>318},
<ide> "4.4" => {:llvm_build_version=>2336, :clang_version=>"4.0", :clang_build_version=>421},
<ide> "4.4.1" => {:llvm_build_version=>2336, :clang_version=>"4.0", :clang_build_version=>421},
<del> "4.5" => {:llvm_build_version=>2336, :clang_version=>"4.1", :clang_build_version=>421}
<add> "4.5" => {:llvm_build_version=>2336, :clang_version=>"4.1", :clang_build_version=>421},
<add> "4.5.1" => {:llvm_build_version=>2336, :clang_version=>"4.1", :clang_build_version=>421}
<ide> }
<ide>
<ide> def compilers_standard? | 1 |
PHP | PHP | ignore standards for php defined constants | e5ad204265055ca8170315201969ea19f9d8eea8 | <ide><path>lib/Cake/Network/CakeSocket.php
<ide> class CakeSocket {
<ide> * @var array
<ide> */
<ide> protected $_encryptMethods = array(
<add> // @codingStandardsIgnoreStart
<ide> 'sslv2_client' => STREAM_CRYPTO_METHOD_SSLv2_CLIENT,
<ide> 'sslv3_client' => STREAM_CRYPTO_METHOD_SSLv3_CLIENT,
<ide> 'sslv23_client' => STREAM_CRYPTO_METHOD_SSLv23_CLIENT,
<ide> class CakeSocket {
<ide> 'sslv3_server' => STREAM_CRYPTO_METHOD_SSLv3_SERVER,
<ide> 'sslv23_server' => STREAM_CRYPTO_METHOD_SSLv23_SERVER,
<ide> 'tls_server' => STREAM_CRYPTO_METHOD_TLS_SERVER
<add> // @codingStandardsIgnoreEnd
<ide> );
<ide>
<ide> /** | 1 |
Ruby | Ruby | avoid empty api pages | b653c29bbec572eb82c4b82ae89d26acfa15b519 | <ide><path>actionpack/lib/abstract_controller/asset_paths.rb
<ide> module AbstractController
<del> module AssetPaths
<add> module AssetPaths #:nodoc:
<ide> extend ActiveSupport::Concern
<ide>
<ide> included do
<ide><path>actionpack/lib/abstract_controller/base.rb
<ide> require 'active_support/core_ext/module/anonymous'
<ide>
<ide> module AbstractController
<del> class Error < StandardError; end
<del> class ActionNotFound < StandardError; end
<add> class Error < StandardError #:nodoc:
<add> end
<add>
<add> class ActionNotFound < StandardError #:nodoc:
<add> end
<ide>
<ide> # <tt>AbstractController::Base</tt> is a low-level API. Nobody should be
<ide> # using it directly, and subclasses (like ActionController::Base) are
<ide><path>actionpack/lib/abstract_controller/logger.rb
<ide> require "active_support/benchmarkable"
<ide>
<ide> module AbstractController
<del> module Logger
<add> module Logger #:nodoc:
<ide> extend ActiveSupport::Concern
<ide>
<ide> included do | 3 |
PHP | PHP | update deprecation suggestions | 41ee62533d61f1727b8b95c0a7528b867410c509 | <ide><path>src/Http/ServerRequest.php
<ide> public function __get($name)
<ide> * @param string $name The property being accessed.
<ide> * @return bool Existence
<ide> * @deprecated 3.4.0 Accessing routing parameters through __isset will removed in 4.0.0.
<del> * Use param() instead.
<add> * Use getParam() instead.
<ide> */
<ide> public function __isset($name)
<ide> {
<ide> public static function addDetector($name, $callable)
<ide>
<ide> /**
<ide> * Add parameters to the request's parsed parameter set. This will overwrite any existing parameters.
<del> * This modifies the parameters available through `$request->params`.
<add> * This modifies the parameters available through `$request->getParam()`.
<ide> *
<ide> * @param array $params Array of parameters to merge in
<ide> * @return $this The current object, you can chain this method.
<ide> public function offsetGet($name)
<ide> * @param string $name Name of the key being written
<ide> * @param mixed $value The value being written.
<ide> * @return void
<del> * @deprecated 3.4.0 The ArrayAccess methods will be removed in 4.0.0. Use withParam() or param() instead.
<add> * @deprecated 3.4.0 The ArrayAccess methods will be removed in 4.0.0. Use withParam() instead.
<ide> */
<ide> public function offsetSet($name, $value)
<ide> {
<ide> public function offsetSet($name, $value)
<ide> *
<ide> * @param string $name thing to check.
<ide> * @return bool
<del> * @deprecated 3.4.0 The ArrayAccess methods will be removed in 4.0.0. Use getParam() or param() instead.
<add> * @deprecated 3.4.0 The ArrayAccess methods will be removed in 4.0.0. Use getParam() instead.
<ide> */
<ide> public function offsetExists($name)
<ide> {
<ide> public function offsetExists($name)
<ide> *
<ide> * @param string $name Name to unset.
<ide> * @return void
<del> * @deprecated 3.4.0 The ArrayAccess methods will be removed in 4.0.0. Use withParam() or param() instead.
<add> * @deprecated 3.4.0 The ArrayAccess methods will be removed in 4.0.0. Use withParam() instead.
<ide> */
<ide> public function offsetUnset($name)
<ide> { | 1 |
PHP | PHP | flush the application after each test | 7602d93ce35e8f0baf9915ebc8be4d885ee0259e | <ide><path>src/Illuminate/Container/Container.php
<ide> public function forgetInstances()
<ide> $this->instances = array();
<ide> }
<ide>
<add> /**
<add> * Flush the container of all bindings and resolved instances.
<add> *
<add> * @return void
<add> */
<add> public function flush()
<add> {
<add> $this->aliases = [];
<add> $this->resolved = [];
<add> $this->bindings = [];
<add> $this->instances = [];
<add> }
<add>
<ide> /**
<ide> * Determine if a given offset exists.
<ide> *
<ide><path>src/Illuminate/Foundation/Application.php
<ide> public function registerCoreContainerAliases()
<ide> }
<ide> }
<ide>
<add> /**
<add> * Flush the container of all bindings and resolved instances.
<add> *
<add> * @return void
<add> */
<add> public function flush()
<add> {
<add> parent::flush();
<add>
<add> $this->loadedProviders = [];
<add> }
<add>
<ide> /**
<ide> * Dynamically access application services.
<ide> *
<ide><path>src/Illuminate/Foundation/Testing/TestCase.php
<ide> abstract class TestCase extends \PHPUnit_Framework_TestCase {
<ide>
<ide> use ApplicationTrait, AssertionsTrait;
<ide>
<add> /**
<add> * Creates the application.
<add> *
<add> * Needs to be implemented by subclasses.
<add> *
<add> * @return \Symfony\Component\HttpKernel\HttpKernelInterface
<add> */
<add> abstract public function createApplication();
<add>
<ide> /**
<ide> * Setup the test environment.
<ide> *
<ide> public function setUp()
<ide> }
<ide>
<ide> /**
<del> * Creates the application.
<del> *
<del> * Needs to be implemented by subclasses.
<add> * Clean up the testing environment before the next test.
<ide> *
<del> * @return \Symfony\Component\HttpKernel\HttpKernelInterface
<add> * @return void
<ide> */
<del> abstract public function createApplication();
<add> public function tearDown()
<add> {
<add> $this->app->flush();
<add> }
<ide>
<ide> } | 3 |
Python | Python | add profile command to cli | cec76801dc870ae3e1f8682e84126ee69a2a25a2 | <ide><path>spacy/__main__.py
<ide> import plac
<ide> import sys
<ide> from spacy.cli import download, link, info, package, train, convert, model
<add> from spacy.cli import profile
<ide> from spacy.util import prints
<ide>
<ide> commands = {
<ide> 'train': train,
<ide> 'convert': convert,
<ide> 'package': package,
<del> 'model': model
<add> 'model': model,
<add> 'profile': profile,
<ide> }
<ide> if len(sys.argv) == 1:
<ide> prints(', '.join(commands), title="Available commands", exits=1) | 1 |
Java | Java | add marble diagrams for single.repeat operators | c8bcb3c92bebde39616597bd46ddb93d6f0f6e18 | <ide><path>src/main/java/io/reactivex/Single.java
<ide> public final Single<T> onTerminateDetach() {
<ide>
<ide> /**
<ide> * Repeatedly re-subscribes to the current Single and emits each success value.
<add> * <p>
<add> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeat.png" alt="">
<ide> * <dl>
<ide> * <dt><b>Backpressure:</b></dt>
<ide> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd>
<ide> public final Flowable<T> repeat() {
<ide>
<ide> /**
<ide> * Re-subscribes to the current Single at most the given number of times and emits each success value.
<add> * <p>
<add> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeat.n.png" alt="">
<ide> * <dl>
<ide> * <dt><b>Backpressure:</b></dt>
<ide> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd>
<ide> public final Flowable<T> repeat(long times) {
<ide> * Re-subscribes to the current Single if
<ide> * the Publisher returned by the handler function signals a value in response to a
<ide> * value signalled through the Flowable the handle receives.
<add> * <p>
<add> * <img width="640" height="1478" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeatWhen.png" alt="">
<ide> * <dl>
<ide> * <dt><b>Backpressure:</b></dt>
<ide> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer. | 1 |
Ruby | Ruby | fix version detection and bottles | d9a18d4c1e679d95ff9828f00d246d9b03ccb984 | <ide><path>Library/Homebrew/test/test_versions.rb
<ide> def test_erlang_version_style
<ide> assert_version_detected 'R13B', 'http://erlang.org/download/otp_src_R13B.tar.gz'
<ide> end
<ide>
<add> def test_another_erlang_version_style
<add> assert_version_detected 'R15B01', 'https://github.com/erlang/otp/tarball/OTP_R15B01'
<add> end
<add>
<ide> def test_p7zip_version_style
<ide> assert_version_detected '9.04',
<ide> 'http://kent.dl.sourceforge.net/sourceforge/p7zip/p7zip_9.04_src_all.tar.bz2'
<ide> def test_erlang_bottle_style
<ide> assert_version_detected 'R15B', 'https://downloads.sf.net/project/machomebrew/Bottles/erlang-R15B.lion.bottle.tar.gz'
<ide> end
<ide>
<add> def test_another_erlang_bottle_style
<add> assert_version_detected 'R15B01', 'https://downloads.sf.net/project/machomebrew/Bottles/erlang-R15B01.mountainlion.bottle.tar.gz'
<add> end
<add>
<ide> def test_old_bottle_style
<ide> assert_version_detected '4.7.3', 'https://downloads.sf.net/project/machomebrew/Bottles/qt-4.7.3-bottle.tar.gz'
<ide> end
<ide><path>Library/Homebrew/version.rb
<ide> def self._parse spec
<ide> m = %r[github.com/.+/(?:zip|tar)ball/v?((\d+\.)+\d+_(\d+))$].match(spec.to_s)
<ide> return m.captures.first unless m.nil?
<ide>
<add> # e.g. https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style)
<add> m = /[-_](R\d+[AB]\d*)/.match(spec.to_s)
<add> return m.captures.first unless m.nil?
<add>
<ide> # e.g. boost_1_39_0
<ide> m = /((\d+_)+\d+)$/.match(stem)
<ide> return m.captures.first.gsub('_', '.') unless m.nil?
<ide> def self._parse spec
<ide> m = /_((\d+\.)+\d+[abc]?)[.]orig$/.match(stem)
<ide> return m.captures.first unless m.nil?
<ide>
<del> # e.g. erlang-R14B03-bottle.tar.gz (old erlang bottle style)
<add> # e.g. http://www.openssl.org/source/openssl-0.9.8s.tar.gz
<ide> m = /-([^-]+)/.match(stem)
<ide> return m.captures.first unless m.nil?
<ide>
<del> # e.g. opt_src_R13B (erlang)
<del> m = /otp_src_(.+)/.match(stem)
<del> return m.captures.first unless m.nil?
<del>
<ide> # e.g. astyle_1.23_macosx.tar.gz
<ide> m = /_([^_]+)/.match(stem)
<ide> return m.captures.first unless m.nil? | 2 |
Python | Python | fix val_step in fit_generator with sequence | 6746bda3dcda273580fef2d911c6cc333c8a626c | <ide><path>keras/engine/training_generator.py
<ide> def fit_generator(model,
<ide> if isinstance(val_data, Sequence):
<ide> val_enqueuer = OrderedEnqueuer(val_data,
<ide> use_multiprocessing=use_multiprocessing)
<del> validation_steps = len(val_data)
<add> validation_steps = validation_steps or len(val_data)
<ide> else:
<ide> val_enqueuer = GeneratorEnqueuer(val_data,
<ide> use_multiprocessing=use_multiprocessing)
<ide><path>tests/keras/engine/test_training.py
<ide> class RandomSequence(Sequence):
<ide> def __init__(self, batch_size, sequence_length=12):
<ide> self.batch_size = batch_size
<ide> self.sequence_length = sequence_length
<add> self.logs = [] # It will work for use_multiprocessing=False
<ide>
<ide> def __len__(self):
<ide> return self.sequence_length
<ide>
<ide> def __getitem__(self, idx):
<add> self.logs.append(idx)
<ide> return ([np.random.random((self.batch_size, 3)),
<ide> np.random.random((self.batch_size, 3))],
<ide> [np.random.random((self.batch_size, 4)),
<ide> def gen_data():
<ide> sample_weight_mode=None)
<ide> trained_epochs = []
<ide> trained_batches = []
<add> val_seq = RandomSequence(4)
<ide> out = model.fit_generator(generator=RandomSequence(3),
<ide> steps_per_epoch=3,
<ide> epochs=5,
<ide> initial_epoch=0,
<del> validation_data=RandomSequence(4),
<add> validation_data=val_seq,
<ide> validation_steps=3,
<add> max_queue_size=1,
<ide> callbacks=[tracker_cb])
<ide> assert trained_epochs == [0, 1, 2, 3, 4]
<ide> assert trained_batches == list(range(3)) * 5
<add> assert len(val_seq.logs) <= 4 * 5
<ide>
<ide> # steps_per_epoch will be equal to len of sequence if it's unspecified
<ide> trained_epochs = []
<ide> trained_batches = []
<add> val_seq = RandomSequence(4)
<ide> out = model.fit_generator(generator=RandomSequence(3),
<ide> epochs=5,
<ide> initial_epoch=0,
<del> validation_data=RandomSequence(4),
<add> validation_data=val_seq,
<ide> callbacks=[tracker_cb])
<ide> assert trained_epochs == [0, 1, 2, 3, 4]
<ide> assert trained_batches == list(range(12)) * 5
<add> assert len(val_seq.logs) == 12 * 5
<ide>
<ide> # fit_generator will throw an exception
<ide> # if steps is unspecified for regular generator | 2 |
Ruby | Ruby | escape globbed parameters in routes correctly | 6776edccf6fb553eb0ac6db55e1d30df1b5b6589 | <ide><path>actionpack/lib/action_controller/routing/segments.rb
<ide> def match_extraction(next_capture)
<ide> end
<ide>
<ide> class PathSegment < DynamicSegment #:nodoc:
<del> RESERVED_PCHAR = "#{Segment::RESERVED_PCHAR}/"
<del> UNSAFE_PCHAR = Regexp.new("[^#{URI::REGEXP::PATTERN::UNRESERVED}#{RESERVED_PCHAR}]", false, 'N').freeze
<del>
<ide> def interpolation_chunk(value_code = "#{local_name}")
<del> "\#{URI.escape(#{value_code}.to_s, ActionController::Routing::PathSegment::UNSAFE_PCHAR)}"
<add> "\#{#{value_code}}"
<add> end
<add>
<add> def extract_value
<add> "#{local_name} = hash[:#{key}] && hash[:#{key}].collect { |path_component| URI.escape(path_component, ActionController::Routing::Segment::UNSAFE_PCHAR) }.to_param #{"|| #{default.inspect}" if default}"
<ide> end
<ide>
<ide> def default
<ide><path>actionpack/test/controller/routing_test.rb
<ide> def setup
<ide> ActionController::Routing.use_controllers! ['controller']
<ide> @set = ActionController::Routing::RouteSet.new
<ide> @set.draw do |map|
<del> map.connect ':controller/:action/:variable'
<add> map.connect ':controller/:action/:variable/*additional'
<ide> end
<ide>
<ide> safe, unsafe = %w(: @ & = + $ , ;), %w(^ / ? # [ ])
<ide> def setup
<ide> end
<ide>
<ide> def test_route_generation_escapes_unsafe_path_characters
<del> assert_equal "/contr#{@segment}oller/act#{@escaped}ion/var#{@escaped}iable",
<add> assert_equal "/contr#{@segment}oller/act#{@escaped}ion/var#{@escaped}iable/add#{@escaped}itional-1/add#{@escaped}itional-2",
<ide> @set.generate(:controller => "contr#{@segment}oller",
<ide> :action => "act#{@segment}ion",
<del> :variable => "var#{@segment}iable")
<add> :variable => "var#{@segment}iable",
<add> :additional => ["add#{@segment}itional-1", "add#{@segment}itional-2"])
<ide> end
<ide>
<ide> def test_route_recognition_unescapes_path_components
<ide> options = { :controller => "controller",
<ide> :action => "act#{@segment}ion",
<del> :variable => "var#{@segment}iable" }
<del> assert_equal options, @set.recognize_path("/controller/act#{@escaped}ion/var#{@escaped}iable")
<add> :variable => "var#{@segment}iable",
<add> :additional => ["add#{@segment}itional-1", "add#{@segment}itional-2"] }
<add> assert_equal options, @set.recognize_path("/controller/act#{@escaped}ion/var#{@escaped}iable/add#{@escaped}itional-1/add#{@escaped}itional-2")
<ide> end
<ide> end
<ide> | 2 |
Ruby | Ruby | fix error message when adapter is not specified | 83b995206a569d8d08b697ee9f86a64ca1854bcc | <ide><path>activerecord/lib/active_record/connection_adapters/connection_specification.rb
<ide> def resolve_symbol_connection(env_name, pool_name)
<ide> if db_config
<ide> resolve_connection(db_config.config).merge("name" => pool_name.to_s)
<ide> else
<del> raise(AdapterNotSpecified, "'#{env_name}' database is not configured. Available: #{configurations.configurations.map(&:env_name).join(", ")}")
<add> raise AdapterNotSpecified, <<~MSG
<add> The `#{env_name}` database is not configured for the `#{ActiveRecord::ConnectionHandling::DEFAULT_ENV.call}` environment.
<add>
<add> Available databases configurations are:
<add>
<add> #{build_configuration_sentence}
<add> MSG
<ide> end
<ide> end
<ide>
<add> def build_configuration_sentence # :nodoc:
<add> configs = configurations.configs_for(include_replicas: true)
<add>
<add> configs.group_by(&:env_name).map do |env, config|
<add> namespaces = config.map(&:spec_name)
<add> if namespaces.size > 1
<add> "#{env}: #{namespaces.join(", ")}"
<add> else
<add> env
<add> end
<add> end.join("\n")
<add> end
<add>
<ide> # Accepts a hash. Expands the "url" key that contains a
<ide> # URL database connection to a full connection
<ide> # hash and merges with the rest of the hash. | 1 |
Python | Python | fix keyerror on missing exitcode | 206cce971da6941e8c1b0d3c4dbf4fa8afe0fba4 | <ide><path>airflow/providers/amazon/aws/operators/ecs.py
<ide> def _check_success_task(self) -> None:
<ide> )
<ide> containers = task['containers']
<ide> for container in containers:
<del> if container.get('lastStatus') == 'STOPPED' and container['exitCode'] != 0:
<add> if container.get('lastStatus') == 'STOPPED' and container.get('exitCode', 1) != 0:
<ide> if self.task_log_fetcher:
<ide> last_logs = "\n".join(
<ide> self.task_log_fetcher.get_last_log_messages(self.number_logs_exception)
<ide><path>tests/providers/amazon/aws/operators/test_ecs.py
<ide> def test_check_success_tasks_raises_logs_disabled(self):
<ide> assert "'exitCode': 1" in str(ctx.value)
<ide> client_mock.describe_tasks.assert_called_once_with(cluster='c', tasks=['arn'])
<ide>
<add> def test_check_success_tasks_handles_initialization_failure(self):
<add> client_mock = mock.Mock()
<add> self.ecs.arn = 'arn'
<add> self.ecs.client = client_mock
<add>
<add> # exitCode is missing during some container initialization failures
<add> client_mock.describe_tasks.return_value = {
<add> 'tasks': [{'containers': [{'name': 'foo', 'lastStatus': 'STOPPED'}]}]
<add> }
<add>
<add> with pytest.raises(Exception) as ctx:
<add> self.ecs._check_success_task()
<add>
<add> print(str(ctx.value))
<add> assert "This task is not in success state " in str(ctx.value)
<add> assert "'name': 'foo'" in str(ctx.value)
<add> assert "'lastStatus': 'STOPPED'" in str(ctx.value)
<add> assert "exitCode" not in str(ctx.value)
<add> client_mock.describe_tasks.assert_called_once_with(cluster='c', tasks=['arn'])
<add>
<ide> def test_check_success_tasks_raises_pending(self):
<ide> client_mock = mock.Mock()
<ide> self.ecs.client = client_mock | 2 |
Python | Python | add beam search | 9660ba1cbdec0e419937af06bd99f06fb5ebbf91 | <ide><path>examples/run_summarization_finetuning.py
<del># coding=utf-8
<del># Copyright 2019 The HuggingFace Inc. team.
<del># Copyright (c) 2019 The HuggingFace Inc. All rights reserved.
<del>#
<del># Licensed under the Apache License, Version 2.0 (the "License");
<del># you may not use this file except in compliance with the License.
<del># You may obtain a copy of the License at
<del>#
<del># http://www.apache.org/licenses/LICENSE-2.0
<del>#
<del># Unless required by applicable law or agreed to in writing, software
<del># distributed under the License is distributed on an "AS IS" BASIS,
<del># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<del># See the License for the specific language governing permissions and
<del># limitations under the License.
<del>""" Finetuning seq2seq models for sequence generation."""
<del>
<del>import argparse
<del>import functools
<del>import logging
<del>import os
<del>import random
<del>import sys
<del>
<del>import numpy as np
<del>from tqdm import tqdm, trange
<del>import torch
<del>from torch.optim import Adam
<del>from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
<del>
<del>from transformers import (
<del> AutoTokenizer,
<del> BertForMaskedLM,
<del> BertConfig,
<del> PreTrainedEncoderDecoder,
<del> Model2Model,
<del>)
<del>
<del>from utils_summarization import (
<del> CNNDailyMailDataset,
<del> encode_for_summarization,
<del> fit_to_block_size,
<del> build_lm_labels,
<del> build_mask,
<del> compute_token_type_ids,
<del>)
<del>
<del>logger = logging.getLogger(__name__)
<del>logging.basicConfig(stream=sys.stdout, level=logging.INFO)
<del>
<del>
<del>def set_seed(args):
<del> random.seed(args.seed)
<del> np.random.seed(args.seed)
<del> torch.manual_seed(args.seed)
<del>
<del>
<del># ------------
<del># Load dataset
<del># ------------
<del>
<del>
<del>def load_and_cache_examples(args, tokenizer):
<del> dataset = CNNDailyMailDataset(tokenizer, data_dir=args.data_dir)
<del> return dataset
<del>
<del>
<del>def collate(data, tokenizer, block_size):
<del> """ List of tuple as an input. """
<del> # remove the files with empty an story/summary, encode and fit to block
<del> data = filter(lambda x: not (len(x[0]) == 0 or len(x[1]) == 0), data)
<del> data = [
<del> encode_for_summarization(story, summary, tokenizer) for story, summary in data
<del> ]
<del> data = [
<del> (
<del> fit_to_block_size(story, block_size, tokenizer.pad_token_id),
<del> fit_to_block_size(summary, block_size, tokenizer.pad_token_id),
<del> )
<del> for story, summary in data
<del> ]
<del>
<del> stories = torch.tensor([story for story, summary in data])
<del> summaries = torch.tensor([summary for story, summary in data])
<del> encoder_token_type_ids = compute_token_type_ids(stories, tokenizer.cls_token_id)
<del> encoder_mask = build_mask(stories, tokenizer.pad_token_id)
<del> decoder_mask = build_mask(summaries, tokenizer.pad_token_id)
<del> lm_labels = build_lm_labels(summaries, tokenizer.pad_token_id)
<del>
<del> return (
<del> stories,
<del> summaries,
<del> encoder_token_type_ids,
<del> encoder_mask,
<del> decoder_mask,
<del> lm_labels,
<del> )
<del>
<del>
<del># ----------
<del># Optimizers
<del># ----------
<del>
<del>
<del>class BertSumOptimizer(object):
<del> """ Specific optimizer for BertSum.
<del>
<del> As described in [1], the authors fine-tune BertSum for abstractive
<del> summarization using two Adam Optimizers with different warm-up steps and
<del> learning rate. They also use a custom learning rate scheduler.
<del>
<del> [1] Liu, Yang, and Mirella Lapata. "Text summarization with pretrained encoders."
<del> arXiv preprint arXiv:1908.08345 (2019).
<del> """
<del>
<del> def __init__(self, model, lr, warmup_steps, beta_1=0.99, beta_2=0.999, eps=1e-8):
<del> self.encoder = model.encoder
<del> self.decoder = model.decoder
<del> self.lr = lr
<del> self.warmup_steps = warmup_steps
<del>
<del> self.optimizers = {
<del> "encoder": Adam(
<del> model.encoder.parameters(),
<del> lr=lr["encoder"],
<del> betas=(beta_1, beta_2),
<del> eps=eps,
<del> ),
<del> "decoder": Adam(
<del> model.decoder.parameters(),
<del> lr=lr["decoder"],
<del> betas=(beta_1, beta_2),
<del> eps=eps,
<del> ),
<del> }
<del>
<del> self._step = 0
<del>
<del> def _update_rate(self, stack):
<del> return self.lr[stack] * min(
<del> self._step ** (-0.5), self._step * self.warmup_steps[stack] ** (-0.5)
<del> )
<del>
<del> def zero_grad(self):
<del> self.optimizer_decoder.zero_grad()
<del> self.optimizer_encoder.zero_grad()
<del>
<del> def step(self):
<del> self._step += 1
<del> for stack, optimizer in self.optimizers.items():
<del> new_rate = self._update_rate(stack)
<del> for param_group in optimizer.param_groups:
<del> param_group["lr"] = new_rate
<del> optimizer.step()
<del>
<del>
<del># ------------
<del># Train
<del># ------------
<del>
<del>
<del>def train(args, model, tokenizer):
<del> """ Fine-tune the pretrained model on the corpus. """
<del> set_seed(args)
<del>
<del> # Load the data
<del> args.train_batch_size = args.per_gpu_train_batch_size * max(1, args.n_gpu)
<del> train_dataset = load_and_cache_examples(args, tokenizer)
<del> train_sampler = RandomSampler(train_dataset)
<del> model_collate_fn = functools.partial(collate, tokenizer=tokenizer, block_size=512)
<del> train_dataloader = DataLoader(
<del> train_dataset,
<del> sampler=train_sampler,
<del> batch_size=args.train_batch_size,
<del> collate_fn=model_collate_fn,
<del> )
<del>
<del> # Training schedule
<del> if args.max_steps > 0:
<del> t_total = args.max_steps
<del> args.num_train_epochs = t_total // (
<del> len(train_dataloader) // args.gradient_accumulation_steps + 1
<del> )
<del> else:
<del> t_total = (
<del> len(train_dataloader)
<del> // args.gradient_accumulation_steps
<del> * args.num_train_epochs
<del> )
<del>
<del> # Prepare the optimizer
<del> lr = {"encoder": 0.002, "decoder": 0.2}
<del> warmup_steps = {"encoder": 20000, "decoder": 10000}
<del> optimizer = BertSumOptimizer(model, lr, warmup_steps)
<del>
<del> # Train
<del> logger.info("***** Running training *****")
<del> logger.info(" Num examples = %d", len(train_dataset))
<del> logger.info(" Num Epochs = %d", args.num_train_epochs)
<del> logger.info(
<del> " Instantaneous batch size per GPU = %d", args.per_gpu_train_batch_size
<del> )
<del> logger.info(
<del> " Total train batch size (w. parallel, distributed & accumulation) = %d",
<del> args.train_batch_size * args.gradient_accumulation_steps
<del> # * (torch.distributed.get_world_size() if args.local_rank != -1 else 1),
<del> )
<del> logger.info(" Gradient Accumulation steps = %d", args.gradient_accumulation_steps)
<del> logger.info(" Total optimization steps = %d", t_total)
<del>
<del> model.zero_grad()
<del> train_iterator = trange(args.num_train_epochs, desc="Epoch", disable=True)
<del>
<del> global_step = 0
<del> tr_loss = 0.0
<del> for _ in train_iterator:
<del> epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=True)
<del> for step, batch in enumerate(epoch_iterator):
<del> source, target, encoder_token_type_ids, encoder_mask, decoder_mask, lm_labels = batch
<del>
<del> source = source.to(args.device)
<del> target = target.to(args.device)
<del> encoder_token_type_ids = encoder_token_type_ids.to(args.device)
<del> encoder_mask = encoder_mask.to(args.device)
<del> decoder_mask = decoder_mask.to(args.device)
<del> lm_labels = lm_labels.to(args.device)
<del>
<del> model.train()
<del> outputs = model(
<del> source,
<del> target,
<del> encoder_token_type_ids=encoder_token_type_ids,
<del> encoder_attention_mask=encoder_mask,
<del> decoder_attention_mask=decoder_mask,
<del> decoder_lm_labels=lm_labels,
<del> )
<del>
<del> loss = outputs[0]
<del> print(loss)
<del> if args.gradient_accumulation_steps > 1:
<del> loss /= args.gradient_accumulation_steps
<del>
<del> loss.backward()
<del>
<del> tr_loss += loss.item()
<del> if (step + 1) % args.gradient_accumulation_steps == 0:
<del> torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)
<del> optimizer.step()
<del> model.zero_grad()
<del> global_step += 1
<del>
<del> if args.max_steps > 0 and global_step > args.max_steps:
<del> epoch_iterator.close()
<del> break
<del>
<del> if args.max_steps > 0 and global_step > args.max_steps:
<del> train_iterator.close()
<del> break
<del>
<del> return global_step, tr_loss / global_step
<del>
<del>
<del># ------------
<del># Train
<del># ------------
<del>
<del>
<del>def evaluate(args, model, tokenizer, prefix=""):
<del> set_seed(args)
<del>
<del> args.eval_batch_size = args.per_gpu_eval_batch_size * max(1, args.n_gpu)
<del> eval_dataset = load_and_cache_examples(args, tokenizer, evaluate=True)
<del> eval_sampler = SequentialSampler(eval_dataset)
<del> eval_dataloader = DataLoader(
<del> eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size
<del> )
<del>
<del> # multi-gpu evaluate
<del> if args.n_gpu > 1:
<del> model = torch.nn.DataParallel(model)
<del>
<del> logger.info("***** Running evaluation {} *****".format(prefix))
<del> logger.info(" Num examples = %d", len(eval_dataset))
<del> logger.info(" Batch size = %d", args.eval_batch_size)
<del> eval_loss = 0.0
<del> nb_eval_steps = 0
<del> model.eval()
<del>
<del> for batch in tqdm(eval_dataloader, desc="Evaluating"):
<del> source, target, encoder_token_type_ids, encoder_mask, decoder_mask, lm_labels = batch
<del>
<del> source = source.to(args.device)
<del> target = target.to(args.device)
<del> encoder_token_type_ids = encoder_token_type_ids.to(args.device)
<del> encoder_mask = encoder_mask.to(args.device)
<del> decoder_mask = decoder_mask.to(args.device)
<del> lm_labels = lm_labels.to(args.device)
<del>
<del> with torch.no_grad():
<del> outputs = model(
<del> source,
<del> target,
<del> encoder_token_type_ids=encoder_token_type_ids,
<del> encoder_attention_mask=encoder_mask,
<del> decoder_attention_mask=decoder_mask,
<del> decoder_lm_labels=lm_labels,
<del> )
<del> lm_loss = outputs[0]
<del> eval_loss += lm_loss.mean().item()
<del> nb_eval_steps += 1
<del>
<del> eval_loss = eval_loss / nb_eval_steps
<del> perplexity = torch.exp(torch.tensor(eval_loss))
<del>
<del> result = {"perplexity": perplexity}
<del>
<del> # Save the evaluation's results
<del> output_eval_file = os.path.join(args.output_dir, "eval_results.txt")
<del> if not os.path.exists(args.output_dir):
<del> os.makedirs(args.output_dir)
<del>
<del> with open(output_eval_file, "w") as writer:
<del> logger.info("***** Eval results {} *****".format(prefix))
<del> for key in sorted(result.keys()):
<del> logger.info(" %s = %s", key, str(result[key]))
<del> writer.write("%s = %s\n" % (key, str(result[key])))
<del>
<del> return result
<del>
<del>
<del>def save_model_checkpoints(args, model, tokenizer):
<del> if not os.path.exists(args.output_dir):
<del> os.makedirs(args.output_dir)
<del>
<del> logger.info("Saving model checkpoint to %s", args.output_dir)
<del>
<del> # Save a trained model, configuration and tokenizer using `save_pretrained()`.
<del> # They can then be reloaded using `from_pretrained()`
<del> model_to_save = (
<del> model.module if hasattr(model, "module") else model
<del> ) # Take care of distributed/parallel training
<del> model_to_save.save_pretrained(args.output_dir, model_type='bert')
<del> tokenizer.save_pretrained(args.output_dir)
<del> torch.save(args, os.path.join(args.output_dir, "training_arguments.bin"))
<del>
<del>
<del>def main():
<del> parser = argparse.ArgumentParser()
<del>
<del> # Required parameters
<del> parser.add_argument(
<del> "--data_dir",
<del> default=None,
<del> type=str,
<del> required=True,
<del> help="The input training data file (a text file).",
<del> )
<del> parser.add_argument(
<del> "--output_dir",
<del> default=None,
<del> type=str,
<del> required=True,
<del> help="The output directory where the model predictions and checkpoints will be written.",
<del> )
<del>
<del> # Optional parameters
<del> parser.add_argument(
<del> "--gradient_accumulation_steps",
<del> type=int,
<del> default=1,
<del> help="Number of updates steps to accumulate before performing a backward/update pass.",
<del> )
<del> parser.add_argument(
<del> "--do_evaluate",
<del> type=bool,
<del> default=False,
<del> help="Run model evaluation on out-of-sample data.",
<del> )
<del> parser.add_argument("--do_train", type=bool, default=False, help="Run training.")
<del> parser.add_argument(
<del> "--do_overwrite_output_dir",
<del> type=bool,
<del> default=False,
<del> help="Whether to overwrite the output dir.",
<del> )
<del> parser.add_argument(
<del> "--model_name_or_path",
<del> default="bert-base-cased",
<del> type=str,
<del> help="The model checkpoint to initialize the encoder and decoder's weights with.",
<del> )
<del> parser.add_argument(
<del> "--model_type",
<del> default="bert",
<del> type=str,
<del> help="The decoder architecture to be fine-tuned.",
<del> )
<del> parser.add_argument(
<del> "--max_grad_norm", default=1.0, type=float, help="Max gradient norm."
<del> )
<del> parser.add_argument(
<del> "--max_steps",
<del> default=-1,
<del> type=int,
<del> help="If > 0: set total number of training steps to perform. Override num_train_epochs.",
<del> )
<del> parser.add_argument(
<del> "--to_cpu", default=False, type=bool, help="Whether to force training on CPU."
<del> )
<del> parser.add_argument(
<del> "--num_train_epochs",
<del> default=10,
<del> type=int,
<del> help="Total number of training epochs to perform.",
<del> )
<del> parser.add_argument(
<del> "--per_gpu_train_batch_size",
<del> default=4,
<del> type=int,
<del> help="Batch size per GPU/CPU for training.",
<del> )
<del> parser.add_argument("--seed", default=42, type=int)
<del> args = parser.parse_args()
<del>
<del> if (
<del> os.path.exists(args.output_dir)
<del> and os.listdir(args.output_dir)
<del> and args.do_train
<del> and not args.do_overwrite_output_dir
<del> ):
<del> raise ValueError(
<del> "Output directory ({}) already exists and is not empty. Use --do_overwrite_output_dir to overwrite.".format(
<del> args.output_dir
<del> )
<del> )
<del>
<del> # Set up training device
<del> if args.to_cpu or not torch.cuda.is_available():
<del> args.device = torch.device("cpu")
<del> args.n_gpu = 0
<del> else:
<del> args.device = torch.device("cuda")
<del> args.n_gpu = torch.cuda.device_count()
<del>
<del> # Load pretrained model and tokenizer. The decoder's weights are randomly initialized.
<del> tokenizer = AutoTokenizer.from_pretrained(args.model_name_or_path)
<del> config = BertConfig.from_pretrained(args.model_name_or_path)
<del> decoder_model = BertForMaskedLM(config)
<del> model = Model2Model.from_pretrained(
<del> args.model_name_or_path, decoder_model=decoder_model
<del> )
<del>
<del> # Setup logging
<del> logging.basicConfig(
<del> format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
<del> datefmt="%m/%d/%Y %H:%M:%S",
<del> level=logging.INFO,
<del> )
<del> logger.warning(
<del> "Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, 16-bits training: %s",
<del> 0,
<del> args.device,
<del> args.n_gpu,
<del> False,
<del> False,
<del> )
<del>
<del> logger.info("Training/evaluation parameters %s", args)
<del>
<del> # Train the model
<del> model.to(args.device)
<del> if args.do_train:
<del> try:
<del> global_step, tr_loss = train(args, model, tokenizer)
<del> except KeyboardInterrupt:
<del> response = input("You interrupted the training. Do you want to save the model checkpoints? [Y/n]")
<del> if response.lower() in ["", "y", "yes"]:
<del> save_model_checkpoints(args, model, tokenizer)
<del> sys.exit(0)
<del>
<del> logger.info(" global_step = %s, average loss = %s", global_step, tr_loss)
<del> save_model_checkpoints(args, model, tokenizer)
<del>
<del> # Evaluate the model
<del> results = {}
<del> if args.do_evaluate:
<del> checkpoints = [args.output_dir]
<del> logger.info("Evaluate the following checkpoints: %s", checkpoints)
<del> for checkpoint in checkpoints:
<del> encoder_checkpoint = os.path.join(checkpoint, "bert_encoder")
<del> decoder_checkpoint = os.path.join(checkpoint, "bert_decoder")
<del> model = PreTrainedEncoderDecoder.from_pretrained(
<del> encoder_checkpoint, decoder_checkpoint
<del> )
<del> model.to(args.device)
<del> print("model loaded")
<del>
<del> return results
<del>
<del>
<del>if __name__ == "__main__":
<del> main()
<ide><path>examples/utils_summarization.py
<ide> class CNNDailyMailDataset(Dataset):
<ide> [2] https://github.com/abisee/cnn-dailymail/
<ide> """
<ide>
<del> def __init__(self, tokenizer, prefix="train", data_dir=""):
<add> def __init__(self, data_dir="", prefix="train"):
<ide> assert os.path.isdir(data_dir)
<del> self.tokenizer = tokenizer
<ide>
<ide> # We initialize the class by listing all the files that contain
<ide> # stories and summaries. Files are not read in memory given
<ide> def _add_missing_period(line):
<ide> # --------------------------
<ide>
<ide>
<del>def fit_to_block_size(sequence, block_size, pad_token):
<add>def fit_to_block_size(sequence, block_size, pad_token_id):
<ide> """ Adapt the source and target sequences' lengths to the block size.
<del> If the sequence is shorter than the block size we pad it with -1 ids
<del> which correspond to padding tokens.
<add> If the sequence is shorter we append padding token to the right of the sequence.
<ide> """
<ide> if len(sequence) > block_size:
<ide> return sequence[:block_size]
<ide> else:
<del> sequence.extend([pad_token] * (block_size - len(sequence)))
<add> sequence.extend([pad_token_id] * (block_size - len(sequence)))
<ide> return sequence
<ide>
<ide>
<del>def build_lm_labels(sequence, pad_token):
<del> """ Padding token, encoded as 0, are represented by the value -1 so they
<add>def build_lm_labels(sequence, pad_token_id):
<add> """ Padding token are replaced by the value -1 so they
<ide> are not taken into account in the loss computation. """
<ide> padded = sequence.clone()
<del> padded[padded == pad_token] = -1
<add> padded[padded == pad_token_id] = -1
<ide> return padded
<ide>
<ide>
<del>def build_mask(sequence, pad_token):
<add>def build_mask(sequence, pad_token_id):
<ide> """ Builds the mask. The attention mechanism will only attend to positions
<ide> with value 1. """
<ide> mask = torch.ones_like(sequence)
<del> idx_pad_tokens = sequence == pad_token
<add> idx_pad_tokens = sequence == pad_token_id
<ide> mask[idx_pad_tokens] = 0
<ide> return mask
<ide>
<ide><path>transformers/generate/__init__.py
<add>from .beam_search import BeamSearch
<ide><path>transformers/generate/beam_search.py
<add># coding=utf-8
<add># MIT License
<add>
<add># Copyright (c) 2017-Present OpenNMT
<add>
<add># Permission is hereby granted, free of charge, to any person obtaining a copy of
<add># this software and associated documentation files (the "Software"), to deal in
<add># the Software without restriction, including without limitation the rights to
<add># use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
<add># of the Software, and to permit persons to whom the Software is furnished to do
<add># so, subject to the following conditions:
<add>
<add># The above copyright notice and this permission notice shall be included in all
<add># copies or substantial portions of the Software.
<add>
<add># THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
<add># IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
<add># FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
<add># AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
<add># LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
<add># OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
<add># SOFTWARE.
<add>"""
<add>Use Beam Search to generate sequences using encoder-decoder models.
<add>"""
<add>import torch
<add>from torch import nn
<add>
<add>
<add>class BeamSearch(nn.Module):
<add> def __init__(
<add> self,
<add> model,
<add> tokenizer,
<add> beam_size,
<add> min_length,
<add> max_length,
<add> batch_size=1,
<add> alpha=0,
<add> block_repeating_trigrams=True,
<add> ):
<add> r"""
<add> Inputs:
<add> **model**: instance of ``transformers.PreTrainedEncoderDecoder``
<add> The pretrained encoder-decoder model that will be used to generate the sequences.
<add> **tokenizer**: instance of ``transformers.PreTrainedTokenizer``
<add> The pretrained tokenizer associated to the model used in the encoder-decoder. We only
<add> support encoder-decoder that use the same tokenizer for encoder and decoder. The tokenizer
<add> needs to be initialized or this function will raise and exception.
<add> **batch_size**: (`optional`) int
<add> Batch size of the inputs. The value is set automatically when calling `forward`.
<add> **beam_size**: int
<add> Number of beams that are used for each element on the batch.
<add> **min_length**: int
<add> Minimum number of steps performed by the beam search before terminating.
<add> **max_length**: int
<add> Maximum number of steps performed by the beam search. Any beam that has not finished
<add> will return its current solution with the highest probability. The sequence that is
<add> returned has a length of max_length-1 to account for the end token that is subsequently added.
<add> **alpha**: float
<add> Parameter of the length penalty. Read the documentation of the `_length_penalty` method for mode details.
<add> **block_repeating_trigrams**: bool
<add> Whether to block sequences that have repeating 3-grams.
<add> """
<add> super(BeamSearch, self).__init__()
<add> self.model = model
<add> self.tokenizer = tokenizer
<add>
<add> self.bos_token_id = tokenizer.bos_token_id
<add> self.eos_token_id = tokenizer.eos_token_id
<add> self.pad_token_id = tokenizer.pad_token_id
<add>
<add> self.batch_size = batch_size
<add> self.beam_size = beam_size
<add> self.min_length = min_length
<add> self.max_length = max_length
<add>
<add> self.block_repeating_trigram = block_repeating_trigrams
<add> self.apply_length_penalty = False if alpha == 0 else True
<add> self.alpha = alpha
<add>
<add> self._init_beam_state(batch_size)
<add>
<add> def __len__(self):
<add> try:
<add> return self.growing_beams.size(1)
<add> except NameError:
<add> return 0
<add>
<add> def _init_beam_state(self, batch_size):
<add> """ (re-)Initialize the state of the beams. """
<add> self.hypotheses = [[] for _ in range(batch_size)]
<add> self.batch_offset = torch.arange(batch_size, dtype=torch.long)
<add> self.beam_offset = torch.arange(
<add> 0, batch_size * self.beam_size, step=self.beam_size, dtype=torch.long
<add> )
<add> self.growing_beams = torch.full(
<add> (batch_size * self.beam_size, 1), self.bos_token_id, dtype=torch.long
<add> )
<add> self.topk_log_probabilities = torch.tensor(
<add> [0.0] + [float("-inf")] * (self.beam_size - 1), dtype=torch.float
<add> ).repeat(batch_size)
<add> self.results = {
<add> "predictions": [[] for _ in range(batch_size)],
<add> "scores": [[] for _ in range(batch_size)],
<add> }
<add> self._step = 0
<add> self.is_done = False
<add>
<add> def forward(self, encoder_input_ids, **model_kwargs):
<add> """ Generate a sequence using Beam Search. """
<add> # keyword arguments come in 3 flavors: encoder-specific (prefixed by
<add> # `encoder_`), decoder-specific (prefixed by `decoder_`) and those
<add> # that apply to the model as whole.
<add> # We let the specific kwargs override the common ones in case of conflict.
<add> kwargs_common = {
<add> argument: value
<add> for argument, value in model_kwargs.items()
<add> if not argument.startswith("encoder_") and not argument.startswith("decoder_")
<add> }
<add> kwargs_decoder = kwargs_common.copy()
<add> kwargs_encoder = kwargs_common.copy()
<add> kwargs_encoder.update(
<add> {
<add> argument[len("encoder_") :]: value
<add> for argument, value in model_kwargs.items()
<add> if argument.startswith("encoder_")
<add> }
<add> )
<add> kwargs_decoder.update(
<add> {
<add> argument[len("decoder_") :]: value
<add> for argument, value in model_kwargs.items()
<add> if argument.startswith("decoder_")
<add> }
<add> )
<add>
<add> # forward pass on the encoder
<add> encoder_outputs = self.model.encoder.forward(encoder_input_ids, kwargs_encoder)
<add> kwargs_decoder["encoder_hidden_states"] = tile(
<add> encoder_outputs, self.beam_size, dim=0
<add> )
<add>
<add> # grow the beam by generating sequences in an autoregressive way
<add> batch_size = encoder_input_ids.size(0)
<add> self._init_beam_state(batch_size)
<add> for step in range(self.max_length):
<add> # prepare the decoder input
<add> decoder_input = fit_to_block_size(
<add> self.growing_beams, self.tokenizer.pad_token_id
<add> )
<add> kwargs_decoder["decoder_lm_labels"] = build_lm_labels(
<add> decoder_input, self.tokenizer.pad_token_id
<add> )
<add> kwargs_decoder["decoder_attention_mask"] = build_mask(
<add> decoder_input, self.tokenizer.pad_token_id
<add> )
<add>
<add> outputs = self.model.decoder(decoder_input, kwargs_decoder)
<add> log_probabilities = torch.nn.functional.log_softmax(outputs[1])
<add> surviving_beams_rows = self.grow(log_probabilities)
<add> if self.is_done:
<add> break
<add>
<add> kwargs_decoder["encoder_hidden_states"] = kwargs_decoder[
<add> "encoder_hidden_states"
<add> ].index_select(0, surviving_beams_rows)
<add> kwargs_decoder["encoder_attention_mask"] = kwargs_decoder[
<add> "encoder_attention_mask"
<add> ].index_select(0, surviving_beams_rows)
<add>
<add> return self.results
<add>
<add> def grow(self, log_probabilities):
<add> """ Grow the beams by one step. """
<add> self._step += 1
<add>
<add> # The number of beams changes as some beams finish so we define _B
<add> vocab_size = log_probabilities.size(-1)
<add> _B = log_probabilities.size(0) // self.beam_size
<add>
<add> # Multiply each beam probability with the probability of the
<add> # next token (conditioned on the words in the beam).
<add> log_probabilities += self.topk_log_probabilities.view(-1, 1)
<add>
<add> self._enforce_min_length(log_probabilities)
<add> if self.block_repeating_trigram:
<add> self._remove_beams_with_repeating_trigrams(log_probabilities, _B)
<add>
<add> # Find the `beam_size` (previous_beam + token) combinations with
<add> # the highest score
<add> topk_log_probabilities, topk_ids = torch.topk(
<add> log_probabilities.view(_B, self.beam_size * vocab_size), self.beam_size, dim=1
<add> )
<add>
<add> # Apply the length penalty. The +1 accounts for the [EOS] token
<add> # that will be added if the beam ends.
<add> topk_scores = topk_log_probabilities
<add> if self.apply_length_penalty:
<add> topk_scores /= self._length_penalty()
<add>
<add> # Retrieve the corresponding respective beam and token id
<add> # topk_token_ids[i] will be added to topk_beam_ids[i]
<add> topk_beam_ids = topk_ids.div(vocab_size)
<add> topk_token_ids = topk_ids.fmod(vocab_size)
<add>
<add> # Retrieve the row index of the surviving beams in the original
<add> # view of the log_probabilities tensor
<add> surviving_beams_per_batch = topk_beam_ids + self.beam_offset[:_B].view(-1, 1)
<add> surviving_beams_rows = surviving_beams_per_batch.view(-1)
<add>
<add> # Append the last predictions
<add> self.growing_beams = torch.cat(
<add> [
<add> self.growing_beams.index_select(0, surviving_beams_rows),
<add> topk_token_ids.view(-1, 1),
<add> ],
<add> 1,
<add> )
<add>
<add> # Check if any of the beam searches has ended during this
<add> # growth step. Also if top beam (most probable) has ended
<add> # for one element of the batch.
<add> is_finished = topk_token_ids.eq(self.eos_token_id)
<add> self._enforce_max_length(is_finished)
<add> if is_finished.any():
<add> non_finished = self._cut_finished(is_finished, topk_scores)
<add> self.batch_offset = self.batch_offset.index_select(0, non_finished)
<add> surviving_beams_per_batch = surviving_beams_per_batch.index_select(
<add> 0, non_finished
<add> )
<add> self.topk_log_probabilities = self.topk_log_probabilities.index_select(
<add> 0, non_finished
<add> )
<add>
<add> surviving_beams_rows = surviving_beams_per_batch.view(-1)
<add> self.growing_beams = self.growing_beams.index_select(0, surviving_beams_rows)
<add>
<add> return surviving_beams_rows
<add>
<add> def _cut_finished(self, is_finished, topk_scores):
<add> """ Save the finished searches and cut the correponding sequences off
<add> the beams. """
<add> is_top_beam_finished = is_finished[:, 0].eq(True)
<add>
<add> # Save the finished searches
<add> predictions = self.growing_beams.view(
<add> -1, self.beam_size, self.growing_beams.size(1)
<add> )
<add> for i in range(is_finished.size(0)):
<add> if is_top_beam_finished[i]:
<add> is_finished[i].fill_(1)
<add> finished_hyp = is_finished[i].nonzero().view(-1)
<add>
<add> # Store the finished beams as a (score, prediction) hypothesis.
<add> b = self.batch_offset[i]
<add> for j in finished_hyp:
<add> self.hypotheses[b].append((topk_scores[i, j], predictions[i, j, :]))
<add>
<add> # If the batch reached the end, save the best hypotheses
<add> # in terms of length-penalized score.
<add> if is_top_beam_finished[i]:
<add> best_score, best_prediction = max(self.hypotheses[b], key=lambda x: x[0])
<add> self.results["scores"][b].append(best_score)
<add> self.results["predictions"][b].append(best_prediction)
<add>
<add> non_finished = is_top_beam_finished.eq(False).nonzero().view(-1)
<add> if len(non_finished) == 0:
<add> self.is_done = True
<add>
<add> return non_finished
<add>
<add> def _remove_beams_with_repeating_trigrams(self, log_probabilities, _B):
<add> if self._step + 1 > 3: # [BOS] does not count
<add> for i in range(_B * self.beam_size):
<add> tokens = self.growing_beams[i]
<add> trigrams = [
<add> (tokens[j - 1], tokens[j], tokens[j + 1])
<add> for j in range(1, len(self) - 1)
<add> ]
<add> last_trigram = tuple(trigrams[-1])
<add> if last_trigram in trigrams[:-1]:
<add> log_probabilities[i] = -1e20
<add>
<add> def _enforce_min_length(self, log_probabilities):
<add> if self._step < self.min_length:
<add> log_probabilities[:, self.eos_token_id] = -1e20
<add>
<add> def _enforce_max_length(self, is_finished):
<add> # +1 because we will need to add an [EOS] token
<add> if self._step + 1 == self.max_length:
<add> is_finished.fill_(1)
<add>
<add> def _length_penalty(self):
<add> """ The calculation of the length penalty follows that of [1].
<add>
<add> [1] Wu, Yonghui, et al. "Google's neural machine translation system:
<add> Bridging the gap between human and machine translation." arXiv preprint
<add> arXiv:1609.08144 (2016).
<add> """
<add> return ((5.0 + (self._step + 1)) / 6.0) ** self.alpha
<add>
<add>
<add>def tile(x, count, dim=0):
<add> """
<add> Tiles `x` along dimension `dim` `count` times.
<add>
<add> Example:
<add> >> ex = torch.tensor([[1,2],[3,4]])
<add> >> tile(ex, 2, 0)
<add> torch.Tensor([[1,2],[1,2],[3,4],[3,4]])
<add> """
<add> perm = list(range(len(x.size())))
<add> if dim != 0:
<add> perm[0], perm[dim] = perm[dim], perm[0]
<add> x = x.permute(perm).contiguous()
<add> out_size = list(x.size())
<add> out_size[0] *= count
<add> batch = x.size(0)
<add> x = (
<add> x.view(batch, -1)
<add> .transpose(0, 1)
<add> .repeat(count, 1)
<add> .transpose(0, 1)
<add> .contiguous()
<add> .view(*out_size)
<add> )
<add> if dim != 0:
<add> x = x.permute(perm).contiguous()
<add> return x
<add>
<add>
<add>def fit_to_block_size(sequence, block_size, pad_token_id):
<add> """ Adapt the source and target sequences' lengths to the block size.
<add> If the sequence is shorter we append padding tokens to the right.
<add> """
<add> if len(sequence) > block_size:
<add> return sequence[:block_size]
<add> else:
<add> sequence.extend([pad_token_id] * (block_size - len(sequence)))
<add> return sequence
<add>
<add>
<add>def build_lm_labels(sequence, pad_token_id):
<add> """ Padding tokens, encoded as 0, are represented by the value -1 so they
<add> are not taken into account in the loss computation. """
<add> padded = sequence.clone()
<add> padded[padded == pad_token_id] = -1
<add> return padded
<add>
<add>
<add>def build_mask(sequence, pad_token_id):
<add> """ Builds the mask. The attention mechanism will only attend to positions
<add> with value 1. """
<add> mask = torch.ones_like(sequence)
<add> idx_pad_tokens = sequence == pad_token_id
<add> mask[idx_pad_tokens] = 0
<add> return mask
<ide><path>transformers/modeling_beam_search.py
<del># coding=utf-8
<del># Copyright (c) 2019 Yang Liu
<del>
<del># Permission is hereby granted, free of charge, to any person obtaining a copy
<del># of this software and associated documentation files (the "Software"), to deal
<del># in the Software without restriction, including without limitation the rights
<del># to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
<del># copies of the Software, and to permit persons to whom the Software is
<del># furnished to do so, subject to the following conditions:
<del>
<del># The above copyright notice and this permission notice shall be included in all
<del># copies or substantial portions of the Software.
<del>
<del># THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
<del># IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
<del># FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
<del># AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
<del># LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
<del># OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
<del># SOFTWARE.
<del>"""
<del>A general wrapper around models with LM heads to generate sequences
<del>using beam search.
<del>"""
<del>import torch
<del>from torch import nn
<del>
<del>
<del>class TransformerBeamSearch(nn.Module):
<del> def __init__(
<del> self,
<del> model,
<del> tokenizer,
<del> batch_size,
<del> beam_size,
<del> min_length,
<del> max_length,
<del> alpha=0,
<del> block_repeating_trigram=True,
<del> ):
<del> """
<del> Attributes:
<del> mask_word_id: token id that corresponds to the mask
<del> """
<del> super(TransformerBeamSearch, self).__init__()
<del> self.model = model
<del> self.tokenizer = tokenizer
<del>
<del> self.start_token_id = tokenizer.start_token_id
<del> self.end_token_id = tokenizer.end_token_id
<del> self.pad_token_id = tokenizer.pad_token_id
<del>
<del> self.beam_size = beam_size
<del> self.min_length = min_length
<del> self.max_length = max_length
<del>
<del> self.block_repeating_trigram = block_repeating_trigram
<del> self.apply_length_penalty = False if alpha == 0 else True
<del> self.alpha = alpha
<del>
<del> # State of the beam
<del> self.hypotheses = [[] for _ in range(batch_size)]
<del> self.batch_offset = torch.arange(batch_size, dtype=torch.long)
<del> self.beam_offset = torch.arange(
<del> 0, batch_size * self.beam_size, step=self.beam_size, dtype=torch.long
<del> )
<del> self.growing_beam = torch.full(
<del> (batch_size * self.beam_size, 1), self.start_token_id, dtype=torch.long
<del> )
<del> self.topk_log_probabilities = torch.tensor(
<del> [0.0] + [float("-inf")] * (self.beam_size - 1), dtype=torch.float
<del> ).repeat(batch_size)
<del> self.results = {
<del> "prediction": [[] for _ in batch_size],
<del> "scores": [[] for _ in batch_size],
<del> }
<del> self._step = 0
<del> self.is_done = False
<del>
<del> def step(self, log_probabilities):
<del> """ Grows the beam by one step. """
<del> self._step += 1
<del>
<del> # The batch size changes as some beams finish so we define _B
<del> vocab_size = log_probabilities.size(-1)
<del> _B = log_probabilities.size(0) // self.beam_size
<del>
<del> # Multiply each beam probability with the probability of the
<del> # next token (conditioned on the words in the beam).
<del> log_probabilities += self.topk_log_probabilities.view(-1, 1)
<del>
<del> self.enforce_min_length(log_probabilities)
<del> if self.block_repeating_trigram:
<del> self.remove_repeating_trigrams(log_probabilities, _B)
<del>
<del> # Find the `beam_size` (previous_beam + token) combinations with
<del> # the highest score
<del> topk_log_probabilities, topk_ids = log_probabilities.topk(
<del> log_probabilities.view(_B, self.beam_size * vocab_size),
<del> self.beam_size,
<del> dim=1,
<del> )
<del>
<del> # Apply the length penalty. The +1 accounts for the [EOS] token
<del> # that will be added if the beam ends.
<del> topk_scores = topk_log_probabilities / self.length_penalty()
<del>
<del> # Retrieve the corresponding respective beam and token id
<del> # topk_token_ids[i] will be added to topk_beam_ids[i]
<del> topk_beam_ids = topk_ids.div(vocab_size)
<del> topk_token_ids = topk_ids.fmod(vocab_size)
<del>
<del> # Retrieve the row index of the surviving beams in the original
<del> # view of the log_probabilities tensor
<del> surviving_beams_rows = (topk_beam_ids + self.beam_offset[:_B].view(-1, 1)).view(
<del> -1
<del> )
<del>
<del> # Append the last predictions
<del> self.growing_beam = torch.cat(
<del> [
<del> self.growing_beam.index_select(0, surviving_beams_rows),
<del> topk_token_ids.view(-1, 1),
<del> ],
<del> 1,
<del> )
<del>
<del> # Check if any of the beam searches has ended during this
<del> # growth step. Also if top beam (most probable) has ended
<del> # for one element of the batch.
<del> is_finished = topk_token_ids.eq(self.end_token_id)
<del> self.enforce_max_length()
<del> is_top_beam_finished = is_finished[:, 0].eq(1)
<del>
<del> # Save the finished searches
<del> if is_finished.any():
<del> predictions = self.growing_beam.view(
<del> -1, self.beam_size, self.growing_beam.size(1)
<del> )
<del> for i in range(is_finished.size(0)):
<del> if is_top_beam_finished[i]:
<del> is_finished[i].fill_(1)
<del> finished_hyp = is_finished[i].nonzero().view(-1)
<del>
<del> # Store finished hypotheses for this batch.
<del> b = self.batch_offset[i]
<del> for j in finished_hyp:
<del> self.hypotheses[b].append((topk_scores[i, j], predictions[i, j, :]))
<del>
<del> # If the batch reached the end, save the best hypotheses
<del> # in terms of length-penalized score.
<del> if is_top_beam_finished[i]:
<del> best_hyp = sorted(
<del> self.hypotheses[b], key=lambda x: x[0], reverse=True
<del> )
<del> best_score, best_prediction = best_hyp[0]
<del> self.results["scores"][b].append(best_score)
<del> self.results["predictions"][b].append(best_prediction)
<del>
<del> non_finished = is_top_beam_finished.eq(0).nonzero().view(-1)
<del> if len(non_finished) == 0:
<del> self.is_done = True
<del>
<del> # Remove finished batches for the next step.
<del> topk_log_probabilities = topk_log_probabilities.index_select(
<del> 0, non_finished
<del> )
<del> self.batch_offset = self.batch_offset.index_select(0, non_finished)
<del> self.growing_beam = predictions.index_select(0, non_finished).view(
<del> -1, self.growing_beam.size(-1)
<del> )
<del>
<del> surviving_beams_rows = surviving_beams_rows.index_select(0, non_finished)
<del>
<del> return surviving_beams_rows
<del>
<del> def forward(self, encoder_input_ids, **kwargs):
<del> # keyword arguments come in 3 flavors: encoder-specific (prefixed by
<del> # `encoder_`), decoder-specific (prefixed by `decoder_`) and those
<del> # that apply to the model as whole.
<del> # We let the specific kwargs override the common ones in case of conflict.
<del> kwargs_encoder = {
<del> argument[len("encoder_"):]: value
<del> for argument, value in kwargs.items()
<del> if argument.startswith("encoder_")
<del> }
<del> kwargs_decoder = {
<del> argument[len("decoder_"):]: value
<del> for argument, value in kwargs.items()
<del> if argument.startswith("decoder_")
<del> }
<del> kwargs_common = {
<del> argument: value
<del> for argument, value in kwargs.items()
<del> if not (argument.startswith("encoder_") or argument.startswith("decoder_"))
<del> }
<del> kwargs_decoder = dict(kwargs_common, **kwargs_decoder)
<del> kwargs_encoder = dict(kwargs_common, **kwargs_encoder)
<del>
<del> # forward pass on the encoder
<del> encoder_outputs = self.model.encoder.forward(encoder_input_ids, kwargs_encoder)
<del> kwargs_decoder["encoder_hidden_states"] = tile(
<del> encoder_outputs, self.beam_size, dim=0
<del> )
<del>
<del> # grow the beam by generating sequences in an autoregressive way
<del> self.growing_beam = torch.full(
<del> (self.batch_size * self.beam_size, 1), self.start_token_id, dtype=torch.long
<del> )
<del> for step in range(self.max_length):
<del> decoder_input = self.growing_beam[:, -1]
<del> outputs = self.model.decoder(decoder_input, kwargs_decoder)
<del> log_probabilities = torch.nn.functional.log_softmax(outputs[1])
<del> surviving_beams_rows = self.step(log_probabilities)
<del> if self.is_done:
<del> break
<del>
<del> kwargs_decoder["encoder_hidden_states"] = kwargs_decoder[
<del> "encoder_hidden_states"
<del> ].index_select(0, surviving_beams_rows)
<del>
<del> return self.results
<del>
<del> def remove_repeating_trigrams(self, log_probabilities, _B):
<del> if(self._step + 1 > 3):
<del> for i in range(_B * self.beam_size):
<del> tokens = [t for t in self.growing_beam[i]]
<del> trigrams = [(tokens[i-1], tokens[i], tokens[i+1]) for i in range(1, len(words) - 1)]
<del> last_trigram = tuple(trigrams[-1])
<del> if last_trigram in trigrams[:-1]:
<del> log_probabilities[i] = -1e20
<del>
<del> def enforce_min_length(self):
<del> if self._step < self.min_length:
<del> self.log_probabilities[self.end_token_id] = -1e20
<del>
<del> def enforce_max_length(self):
<del> if self._step + 1 == self.max_length:
<del> self.is_finished.fill_(1)
<del>
<del> def length_penalty(self):
<del> return ((5.0 + (self._step + 1)) / 6.0) ** self.alpha
<del>
<del>
<del>def tile(x, count, dim=0):
<del> """
<del> Tiles `x` along dimension `dim` `count` times.
<del>
<del> Example:
<del> >> ex = torch.tensor([1,2],[3,4])
<del> >> tile(ex, 2, 0)
<del> torch.Tensor([[1,2],[1,2],[3,4],[3,4]])
<del> """
<del> perm = list(range(len(x.size())))
<del> if dim != 0:
<del> perm[0], perm[dim] = perm[dim], perm[0]
<del> x = x.permute(perm).contiguous()
<del> out_size = list(x.size())
<del> out_size[0] *= count
<del> batch = x.size(0)
<del> x = (
<del> x.view(batch, -1)
<del> .transpose(0, 1)
<del> .repeat(count, 1)
<del> .transpose(0, 1)
<del> .contiguous()
<del> .view(*out_size)
<del> )
<del> if dim != 0:
<del> x = x.permute(perm).contiguous()
<del> return x
<ide><path>transformers/tests/beam_search_tests.py
<add>from collections import namedtuple
<add>import unittest
<add>
<add>import numpy as np
<add>import torch
<add>
<add>from transformers.generate import BeamSearch
<add>from transformers import PreTrainedEncoderDecoder
<add>
<add>
<add>StubTokenizer = namedtuple("Tokenizer", ["bos_token_id", "eos_token_id", "pad_token_id"])
<add>StubTransformer = namedtuple("Transformer", ["encoder", "decoder"])
<add>
<add>
<add>class BeamSearchTest(unittest.TestCase):
<add> def test_beam_search_encoder_decoder_integration(self):
<add> """ We make sure that no internal change in the PreTrainedEncoderDecoder
<add> class will break the integration with the beam search.
<add> """
<add>
<add> model = PreTrainedEncoderDecoder("encoder", "decoder")
<add> tokenizer = StubTokenizer(0, 1, 2)
<add> try:
<add> _ = BeamSearch(
<add> model=model,
<add> tokenizer=tokenizer,
<add> batch_size=1,
<add> beam_size=1,
<add> min_length=1,
<add> max_length=1,
<add> alpha=0,
<add> block_repeating_trigrams=False,
<add> )
<add> except:
<add> self.fail("Instantiating BeamSearch with a PreTrainedEncoderDecoder failed.")
<add>
<add> def test_beam_search_min_length(self):
<add> """ We keep predicting the end_token for the first beam and check that
<add> it is not marked as finished until the beam has reached the minimum
<add> length. """
<add> eos_idx = 3
<add> vocab_size = 10
<add>
<add> batch_size = 3
<add> beam_size = 2
<add> min_length = 5
<add>
<add> beam = BeamSearch(
<add> model=StubTransformer("encoder", "decoder"),
<add> tokenizer=StubTokenizer(bos_token_id=0, eos_token_id=eos_idx, pad_token_id=2),
<add> batch_size=batch_size,
<add> beam_size=beam_size,
<add> min_length=5,
<add> max_length=10,
<add> alpha=0,
<add> block_repeating_trigrams=False,
<add> )
<add>
<add> # To test that the minimum length is correctly enforced we constantly
<add> # assign the highest probability to the [EOS] token (and assign lower
<add> # probabilities to some other tokens).
<add> # Since BeamSearch will reset the log probability to -1e20 as long as
<add> # min_length has not been reached, we need to reset the value between
<add> # steps.
<add> non_eos_idxs = [4, 5, 1, 8, 9]
<add> score_distribution = torch.log_softmax(
<add> torch.tensor([6.0, 5.0, 4.0, 3.0, 2.0, 1.0]), dim=0
<add> )
<add>
<add> log_probabilities = torch.full((batch_size * beam_size, vocab_size), float("-inf"))
<add> log_probabilities[0, eos_idx] = score_distribution[0]
<add> for idx, score in zip(non_eos_idxs, score_distribution[1:]):
<add> log_probabilities[0, idx] = score
<add>
<add> for step in range(1, min_length + 2):
<add> log_probabilities[0, eos_idx] = score_distribution[0]
<add>
<add> # Beams #3 and #4 terminate at the first step since the probability
<add> # of the [EOS] token is -1e20 > -\infty so there are only two beams left.
<add> surviving_beams_rows = beam.grow(log_probabilities)
<add> if step < min_length:
<add> np.testing.assert_array_equal(
<add> beam.growing_beams.numpy(),
<add> np.repeat(np.array([[0] + [4] * step]), 2, axis=0),
<add> )
<add> elif step == min_length:
<add> np.testing.assert_array_equal(surviving_beams_rows.numpy(), np.array([]))
<add> self.assertTrue(beam.is_done)
<add> break
<add>
<add> log_probabilities = log_probabilities.index_select(0, surviving_beams_rows)
<add>
<add> def test_beam_search_max_length(self):
<add> """ We keep predicting the same non-EOS token until we reach the
<add> maximum permitted length """
<add> batch_size = 3
<add> beam_size = 2
<add> max_length = 5
<add> vocab_size = 10
<add>
<add> beam = BeamSearch(
<add> model=StubTransformer("encoder", "decoder"),
<add> tokenizer=StubTokenizer(bos_token_id=0, eos_token_id=1, pad_token_id=2),
<add> batch_size=batch_size,
<add> beam_size=beam_size,
<add> min_length=2,
<add> max_length=max_length,
<add> alpha=0,
<add> block_repeating_trigrams=False,
<add> )
<add>
<add> log_probabilities = torch.full((batch_size * beam_size, vocab_size), float("-inf"))
<add>
<add> # To test that beam search enforces the max length constraint we
<add> # keep giving the highest probability to a token that is not the
<add> # [EOS] token.
<add> # The beam search will stop at max_length-1, assuming that one would
<add> # add the [EOS] token at the end of the returned sequence.
<add> token_idxs = [3, 4, 5]
<add> score_distribution = torch.log_softmax(torch.tensor([10.0, 6.0, 4.0]), dim=0)
<add> for idx, score in zip(token_idxs, score_distribution):
<add> log_probabilities[:, idx] = score
<add>
<add> for step in range(1, max_length + 2):
<add> surviving_beams_rows = beam.grow(log_probabilities)
<add> if step + 1 < max_length:
<add> self.assertFalse(beam.is_done)
<add> elif step + 1 == max_length: # Now [EOS] is the most probable token
<add> np.testing.assert_array_equal(surviving_beams_rows.numpy(), np.array([]))
<add> self.assertTrue(beam.is_done)
<add> break
<add>
<add> log_probabilities = log_probabilities.index_select(0, surviving_beams_rows)
<add>
<add> def test_beam_search_block_repeating_trigrams(self):
<add> """ We make sure that the beams that contain repeating trigrams are removed. """
<add> batch_size = 3
<add> beam_size = 2
<add> max_length = 10
<add> vocab_size = 10
<add>
<add> beam = BeamSearch(
<add> model=StubTransformer("encoder", "decoder"),
<add> tokenizer=StubTokenizer(bos_token_id=0, eos_token_id=1, pad_token_id=2),
<add> batch_size=batch_size,
<add> beam_size=beam_size,
<add> min_length=2,
<add> max_length=max_length,
<add> alpha=0,
<add> block_repeating_trigrams=True,
<add> )
<add>
<add> log_probabilities = torch.full((batch_size * beam_size, vocab_size), float("-inf"))
<add>
<add> # To test that BeamSearch enforces the 3-gram constraint we give the
<add> # highest probability to the same tokens in a cyclic fashion and make sure
<add> # they disappear once the cycle has completed.
<add> token_idxs = [3, 4, 5]
<add> score_distribution = torch.log_softmax(torch.tensor([10.0, 6.0, 4.0]), dim=0)
<add> for idx, score in zip(token_idxs, score_distribution):
<add> log_probabilities[:, idx] = score
<add>
<add> for step in range(1, max_length + 2):
<add> # Rotate the probabilities at each step
<add> for idx in token_idxs:
<add> score = score_distribution[(idx + step) % 3]
<add> log_probabilities[::beam_size, idx] = score
<add>
<add> surviving_beams_rows = beam.grow(log_probabilities)
<add> log_probabilities = log_probabilities.index_select(0, surviving_beams_rows)
<add>
<add> if step < 7:
<add> self.assertFalse(
<add> np.array_equal(
<add> log_probabilities.numpy()[0, :],
<add> np.array([-1e20] * vocab_size, dtype="float32"),
<add> )
<add> )
<add> if step == 7:
<add> np.testing.assert_array_equal(
<add> log_probabilities.numpy()[0, :],
<add> np.array([-1e20] * vocab_size, dtype="float32"),
<add> )
<add>
<add> def test_beam_search_example_for_one_step(self):
<add> """ We test that the predictions for one step of growth are correct. """
<add> batch_size = 2
<add> beam_size = 2
<add> max_length = 10
<add> vocab_size = 5
<add>
<add> beam = BeamSearch(
<add> model=StubTransformer("encoder", "decoder"),
<add> tokenizer=StubTokenizer(bos_token_id=0, eos_token_id=1, pad_token_id=2),
<add> batch_size=batch_size,
<add> beam_size=beam_size,
<add> min_length=2,
<add> max_length=max_length,
<add> alpha=0,
<add> block_repeating_trigrams=False,
<add> )
<add>
<add> log_probabilities = torch.full((batch_size * beam_size, vocab_size), float("-inf"))
<add> log_probabilities[0, 3:] = torch.log_softmax(torch.tensor([2.0, 1.0]), dim=0)
<add> log_probabilities[2, 3:] = torch.log_softmax(torch.tensor([1.0, 2.0]), dim=0)
<add>
<add> # First pass
<add> surviving_beams_rows = beam.grow(log_probabilities)
<add> np.testing.assert_array_equal(surviving_beams_rows.numpy(), np.array([0, 0, 2, 2]))
<add> np.testing.assert_array_equal(
<add> beam.growing_beams.numpy(), np.array([[0, 3], [0, 4], [0, 4], [0, 3]])
<add> )
<add> self.assertFalse(beam.is_done)
<add>
<add> # Second pass
<add> surviving_beams_rows = beam.grow(log_probabilities)
<add> np.testing.assert_array_equal(surviving_beams_rows.numpy(), np.array([0, 0, 2, 2]))
<add> np.testing.assert_array_equal(
<add> beam.growing_beams.numpy(),
<add> np.array([[0, 3, 3], [0, 3, 4], [0, 4, 4], [0, 4, 3]]),
<add> )
<add> self.assertFalse(beam.is_done)
<add>
<add>
<add>if __name__ == "__main__":
<add> unittest.main() | 6 |
Python | Python | revise some of the docstrings | 8e74c991d1336bd51e71dc492544223055c130f4 | <ide><path>numpy/core/fromnumeric.py
<ide> def diagonal(a, offset=0, axis1=0, axis2=1):
<ide> Returns
<ide> -------
<ide> array_of_diagonals : ndarray
<del> If `a` is 2-D, a 1-D array of the same type as `a` containing the
<del> diagonal is returned (except if `a` is a `matrix`, in which case a 1-D
<del> array containing the diagonal is returned in order to maintain
<del> backward compatibility).
<add> If `a` is 2-D, then a 1-D array containing the diagonal and of the
<add> same type as `a` is returned unless `a` is a `matrix`, in which case
<add> a 1-D array rather than a (2-D) `matrix` is returned in order to
<add> maintain backward compatibility.
<add>
<ide> If ``a.ndim > 2``, then the dimensions specified by `axis1` and `axis2`
<ide> are removed, and a new axis inserted at the end corresponding to the
<ide> diagonal.
<ide> def ravel(a, order='C'):
<ide> Returns
<ide> -------
<ide> y : array_like
<del> y is an array of the same subtype as `a`, with shape ``(a.size,)``
<del> (Note: matrices are special-cases for backward compatibility: if `a`
<del> is a matrix, y is a 1-D ndarray.)
<add> y is an array of the same subtype as `a`, with shape ``(a.size,)``.
<add> Note that matrices are special cased for backward compatibility, if `a`
<add> is a matrix, then y is a 1-D ndarray.
<ide>
<ide> See Also
<ide> -------- | 1 |
Python | Python | add support for camembert in run_ner example | 56c84863a1a20dfb82b928c5c9f77c21d9def8c7 | <ide><path>examples/run_ner.py
<ide> from transformers import WEIGHTS_NAME, BertConfig, BertForTokenClassification, BertTokenizer
<ide> from transformers import RobertaConfig, RobertaForTokenClassification, RobertaTokenizer
<ide> from transformers import DistilBertConfig, DistilBertForTokenClassification, DistilBertTokenizer
<add>from transformers import CamembertConfig, CamembertForTokenClassification, CamembertTokenizer
<ide>
<ide> logger = logging.getLogger(__name__)
<ide>
<ide> MODEL_CLASSES = {
<ide> "bert": (BertConfig, BertForTokenClassification, BertTokenizer),
<ide> "roberta": (RobertaConfig, RobertaForTokenClassification, RobertaTokenizer),
<del> "distilbert": (DistilBertConfig, DistilBertForTokenClassification, DistilBertTokenizer)
<add> "distilbert": (DistilBertConfig, DistilBertForTokenClassification, DistilBertTokenizer),
<add> "camembert": (CamembertConfig, CamembertForTokenClassification, CamembertTokenizer),
<ide> }
<ide>
<ide> | 1 |
Text | Text | release notes for 0.9.15 lethal-stutter release | 97573c39305a4d91c14703f1064bdfdd7153b080 | <ide><path>CHANGELOG.md
<ide> <a name="0.9.15"><a/>
<del># <angular/> 0.9.15 lethal-stutter (in-progress) #
<add># <angular/> 0.9.15 lethal-stutter (2011-04-11) #
<ide>
<add>### Features
<add>- IE9 support
<add>
<add>
<add>### Bug Fixes
<add>- reverted [ng:view] sync cache fix due to regression in the order of initialization of parent
<add> and child controllers. (commits 9bd2c396 and 3d388498)
<add>- [$resource] success callback is now executed whenever the http status code is <200,300>
<add>
<add>
<add>### Docs
<add>- fixed intentation code that caused some of the snippets on docs.angularjs.org to be mangled.
<add>- many small improvements of the api docs.
<ide>
<ide>
<ide> | 1 |
Javascript | Javascript | add object.entries and object.values polyfill | 598df0bc201e4a177d51bd3d29a4249b997987df | <ide><path>packager/react-packager/src/Resolver/index.js
<ide> class Resolver {
<ide> path.join(__dirname, 'polyfills/String.prototype.es6.js'),
<ide> path.join(__dirname, 'polyfills/Array.prototype.es6.js'),
<ide> path.join(__dirname, 'polyfills/Array.es6.js'),
<add> path.join(__dirname, 'polyfills/Object.es7.js'),
<ide> path.join(__dirname, 'polyfills/babelHelpers.js'),
<ide> ].concat(this._polyfillModuleNames);
<ide>
<ide><path>packager/react-packager/src/Resolver/polyfills/Object.es7.js
<add>/**
<add> * Copyright 2004-present Facebook. All Rights Reserved.
<add> *
<add> * @provides Object.es7
<add> * @polyfill
<add> */
<add>
<add>(function() {
<add>
<add> const hasOwnProperty = Object.prototype.hasOwnProperty;
<add>
<add> /**
<add> * Returns an array of the given object's own enumerable entries.
<add> * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/entries
<add> *
<add> */
<add> if (typeof Object.entries !== 'function') {
<add> Object.entries = function(object) {
<add> // `null` and `undefined` values are not allowed.
<add> if (object == null) {
<add> throw new TypeError('Object.entries called on non-object');
<add> }
<add>
<add> let entries = [];
<add> for (let key in object) {
<add> if (hasOwnProperty.call(object, key)) {
<add> entries.push([key, object[key]]);
<add> }
<add> }
<add> return entries;
<add> };
<add> }
<add>
<add> /**
<add> * Returns an array of the given object's own enumerable entries.
<add> * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/values
<add> *
<add> */
<add> if (typeof Object.values !== 'function') {
<add> Object.values = function(object) {
<add> // `null` and `undefined` values are not allowed.
<add> if (object == null) {
<add> throw new TypeError('Object.values called on non-object');
<add> }
<add>
<add> let values = [];
<add> for (let key in object) {
<add> if (hasOwnProperty.call(object, key)) {
<add> values.push(object[key]);
<add> }
<add> }
<add> return values;
<add> };
<add> }
<add>
<add>})();
<ide>\ No newline at end of file
<ide><path>packager/react-packager/src/Resolver/polyfills/__tests__/Object.es7-test.js
<add>/**
<add> * Copyright 2004-present Facebook. All Rights Reserved.
<add> *
<add> * @emails oncall+jsinfra
<add> */
<add>
<add> /* eslint-disable fb-www/object-create-only-one-param */
<add>
<add>jest.autoMockOff();
<add>
<add>describe('Object (ES7)', () => {
<add> beforeEach(() => {
<add> delete Object.entries;
<add> delete Object.values;
<add> jest.resetModuleRegistry();
<add> require('../Object.es7');
<add> });
<add>
<add> describe('Object.entries', () => {
<add> it('should have a length of 1', () => {
<add> expect(Object.entries.length).toBe(1);
<add> });
<add>
<add> it('should check for type', () => {
<add> expect(Object.entries.bind(null, null)).toThrow(TypeError(
<add> 'Object.entries called on non-object'
<add> ));
<add> expect(Object.entries.bind(null, undefined)).toThrow(TypeError(
<add> 'Object.entries called on non-object'
<add> ));
<add> expect(Object.entries.bind(null, [])).not.toThrow();
<add> expect(Object.entries.bind(null, () => {})).not.toThrow();
<add> expect(Object.entries.bind(null, {})).not.toThrow();
<add> expect(Object.entries.bind(null, 'abc')).not.toThrow();
<add> });
<add>
<add> it('should return enumerable entries', () => {
<add> let foo = Object.defineProperties({}, {
<add> x: {value: 10, enumerable: true},
<add> y: {value: 20},
<add> });
<add>
<add> expect(Object.entries(foo)).toEqual([['x', 10]]);
<add>
<add> let bar = {x: 10, y: 20};
<add> expect(Object.entries(bar)).toEqual([['x', 10], ['y', 20]]);
<add> });
<add>
<add> it('should work with proto-less objects', () => {
<add> let foo = Object.create(null, {
<add> x: {value: 10, enumerable: true},
<add> y: {value: 20},
<add> });
<add>
<add> expect(Object.entries(foo)).toEqual([['x', 10]]);
<add> });
<add>
<add> it('should return only own entries', () => {
<add> let foo = Object.create({z: 30}, {
<add> x: {value: 10, enumerable: true},
<add> y: {value: 20},
<add> });
<add>
<add> expect(Object.entries(foo)).toEqual([['x', 10]]);
<add> });
<add>
<add> it('should convert to object primitive string', () => {
<add> expect(Object.entries('ab')).toEqual([['0', 'a'], ['1', 'b']]);
<add> });
<add> });
<add>
<add> describe('Object.values', () => {
<add> it('should have a length of 1', () => {
<add> expect(Object.values.length).toBe(1);
<add> });
<add>
<add> it('should check for type', () => {
<add> expect(Object.values.bind(null, null)).toThrow(TypeError(
<add> 'Object.values called on non-object'
<add> ));
<add> expect(Object.values.bind(null, [])).not.toThrow();
<add> expect(Object.values.bind(null, () => {})).not.toThrow();
<add> expect(Object.values.bind(null, {})).not.toThrow();
<add> });
<add>
<add> it('should return enumerable values', () => {
<add> let foo = Object.defineProperties({}, {
<add> x: {value: 10, enumerable: true},
<add> y: {value: 20},
<add> });
<add>
<add> expect(Object.values(foo)).toEqual([10]);
<add>
<add> let bar = {x: 10, y: 20};
<add> expect(Object.values(bar)).toEqual([10, 20]);
<add> });
<add>
<add> it('should work with proto-less objects', () => {
<add> let foo = Object.create(null, {
<add> x: {value: 10, enumerable: true},
<add> y: {value: 20},
<add> });
<add>
<add> expect(Object.values(foo)).toEqual([10]);
<add> });
<add>
<add> it('should return only own values', () => {
<add> let foo = Object.create({z: 30}, {
<add> x: {value: 10, enumerable: true},
<add> y: {value: 20},
<add> });
<add>
<add> expect(Object.values(foo)).toEqual([10]);
<add> });
<add>
<add> it('should convert to object primitive string', () => {
<add> expect(Object.values('ab')).toEqual(['a', 'b']);
<add> });
<add> });
<add>});
<ide>\ No newline at end of file | 3 |
Ruby | Ruby | remove unused params | 6ddd65c9f366c5bca70cfce770d144e2fca1e679 | <ide><path>actionpack/lib/action_dispatch/routing/mapper.rb
<ide> def add_route(action, controller, options, _path, to, via, formatted, anchor, op
<ide> ast = Journey::Parser.parse path
<ide>
<ide> mapping = Mapping.build(@scope, @set, ast, controller, default_action, to, via, formatted, options_constraints, anchor, options)
<del> @set.add_route(mapping, ast, as, anchor)
<add> @set.add_route(mapping, as)
<ide> end
<ide>
<ide> def match_root_route(options)
<ide><path>actionpack/lib/action_dispatch/routing/route_set.rb
<ide> def empty?
<ide> routes.empty?
<ide> end
<ide>
<del> def add_route(mapping, path_ast, name, anchor)
<add> def add_route(mapping, name)
<ide> raise ArgumentError, "Invalid route name: '#{name}'" unless name.blank? || name.to_s.match(/^[_a-z]\w*$/i)
<ide>
<ide> if name && named_routes[name] | 2 |
Text | Text | add a note about the auto-created empty directory | 4196822b9ce82b86041ec627fd37aedddd59bdb4 | <ide><path>docs/getting-started.md
<ide> If you have questions about anything related to Next.js, you're always welcome t
<ide>
<ide> ## Automatic Setup
<ide>
<del>We recommend creating a new Next.js app using `create-next-app`, which sets up everything automatically for you. To create a project, run:
<add>We recommend creating a new Next.js app using `create-next-app`, which sets up everything automatically for you. (You don't need to create an empty directory. `create-next-app` will make one for you.) To create a project, run:
<ide>
<ide> ```bash
<ide> npx create-next-app@latest | 1 |
Javascript | Javascript | remove keymirror usage in jstimers | d99623fcdc662cd329477bf42ca8740ee4433fe7 | <ide><path>Libraries/JavaScriptAppEngine/System/JSTimers/JSTimers.js
<ide> // in dependencies. NativeModules > BatchedBridge > MessageQueue > JSTimersExecution
<ide> const RCTTiming = require('NativeModules').Timing;
<ide> const JSTimersExecution = require('JSTimersExecution');
<add>
<ide> const parseErrorStack = require('parseErrorStack');
<ide>
<add>import type {JSTimerType} from 'JSTimersExecution';
<add>
<ide> // Returns a free index if one is available, and the next consecutive index otherwise.
<ide> function _getFreeIndex(): number {
<ide> let freeIndex = JSTimersExecution.timerIDs.indexOf(null);
<ide> function _getFreeIndex(): number {
<ide> return freeIndex;
<ide> }
<ide>
<del>function _allocateCallback(func: Function, type: $Keys<typeof JSTimersExecution.Type>): number {
<add>function _allocateCallback(func: Function, type: JSTimerType): number {
<ide> const id = JSTimersExecution.GUID++;
<ide> const freeIndex = _getFreeIndex();
<ide> JSTimersExecution.timerIDs[freeIndex] = id;
<ide> function _freeCallback(timerID: number) {
<ide> if (index !== -1) {
<ide> JSTimersExecution._clearIndex(index);
<ide> const type = JSTimersExecution.types[index];
<del> if (type !== JSTimersExecution.Type.setImmediate &&
<del> type !== JSTimersExecution.Type.requestIdleCallback) {
<add> if (type !== 'setImmediate' && type !== 'requestIdleCallback') {
<ide> RCTTiming.deleteTimer(timerID);
<ide> }
<ide> }
<ide> const JSTimers = {
<ide> * @param {number} duration Number of milliseconds.
<ide> */
<ide> setTimeout: function(func: Function, duration: number, ...args?: any): number {
<del> const id = _allocateCallback(() => func.apply(undefined, args),
<del> JSTimersExecution.Type.setTimeout);
<add> const id = _allocateCallback(() => func.apply(undefined, args), 'setTimeout');
<ide> RCTTiming.createTimer(id, duration || 0, Date.now(), /* recurring */ false);
<ide> return id;
<ide> },
<ide> const JSTimers = {
<ide> * @param {number} duration Number of milliseconds.
<ide> */
<ide> setInterval: function(func: Function, duration: number, ...args?: any): number {
<del> const id = _allocateCallback(() => func.apply(undefined, args),
<del> JSTimersExecution.Type.setInterval);
<add> const id = _allocateCallback(() => func.apply(undefined, args), 'setInterval');
<ide> RCTTiming.createTimer(id, duration || 0, Date.now(), /* recurring */ true);
<ide> return id;
<ide> },
<ide> const JSTimers = {
<ide> * current JavaScript execution loop.
<ide> */
<ide> setImmediate: function(func: Function, ...args?: any) {
<del> const id = _allocateCallback(() => func.apply(undefined, args),
<del> JSTimersExecution.Type.setImmediate);
<add> const id = _allocateCallback(() => func.apply(undefined, args), 'setImmediate');
<ide> JSTimersExecution.immediates.push(id);
<ide> return id;
<ide> },
<ide> const JSTimers = {
<ide> * @param {function} func Callback to be invoked every frame.
<ide> */
<ide> requestAnimationFrame: function(func : Function) {
<del> const id = _allocateCallback(func, JSTimersExecution.Type.requestAnimationFrame);
<add> const id = _allocateCallback(func, 'requestAnimationFrame');
<ide> RCTTiming.createTimer(id, 1, Date.now(), /* recurring */ false);
<ide> return id;
<ide> },
<ide> const JSTimers = {
<ide> RCTTiming.setSendIdleEvents(true);
<ide> }
<ide>
<del> const id = _allocateCallback(func, JSTimersExecution.Type.requestIdleCallback);
<add> const id = _allocateCallback(func, 'requestIdleCallback');
<ide> JSTimersExecution.requestIdleCallbacks.push(id);
<ide> return id;
<ide> },
<ide><path>Libraries/JavaScriptAppEngine/System/JSTimers/JSTimersExecution.js
<ide> const Systrace = require('Systrace');
<ide>
<ide> const invariant = require('fbjs/lib/invariant');
<del>const keyMirror = require('fbjs/lib/keyMirror');
<ide> const performanceNow = require('fbjs/lib/performanceNow');
<ide> const warning = require('fbjs/lib/warning');
<ide>
<ide> const IDLE_CALLBACK_FRAME_DEADLINE = 1;
<ide>
<ide> let hasEmittedTimeDriftWarning = false;
<ide>
<add>export type JSTimerType =
<add> 'setTimeout' |
<add> 'setInterval' |
<add> 'requestAnimationFrame' |
<add> 'setImmediate' |
<add> 'requestIdleCallback';
<add>
<ide> /**
<ide> * JS implementation of timer functions. Must be completely driven by an
<ide> * external clock signal, all that's stored here is timerID, timer type, and
<ide> * callback.
<ide> */
<ide> const JSTimersExecution = {
<ide> GUID: 1,
<del> Type: keyMirror({
<del> setTimeout: null,
<del> setInterval: null,
<del> requestAnimationFrame: null,
<del> setImmediate: null,
<del> requestIdleCallback: null,
<del> }),
<ide>
<ide> // Parallel arrays
<del> callbacks: [],
<del> types: [],
<del> timerIDs: [],
<add> callbacks: ([] : Array<?Function>),
<add> types: ([] : Array<?JSTimerType>),
<add> timerIDs: ([] : Array<?number>),
<ide> immediates: [],
<ide> requestIdleCallbacks: [],
<ide> identifiers: ([] : Array<null | {methodName: string}>),
<ide> const JSTimersExecution = {
<ide> }
<ide>
<ide> // Clear the metadata
<del> if (type === JSTimersExecution.Type.setTimeout ||
<del> type === JSTimersExecution.Type.setImmediate ||
<del> type === JSTimersExecution.Type.requestAnimationFrame ||
<del> type === JSTimersExecution.Type.requestIdleCallback) {
<add> if (type === 'setTimeout' || type === 'setImmediate' ||
<add> type === 'requestAnimationFrame' || type === 'requestIdleCallback') {
<ide> JSTimersExecution._clearIndex(timerIndex);
<ide> }
<ide>
<ide> try {
<del> if (type === JSTimersExecution.Type.setTimeout ||
<del> type === JSTimersExecution.Type.setInterval ||
<del> type === JSTimersExecution.Type.setImmediate) {
<add> if (type === 'setTimeout' || type === 'setInterval' ||
<add> type === 'setImmediate') {
<ide> callback();
<del> } else if (type === JSTimersExecution.Type.requestAnimationFrame) {
<add> } else if (type === 'requestAnimationFrame') {
<ide> callback(performanceNow());
<del> } else if (type === JSTimersExecution.Type.requestIdleCallback) {
<add> } else if (type === 'requestIdleCallback') {
<ide> callback({
<ide> timeRemaining: function() {
<ide> // TODO: Optimisation: allow running for longer than one frame if | 2 |
Text | Text | add angular version in index.md of angular guide | 0c30465ad4079871e38382d03717613b6a8425d5 | <ide><path>guide/chinese/angular/index.md
<ide> Angular(版本2.x及更高版本)是一个基于Typescript的开源框架,
<ide>
<ide> 谷歌于2010年10月20日发布了AngularJS的初始版本。 AngularJS的稳定版本于2017年12月18日发布,版本为1.6.8。 Angular 2.0发布于2014年9月22日在ng-Europe会议上发布。 Angular 2.0的一个特性是动态加载。
<ide>
<del>经过一些修改后,Angular 4.0于2016年12月发布。 Angular 4向后兼容Angular 2.0。 HttpClient库是Angular 4.0的一个特性。 Angular 5发布于2017年11月1日。支持Progressive Web应用程序是Angular 5.0的改进之一。 Angular 6发布于2018年5月。最新的稳定版本是[6.1.9](https://blog.angular.io/angular-v6-1-now-available-typescript-2-9-scroll-positioning-and-more-9f1c03007bb6)
<add>经过一些修改后,Angular 4.0于2016年12月发布。 Angular 4向后兼容Angular 2.0。 HttpClient库是Angular 4.0的一个特性。 Angular 5发布于2017年11月1日。支持Progressive Web应用程序是Angular 5.0的改进之一。 Angular 6发布于2018年5月。最新的稳定版本是[6.1.9](https://blog.angular.io/angular-v6-1-now-available-typescript-2-9-scroll-positioning-and-more-9f1c03007bb6) Angular 7发布于2018年10月。新的稳定版本是7.0.1。
<ide>
<ide> **安装** :
<ide>
<ide> HTML
<ide>
<ide> 您可以通过以下从官方文档中的步骤安装**角2.x**和其他版本的[角](https://angular.io/guide/quickstart)
<ide>
<del>_注意:请参阅Angular文档以获取更新的信息。_
<ide>\ No newline at end of file
<add>_注意:请参阅Angular文档以获取更新的信息。_ | 1 |
Go | Go | fix handling of shared roots | 08ab55419549374742bc879261f1d55b19af7265 | <ide><path>execdriver/lxc/driver.go
<ide> func (d *driver) Run(c *execdriver.Process, startCallback execdriver.StartCallba
<ide> params = append(params, "-w", c.WorkingDir)
<ide> }
<ide>
<add> params = append(params, "--", c.Entrypoint)
<add> params = append(params, c.Arguments...)
<add>
<ide> if d.sharedRoot {
<ide> // lxc-start really needs / to be non-shared, or all kinds of stuff break
<ide> // when lxc-start unmount things and those unmounts propagate to the main
<ide> func (d *driver) Run(c *execdriver.Process, startCallback execdriver.StartCallba
<ide> }
<ide> }
<ide>
<del> params = append(params, "--", c.Entrypoint)
<del> params = append(params, c.Arguments...)
<del>
<ide> var (
<ide> name = params[0]
<ide> arg = params[1:] | 1 |
Javascript | Javascript | remove old test file | 069099a3e39b6b25fc5c1b10105c4d280760963f | <ide><path>test-net-server.js
<del>process.Buffer.prototype.toString = function () {
<del> return this.utf8Slice(0, this.length);
<del>};
<del>
<del>var sys = require("sys");
<del>var net = require("./lib/net");
<del>
<del>var server = new net.Server(function (socket) {
<del> sys.puts("connection (" + socket.fd + "): "
<del> + socket.remoteAddress
<del> + " port "
<del> + socket.remotePort
<del> );
<del> sys.puts("server fd: " + server.fd);
<del>
<del> socket.addListener("data", function (b) {
<del> socket.send("pong ascii\r\n", "ascii");
<del> socket.send(b);
<del> socket.send("pong utf8\r\n", "utf8");
<del> if (/^quit/.test(b)) {
<del> socket.close();
<del> server.close();
<del> }
<del> });
<del>
<del> socket.addListener("eof", function () {
<del> sys.puts("server peer eof");
<del> socket.close();
<del> });
<del>
<del> socket.addListener('drain', function () {
<del> sys.puts("server-side socket drain");
<del> });
<del>});
<del>
<del>server.addListener("listening", function () {
<del> var c = net.createConnection("/tmp/node.sock");
<del> c.addListener('connect', function () {
<del> sys.puts("!!!client connected");
<del> c.send("hello\n");
<del> });
<del>
<del> c.addListener('drain', function () {
<del> sys.puts("!!!client drain");
<del> });
<del>
<del> c.addListener('data', function (d) {
<del> sys.puts("!!!client got: " + JSON.stringify(d.toString()));
<del> c.close();
<del> });
<del>
<del> c.addListener('eof', function (d) {
<del> sys.puts("!!!client eof");
<del> });
<del>});
<del>
<del>server.listen("/tmp/node.sock");
<del>sys.puts("server fd: " + server.fd); | 1 |
Python | Python | turn some build warnings into errors | 3fc4412564a9a9bf3a77b4210ea1cbd3f11b19f9 | <ide><path>runtests.py
<ide> def build_project(args):
<ide>
<ide> """
<ide>
<add> import distutils.sysconfig
<add>
<ide> root_ok = [os.path.exists(os.path.join(ROOT_DIR, fn))
<ide> for fn in PROJECT_ROOT_FILES]
<ide> if not all(root_ok):
<ide> def build_project(args):
<ide>
<ide> # Always use ccache, if installed
<ide> env['PATH'] = os.pathsep.join(EXTRA_PATH + env.get('PATH', '').split(os.pathsep))
<del>
<add> cvars = distutils.sysconfig.get_config_vars()
<add> if 'gcc' in cvars['CC']:
<add> # add flags used as werrors tools/travis-test.sh
<add> warnings_as_errors = (' -Werror=declaration-after-statement -Werror=vla'
<add> ' -Werror=nonnull -Werror=pointer-arith'
<add> ' -Wlogical-op')
<add> env['CFLAGS'] = warnings_as_errors + env.get('CFLAGS', '')
<ide> if args.debug or args.gcov:
<ide> # assume everyone uses gcc/gfortran
<ide> env['OPT'] = '-O0 -ggdb'
<ide> env['FOPT'] = '-O0 -ggdb'
<ide> if args.gcov:
<del> import distutils.sysconfig
<del> cvars = distutils.sysconfig.get_config_vars()
<ide> env['OPT'] = '-O0 -ggdb'
<ide> env['FOPT'] = '-O0 -ggdb'
<ide> env['CC'] = cvars['CC'] + ' --coverage' | 1 |
Text | Text | link official cloud tpu jax docs | 2df546918e4df58a6fd14fac8a634ec9710a80c1 | <ide><path>examples/flax/README.md
<ide> efficient vectorization), and `pjit` (for automatically sharded model parallelis
<ide> computing per-example gradients is simply `vmap(grad(f))`.
<ide>
<ide> [Flax](https://github.com/google/flax) builds on top of JAX with an ergonomic
<del>module abstraction using Python dataclasses that leads to concise and explicit code. Flax's "lifted" JAX transformations (e.g. `vmap`, `remat`) allow you to nest JAX transformation and modules in any way you wish. Flax is the most widely used JAX library, with [129 dependent projects](https://github.com/google/flax/network/dependents?package_id=UGFja2FnZS01MjEyMjA2MA%3D%3D) as of May 2021. It is also the library underlying all of the official Cloud TPU JAX examples. (TODO: Add link once it's there.)
<add>module abstraction using Python dataclasses that leads to concise and explicit code. Flax's "lifted" JAX transformations (e.g. `vmap`, `remat`) allow you to nest JAX transformation and modules in any way you wish. Flax is the most widely used JAX library, with [129 dependent projects](https://github.com/google/flax/network/dependents?package_id=UGFja2FnZS01MjEyMjA2MA%3D%3D) as of May 2021. It is also the library underlying all of the official Cloud TPU JAX examples.
<ide>
<ide> ## Running on Cloud TPU
<ide>
<ide> All of our JAX/Flax models are designed to run efficiently on Google
<del>Cloud TPUs. Here is a guide for running jobs on Google Cloud TPU.
<del>(TODO: Add a link to the Cloud TPU JAX getting started guide once it's public)
<add>Cloud TPUs. Here is [a guide for running JAX on Google Cloud TPU](https://cloud.google.com/tpu/docs/jax-quickstart-tpu-vm).
<add>
<ide> Each example README contains more details on the specific model and training
<ide> procedure.
<ide> | 1 |
Javascript | Javascript | remove unknown props in art | cb4a0af7dd32130b58bcfe9bcf2bdb9978c92cf5 | <ide><path>src/renderers/art/ReactART.js
<ide> const Surface = React.createClass({
<ide> const Tag = Mode.Surface.tagName;
<ide> return (
<ide> <Tag
<del> accesskey={props.accesskey}
<ide> className={props.className}
<ide> draggable={props.draggable}
<ide> role={props.role}
<ide> style={props.style}
<del> tabindex={props.tabindex}
<ide> title={props.title}
<ide> />
<ide> ); | 1 |
PHP | PHP | add model factory after callbacks with states | bc4624cef6e60b8f711da971f7a466d574f484c6 | <ide><path>src/Illuminate/Database/Eloquent/Factory.php
<ide> public function state($class, $state, $attributes)
<ide> */
<ide> public function afterMaking($class, $callback)
<ide> {
<del> $this->afterMaking[$class][] = $callback;
<add> $this->afterMaking[$class]['default'][] = $callback;
<add>
<add> return $this;
<add> }
<add>
<add> /**
<add> * Define a callback to run after making a model with given type.
<add> *
<add> * @param string $class
<add> * @param string $state
<add> * @param callable $callback
<add> * @return $this
<add> */
<add> public function afterMakingState($class, $state, callable $callback)
<add> {
<add> $this->afterMaking[$class][$state][] = $callback;
<ide>
<ide> return $this;
<ide> }
<ide> public function afterMaking($class, $callback)
<ide> */
<ide> public function afterCreating($class, $callback)
<ide> {
<del> $this->afterCreating[$class][] = $callback;
<add> $this->afterCreating[$class]['default'][] = $callback;
<add>
<add> return $this;
<add> }
<add>
<add> /**
<add> * Define a callback to run after creating a model with given type.
<add> *
<add> * @param string $class
<add> * @param string $state
<add> * @param callable $callback
<add> * @return $this
<add> */
<add> public function afterCreatingState($class, $state, callable $callback)
<add> {
<add> $this->afterCreating[$class][$state][] = $callback;
<ide>
<ide> return $this;
<ide> }
<ide><path>src/Illuminate/Database/Eloquent/FactoryBuilder.php
<ide> protected function applyStates(array $definition, array $attributes = [])
<ide> {
<ide> foreach ($this->activeStates as $state) {
<ide> if (! isset($this->states[$this->class][$state])) {
<add> if ($this->afterStateExists($state)) {
<add> continue;
<add> }
<ide> throw new InvalidArgumentException("Unable to locate [{$state}] state for [{$this->class}].");
<ide> }
<ide>
<ide> protected function expandAttributes(array $attributes)
<ide> */
<ide> public function callAfterMaking($models)
<ide> {
<del> $models->each(function ($model) {
<del> if (isset($this->afterMaking[$this->class])) {
<del> foreach ($this->afterMaking[$this->class] as $callback) {
<del> $callback($model, $this->faker);
<del> }
<del> }
<del> });
<add> $this->callAfter($this->afterMaking, $models);
<ide> }
<ide>
<ide> /**
<ide> public function callAfterMaking($models)
<ide> */
<ide> public function callAfterCreating($models)
<ide> {
<del> $models->each(function ($model) {
<del> if (isset($this->afterCreating[$this->class])) {
<del> foreach ($this->afterCreating[$this->class] as $callback) {
<del> $callback($model, $this->faker);
<del> }
<add> $this->callAfter($this->afterCreating, $models);
<add> }
<add>
<add> /**
<add> * Call after callbacks for each model and state.
<add> *
<add> * @param array $afterCallbacks
<add> * @param \Illuminate\Support\Collection $models
<add> * @return void
<add> */
<add> protected function callAfter(array $afterCallbacks, $models)
<add> {
<add> $states = array_merge([$this->name], $this->activeStates);
<add>
<add> $models->each(function ($model) use ($states, $afterCallbacks) {
<add> foreach ($states as $state) {
<add> $this->callAfterCallbacks($afterCallbacks, $model, $state);
<ide> }
<ide> });
<ide> }
<add>
<add> /**
<add> * Call after callbacks for each model and state.
<add> *
<add> * @param array $afterCallbacks
<add> * @param Model $model
<add> * @param string $state
<add> * @return void
<add> */
<add> protected function callAfterCallbacks(array $afterCallbacks, $model, $state)
<add> {
<add> if (!isset($afterCallbacks[$this->class][$state])) {
<add> return;
<add> }
<add>
<add> foreach ($afterCallbacks[$this->class][$state] as $callback) {
<add> $callback($model, $this->faker);
<add> }
<add> }
<add>
<add> /**
<add> * @param string $state
<add> * @return bool
<add> */
<add> protected function afterStateExists($state)
<add> {
<add> return isset($this->afterMaking[$this->class][$state]) || isset($this->afterCreating[$this->class][$state]);
<add> }
<ide> }
<ide><path>tests/Integration/Database/EloquentFactoryBuilderTest.php
<ide> protected function getEnvironmentSetUp($app)
<ide> $user->setRelation('profile', $profile);
<ide> });
<ide>
<add> $factory->afterMakingState(FactoryBuildableUser::class, 'with_callable_server', function (FactoryBuildableUser $user, Generator $faker) {
<add> $server = factory(FactoryBuildableServer::class)
<add> ->states('callable')
<add> ->make(['user_id' => $user->id]);
<add>
<add> $user->servers->push($server);
<add> });
<add>
<ide> $factory->define(FactoryBuildableTeam::class, function (Generator $faker) {
<ide> return [
<ide> 'name' => $faker->name,
<ide> protected function getEnvironmentSetUp($app)
<ide> ];
<ide> });
<ide>
<add> $factory->afterCreatingState(FactoryBuildableUser::class, 'with_callable_server', function(FactoryBuildableUser $user, Generator $faker){
<add> $server = factory(FactoryBuildableServer::class)
<add> ->states('callable')
<add> ->create(['user_id' => $user->id]);
<add> });
<add>
<ide> $factory->state(FactoryBuildableServer::class, 'inline', ['status' => 'inline']);
<ide>
<ide> $app->singleton(Factory::class, function ($app) use ($factory) {
<ide> public function creating_models_with_after_callback()
<ide> $this->assertTrue($team->users->contains($team->owner));
<ide> }
<ide>
<add> /** @test **/
<add> public function creating_models_with_after_callback_states()
<add> {
<add> $user = factory(FactoryBuildableUser::class)->states('with_callable_server')->create();
<add>
<add> $this->assertNotNull($user->profile);
<add> $this->assertNotNull($user->servers->where('status', 'callable')->first());
<add> }
<add>
<ide> /** @test */
<ide> public function making_models_with_a_custom_connection()
<ide> {
<ide> public function making_models_with_after_callback()
<ide>
<ide> $this->assertNotNull($user->profile);
<ide> }
<add>
<add> /** @test **/
<add> public function making_models_with_after_callback_states()
<add> {
<add> $user = factory(FactoryBuildableUser::class)->states('with_callable_server')->make();
<add>
<add> $this->assertNotNull($user->profile);
<add> $this->assertNotNull($user->servers->where('status', 'callable')->first());
<add> }
<ide> }
<ide>
<ide> class FactoryBuildableUser extends Model | 3 |
Java | Java | remove test duplication | 8dea3c5ade47af23b098e49a5f6ddd44e40820c0 | <ide><path>spring-context-support/src/test/java/org/springframework/cache/ehcache/EhCacheCacheTests.java
<ide> protected EhCacheCache getCache() {
<ide> protected Ehcache getNativeCache() {
<ide> return nativeCache;
<ide> }
<del> @Test
<del> public void testCachePut() throws Exception {
<del> Object key = "enescu";
<del> Object value = "george";
<del>
<del> assertNull(cache.get(key));
<del> assertNull(cache.get(key, String.class));
<del> assertNull(cache.get(key, Object.class));
<del>
<del> cache.put(key, value);
<del> assertEquals(value, cache.get(key).get());
<del> assertEquals(value, cache.get(key, String.class));
<del> assertEquals(value, cache.get(key, Object.class));
<del> assertEquals(value, cache.get(key, null));
<del>
<del> cache.put(key, null);
<del> assertNotNull(cache.get(key));
<del> assertNull(cache.get(key).get());
<del> assertNull(cache.get(key, String.class));
<del> assertNull(cache.get(key, Object.class));
<del> }
<ide>
<ide> @Test
<ide> public void testExpiredElements() throws Exception { | 1 |
Text | Text | fix small typo in development_dependencies_install | 058ae7e1c90b555756c9ecd029d60b5cd0341be0 | <ide><path>guides/source/development_dependencies_install.md
<ide> And if you are on Fedora or CentOS, you're done with
<ide> $ sudo yum install sqlite3 sqlite3-devel
<ide> ```
<ide>
<del>Get a recent version of [Bundler](http://gembundler.com/:)
<add>Get a recent version of [Bundler](http://gembundler.com/)
<ide>
<ide> ```bash
<ide> $ gem install bundler | 1 |
PHP | PHP | fix a few mistakes | a2e413173dda016a22dbb3af078fc017df45b08d | <ide><path>src/Core/Plugin.php
<ide> public static function load($plugin, array $config = [])
<ide> return;
<ide> }
<ide>
<del> if (!Configure::check('pluginPaths')) {
<add> if (!Configure::check('plugins')) {
<ide> try {
<ide> Configure::load('plugins');
<ide> } catch (\Exception $e) {
<ide><path>tests/TestCase/Core/PluginTest.php
<ide> public function testLoadSingleWithPathConfig()
<ide> $this->assertEquals(APP . 'src' . DS, Plugin::classPath('TestPlugin'));
<ide> }
<ide>
<del>
<ide> /**
<ide> * Tests loading multiple plugins at once
<ide> * | 2 |
Python | Python | add option to run perf tuned args | 2894bb53fa97e104bb440d98cff6f05ed9488490 | <ide><path>official/resnet/imagenet_main.py
<ide> def parse_record(raw_record, is_training, dtype):
<ide> return image, label
<ide>
<ide>
<del>def input_fn(is_training, data_dir, batch_size, num_epochs=1, num_gpus=None,
<del> dtype=tf.float32):
<add>def input_fn(is_training, data_dir, batch_size, num_epochs=1,
<add> dtype=tf.float32, datasets_num_private_threads=None,
<add> num_parallel_batches=1):
<ide> """Input function which provides batches for train or eval.
<ide>
<ide> Args:
<ide> is_training: A boolean denoting whether the input is for training.
<ide> data_dir: The directory containing the input data.
<ide> batch_size: The number of samples per batch.
<ide> num_epochs: The number of epochs to repeat the dataset.
<del> num_gpus: The number of gpus used for training.
<ide> dtype: Data type to use for images/features
<add> datasets_num_private_threads: Number of private threads for tf.data.
<add> num_parallel_batches: Number of parallel batches for tf.data.
<ide>
<ide> Returns:
<ide> A dataset that can be used for iteration.
<ide> def input_fn(is_training, data_dir, batch_size, num_epochs=1, num_gpus=None,
<ide> shuffle_buffer=_SHUFFLE_BUFFER,
<ide> parse_record_fn=parse_record,
<ide> num_epochs=num_epochs,
<del> num_gpus=num_gpus,
<del> examples_per_epoch=_NUM_IMAGES['train'] if is_training else None,
<del> dtype=dtype
<add> dtype=dtype,
<add> datasets_num_private_threads=datasets_num_private_threads,
<add> num_parallel_batches=num_parallel_batches
<ide> )
<ide>
<ide>
<ide><path>official/resnet/resnet_run_loop.py
<ide> # pylint: disable=g-bad-import-order
<ide> from absl import flags
<ide> import tensorflow as tf
<add>from tensorflow.contrib.data.python.ops import threadpool
<add>import multiprocessing
<ide>
<ide> from official.resnet import resnet_model
<ide> from official.utils.flags import core as flags_core
<ide> from official.resnet import imagenet_preprocessing
<ide> from official.utils.misc import distribution_utils
<ide> from official.utils.misc import model_helpers
<del># pylint: enable=g-bad-import-order
<ide>
<ide>
<ide> ################################################################################
<ide> # Functions for input processing.
<ide> ################################################################################
<del>def process_record_dataset(dataset, is_training, batch_size, shuffle_buffer,
<del> parse_record_fn, num_epochs=1, num_gpus=None,
<del> examples_per_epoch=None, dtype=tf.float32):
<add>def process_record_dataset(dataset,
<add> is_training,
<add> batch_size,
<add> shuffle_buffer,
<add> parse_record_fn,
<add> num_epochs=1,
<add> dtype=tf.float32,
<add> datasets_num_private_threads=None,
<add> num_parallel_batches=1):
<ide> """Given a Dataset with raw records, return an iterator over the records.
<ide>
<ide> Args:
<ide> def process_record_dataset(dataset, is_training, batch_size, shuffle_buffer,
<ide> parse_record_fn: A function that takes a raw record and returns the
<ide> corresponding (image, label) pair.
<ide> num_epochs: The number of epochs to repeat the dataset.
<del> num_gpus: The number of gpus used for training.
<del> examples_per_epoch: The number of examples in an epoch.
<ide> dtype: Data type to use for images/features.
<add> datasets_num_private_threads: Number of threads for a private
<add> threadpool created for all datasets computation.
<add> num_parallel_batches: Number of parallel batches for tf.data.
<ide>
<ide> Returns:
<ide> Dataset of (image, label) pairs ready for iteration.
<ide> def process_record_dataset(dataset, is_training, batch_size, shuffle_buffer,
<ide> tf.contrib.data.map_and_batch(
<ide> lambda value: parse_record_fn(value, is_training, dtype),
<ide> batch_size=batch_size,
<del> num_parallel_calls=1,
<add> num_parallel_calls=num_parallel_batches,
<ide> drop_remainder=False))
<ide>
<ide> # Operations between the final prefetch and the get_next call to the iterator
<ide> def process_record_dataset(dataset, is_training, batch_size, shuffle_buffer,
<ide> # on how many devices are present.
<ide> dataset = dataset.prefetch(buffer_size=tf.contrib.data.AUTOTUNE)
<ide>
<add> # Defines a specific size thread pool for tf.data operations.
<add> if datasets_num_private_threads:
<add> dataset = threadpool.override_threadpool(
<add> dataset,
<add> threadpool.PrivateThreadPool(
<add> datasets_num_private_threads,
<add> display_name='input_pipeline_thread_pool'))
<add>
<ide> return dataset
<ide>
<ide>
<ide> def _preprocess_image(image_bytes):
<ide> images, {'image_bytes': image_bytes_list})
<ide>
<ide>
<add>def set_environment_vars(flags_obj):
<add> """Adjust flags and set env_vars for performance.
<add>
<add> These settings exist to test the difference between using stock settings
<add> and manual tuning. It also shows some of the ENV_VARS that can be tweaked to
<add> squeeze a few extra examples per second. These settings are defaulted to the
<add> current platform of interest, which changes over time.
<add>
<add> On systems with small numbers of cpu cores, e.g. under 8 logical cores,
<add> setting up a private thread pool for GPU with `tf_gpu_thread_mode=gpu_private`
<add> may perform poorly.
<add>
<add> Args:
<add> flags_obj: Current flags, which will be adjusted possibly overriding
<add> what has been set by the user on the command-line.
<add>
<add> Returns:
<add> tf.ConfigProto: session_config proto to add to the session.
<add> """
<add> if flags_obj.tf_gpu_thread_mode in ['gpu_private']:
<add> cpu_count = multiprocessing.cpu_count()
<add> print('Logical CPU cores:', cpu_count)
<add>
<add> # Sets up thread pool for each GPU for op scheduling.
<add> per_gpu_thread_count = 1
<add> total_gpu_thread_count = per_gpu_thread_count * flags_obj.num_gpus
<add> os.environ['TF_GPU_THREAD_MODE'] = flags_obj.tf_gpu_thread_mode
<add> os.environ['TF_GPU_THREAD_COUNT'] = str(per_gpu_thread_count)
<add> print('TF_GPU_THREAD_COUNT:', os.environ['TF_GPU_THREAD_COUNT'])
<add>
<add> # Reduces general thread pool by number of threads used for GPU pool.
<add> main_thread_count = cpu_count - total_gpu_thread_count
<add> flags_obj.inter_op_parallelism_threads = main_thread_count
<add>
<add> # Sets thread count for tf.data. Logical cores minus threads assign to the
<add> # private GPU pool along with 2 thread per GPU for event monitoring and
<add> # sending / receiving tensors.
<add> num_monitoring_threads = 2 * flags_obj.num_gpus
<add> num_private_threads = (cpu_count - total_gpu_thread_count
<add> - num_monitoring_threads)
<add> flags_obj.datasets_num_private_threads = num_private_threads
<add>
<add> print('inter_op_parallelism_threads:', flags_obj.inter_op_parallelism_threads)
<add> print('intra_op_parallelism_threads:', flags_obj.intra_op_parallelism_threads)
<add> print('datasets_num_private_threads:', flags_obj.datasets_num_private_threads)
<add>
<add> # Create session config based on values of inter_op_parallelism_threads and
<add> # intra_op_parallelism_threads. Note that we default to having
<add> # allow_soft_placement = True, which is required for multi-GPU and not
<add> # harmful for other modes.
<add> session_config = tf.ConfigProto(
<add> inter_op_parallelism_threads=flags_obj.inter_op_parallelism_threads,
<add> intra_op_parallelism_threads=flags_obj.intra_op_parallelism_threads,
<add> allow_soft_placement=True)
<add> return session_config
<add>
<add>
<ide> ################################################################################
<ide> # Functions for running training/eval/validation loops for the model.
<ide> ################################################################################
<ide> def _dense_grad_filter(gvs):
<ide> train_op = None
<ide>
<ide> accuracy = tf.metrics.accuracy(labels, predictions['classes'])
<del> accuracy_top_5 = tf.metrics.mean(tf.nn.in_top_k(predictions=logits,
<del> targets=labels,
<del> k=5,
<del> name='top_5_op'))
<del> metrics = {'accuracy': accuracy,
<del> 'accuracy_top_5': accuracy_top_5}
<add> #accuracy_top_5 = tf.metrics.mean(tf.nn.in_top_k(predictions=logits,
<add> # targets=labels,
<add> # k=5,
<add> # name='top_5_op'))
<add> metrics = {'accuracy': accuracy}
<add> # 'accuracy_top_5': accuracy_top_5}
<ide>
<ide> # Create a tensor named train_accuracy for logging purposes
<ide> tf.identity(accuracy[1], name='train_accuracy')
<del> tf.identity(accuracy_top_5[1], name='train_accuracy_top_5')
<add> #tf.identity(accuracy_top_5[1], name='train_accuracy_top_5')
<ide> tf.summary.scalar('train_accuracy', accuracy[1])
<del> tf.summary.scalar('train_accuracy_top_5', accuracy_top_5[1])
<add> #tf.summary.scalar('train_accuracy_top_5', accuracy_top_5[1])
<ide>
<ide> return tf.estimator.EstimatorSpec(
<ide> mode=mode,
<ide> predictions=predictions,
<ide> loss=loss,
<del> train_op=train_op,
<del> eval_metric_ops=metrics)
<add> train_op=train_op)
<add> #eval_metric_ops=metrics)
<ide>
<ide>
<ide> def resnet_main(
<ide> def resnet_main(
<ide>
<ide> model_helpers.apply_clean(flags.FLAGS)
<ide>
<del> # Using the Winograd non-fused algorithms provides a small performance boost.
<del> os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1'
<del>
<del> # Create session config based on values of inter_op_parallelism_threads and
<del> # intra_op_parallelism_threads. Note that we default to having
<del> # allow_soft_placement = True, which is required for multi-GPU and not
<del> # harmful for other modes.
<del> session_config = tf.ConfigProto(
<del> inter_op_parallelism_threads=flags_obj.inter_op_parallelism_threads,
<del> intra_op_parallelism_threads=flags_obj.intra_op_parallelism_threads,
<del> allow_soft_placement=True)
<add> session_config = set_environment_vars(flags_obj)
<ide>
<ide> distribution_strategy = distribution_utils.get_distribution_strategy(
<ide> flags_core.get_num_gpus(flags_obj), flags_obj.all_reduce_alg)
<ide>
<add> # Creates a `RunConfig` that checkpoints every 24 hours which essentially
<add> # results in checkpoints at the end of each training loop as determined by
<add> # `epochs_between_evals`. Doing it more often is a needless small cost.
<ide> run_config = tf.estimator.RunConfig(
<del> train_distribute=distribution_strategy, session_config=session_config)
<add> train_distribute=distribution_strategy,
<add> session_config=session_config,
<add> save_checkpoints_secs=60*60*24)
<ide>
<ide> # initialize our model with all but the dense layer from pretrained resnet
<ide> if flags_obj.pretrained_model_checkpoint_path is not None:
<ide> def resnet_main(
<ide>
<ide> def input_fn_train(num_epochs):
<ide> return input_function(
<del> is_training=True, data_dir=flags_obj.data_dir,
<add> is_training=True,
<add> data_dir=flags_obj.data_dir,
<ide> batch_size=distribution_utils.per_device_batch_size(
<ide> flags_obj.batch_size, flags_core.get_num_gpus(flags_obj)),
<ide> num_epochs=num_epochs,
<del> num_gpus=flags_core.get_num_gpus(flags_obj),
<del> dtype=flags_core.get_tf_dtype(flags_obj))
<add> dtype=flags_core.get_tf_dtype(flags_obj),
<add> datasets_num_private_threads=flags_obj.datasets_num_private_threads,
<add> num_parallel_batches=flags_obj.num_parallel_calls)
<ide>
<ide> def input_fn_eval():
<ide> return input_function(
<del> is_training=False, data_dir=flags_obj.data_dir,
<add> is_training=False,
<add> data_dir=flags_obj.data_dir,
<ide> batch_size=distribution_utils.per_device_batch_size(
<ide> flags_obj.batch_size, flags_core.get_num_gpus(flags_obj)),
<ide> num_epochs=1,
<ide> def input_fn_eval():
<ide> def define_resnet_flags(resnet_size_choices=None):
<ide> """Add flags and validators for ResNet."""
<ide> flags_core.define_base()
<del> flags_core.define_performance(num_parallel_calls=False)
<add> flags_core.define_performance()
<ide> flags_core.define_image()
<ide> flags_core.define_benchmark()
<ide> flags.adopt_module_key_flags(flags_core)
<ide> def define_resnet_flags(resnet_size_choices=None):
<ide> help=flags_core.help_wrap('Skip training and only perform evaluation on '
<ide> 'the latest checkpoint.'))
<ide> flags.DEFINE_boolean(
<del> name="image_bytes_as_serving_input", default=False,
<add> name='image_bytes_as_serving_input', default=False,
<ide> help=flags_core.help_wrap(
<ide> 'If True exports savedmodel with serving signature that accepts '
<ide> 'JPEG image bytes instead of a fixed size [HxWxC] tensor that '
<ide><path>official/utils/flags/_performance.py
<ide> def get_loss_scale(flags_obj):
<ide>
<ide> def define_performance(num_parallel_calls=True, inter_op=True, intra_op=True,
<ide> synthetic_data=True, max_train_steps=True, dtype=True,
<del> all_reduce_alg=True):
<add> all_reduce_alg=True, tf_gpu_thread_mode=True,
<add> datasets_num_private_threads=True):
<ide> """Register flags for specifying performance tuning arguments.
<ide>
<ide> Args:
<ide> def define_performance(num_parallel_calls=True, inter_op=True, intra_op=True,
<ide> max_train_steps: Create a flags to allow specification of maximum number
<ide> of training steps
<ide> dtype: Create flags for specifying dtype.
<del>
<add> all_reduce_alg: If set forces a specific algorithm for multi-gpu.
<add> tf_gpu_thread_mode: gpu_private triggers us of private thread pool.
<add> datasets_num_private_threads: Number of private threads for datasets.
<ide> Returns:
<ide> A list of flags for core.py to marks as key flags.
<ide> """
<ide> def define_performance(num_parallel_calls=True, inter_op=True, intra_op=True,
<ide> if num_parallel_calls:
<ide> flags.DEFINE_integer(
<ide> name="num_parallel_calls", short_name="npc",
<del> default=multiprocessing.cpu_count(),
<add> default=1,
<ide> help=help_wrap("The number of records that are processed in parallel "
<ide> "during input processing. This can be optimized per "
<ide> "data set but for generally homogeneous data sets, "
<ide> def _check_loss_scale(loss_scale): # pylint: disable=unused-variable
<ide> "See tf.contrib.distribute.AllReduceCrossTowerOps for "
<ide> "more details and available options."))
<ide>
<add> if tf_gpu_thread_mode:
<add> flags.DEFINE_string(
<add> name="tf_gpu_thread_mode", short_name="gt_mode", default="global",
<add> help=help_wrap(
<add> "Whether and how the GPU device uses its own threadpool.")
<add> )
<add>
<add> if datasets_num_private_threads:
<add> flags.DEFINE_integer(
<add> name="datasets_num_private_threads", short_name="dataset_thread_count",
<add> default=None,
<add> help=help_wrap(
<add> "Number of threads for a private threadpool created for all"
<add> "datasets computation..")
<add> )
<ide>
<ide> return key_flags | 3 |
Mixed | Go | add `--cpus` support for `docker update` | 61022436926a8d0c92068e8116a2ad77f43eb6d6 | <ide><path>cli/command/container/update.go
<ide> type updateOptions struct {
<ide> memorySwap opts.MemSwapBytes
<ide> kernelMemory opts.MemBytes
<ide> restartPolicy string
<add> cpus opts.NanoCPUs
<ide>
<ide> nFlag int
<ide>
<ide> func NewUpdateCommand(dockerCli *command.DockerCli) *cobra.Command {
<ide> flags.Var(&opts.kernelMemory, "kernel-memory", "Kernel memory limit")
<ide> flags.StringVar(&opts.restartPolicy, "restart", "", "Restart policy to apply when a container exits")
<ide>
<add> flags.Var(&opts.cpus, "cpus", "Number of CPUs")
<add> flags.SetAnnotation("cpus", "version", []string{"1.29"})
<add>
<ide> return cmd
<ide> }
<ide>
<ide> func runUpdate(dockerCli *command.DockerCli, opts *updateOptions) error {
<ide> CPUQuota: opts.cpuQuota,
<ide> CPURealtimePeriod: opts.cpuRealtimePeriod,
<ide> CPURealtimeRuntime: opts.cpuRealtimeRuntime,
<add> NanoCPUs: opts.cpus.Value(),
<ide> }
<ide>
<ide> updateConfig := containertypes.UpdateConfig{
<ide><path>container/container_unix.go
<ide> func (container *Container) UpdateContainer(hostConfig *containertypes.HostConfi
<ide> // update resources of container
<ide> resources := hostConfig.Resources
<ide> cResources := &container.HostConfig.Resources
<add>
<add> // validate NanoCPUs, CPUPeriod, and CPUQuota
<add> // Becuase NanoCPU effectively updates CPUPeriod/CPUQuota,
<add> // once NanoCPU is already set, updating CPUPeriod/CPUQuota will be blocked, and vice versa.
<add> // In the following we make sure the intended update (resources) does not conflict with the existing (cResource).
<add> if resources.NanoCPUs > 0 && cResources.CPUPeriod > 0 {
<add> return fmt.Errorf("Conflicting options: Nano CPUs cannot be updated as CPU Period has already been set")
<add> }
<add> if resources.NanoCPUs > 0 && cResources.CPUQuota > 0 {
<add> return fmt.Errorf("Conflicting options: Nano CPUs cannot be updated as CPU Quota has already been set")
<add> }
<add> if resources.CPUPeriod > 0 && cResources.NanoCPUs > 0 {
<add> return fmt.Errorf("Conflicting options: CPU Period cannot be updated as NanoCPUs has already been set")
<add> }
<add> if resources.CPUQuota > 0 && cResources.NanoCPUs > 0 {
<add> return fmt.Errorf("Conflicting options: CPU Quota cannot be updated as NanoCPUs has already been set")
<add> }
<add>
<ide> if resources.BlkioWeight != 0 {
<ide> cResources.BlkioWeight = resources.BlkioWeight
<ide> }
<ide> if resources.CPUShares != 0 {
<ide> cResources.CPUShares = resources.CPUShares
<ide> }
<add> if resources.NanoCPUs != 0 {
<add> cResources.NanoCPUs = resources.NanoCPUs
<add> }
<ide> if resources.CPUPeriod != 0 {
<ide> cResources.CPUPeriod = resources.CPUPeriod
<ide> }
<ide><path>daemon/update_linux.go
<ide> package daemon
<ide>
<ide> import (
<add> "time"
<add>
<ide> "github.com/docker/docker/api/types/container"
<ide> "github.com/docker/docker/libcontainerd"
<ide> )
<ide> func toContainerdResources(resources container.Resources) libcontainerd.Resource
<ide> var r libcontainerd.Resources
<ide> r.BlkioWeight = uint64(resources.BlkioWeight)
<ide> r.CpuShares = uint64(resources.CPUShares)
<del> r.CpuPeriod = uint64(resources.CPUPeriod)
<del> r.CpuQuota = uint64(resources.CPUQuota)
<add> if resources.NanoCPUs != 0 {
<add> r.CpuPeriod = uint64(100 * time.Millisecond / time.Microsecond)
<add> r.CpuQuota = uint64(resources.NanoCPUs) * r.CpuPeriod / 1e9
<add> } else {
<add> r.CpuPeriod = uint64(resources.CPUPeriod)
<add> r.CpuQuota = uint64(resources.CPUQuota)
<add> }
<ide> r.CpusetCpus = resources.CpusetCpus
<ide> r.CpusetMems = resources.CpusetMems
<ide> r.MemoryLimit = uint64(resources.Memory)
<ide><path>docs/api/version-history.md
<ide> keywords: "API, Docker, rcli, REST, documentation"
<ide> * `POST /services/create` and `POST /services/(id or name)/update` now accept a `rollback` value for `FailureAction`.
<ide> * `POST /services/create` and `POST /services/(id or name)/update` now accept an optional `RollbackConfig` object which specifies rollback options.
<ide> * `GET /services` now supports a `mode` filter to filter services based on the service mode (either `global` or `replicated`).
<add>* `POST /containers/(name)/update` now supports updating `NanoCPUs` that represents CPU quota in units of 10<sup>-9</sup> CPUs.
<ide>
<ide> ## v1.27 API changes
<ide>
<ide><path>docs/reference/commandline/update.md
<ide> Usage: docker update [OPTIONS] CONTAINER [CONTAINER...]
<ide> Update configuration of one or more containers
<ide>
<ide> Options:
<del> --blkio-weight value Block IO (relative weight), between 10 and 1000
<add> --blkio-weight uint16 Block IO (relative weight), between 10 and 1000, or 0 to disable (default 0)
<ide> --cpu-period int Limit CPU CFS (Completely Fair Scheduler) period
<ide> --cpu-quota int Limit CPU CFS (Completely Fair Scheduler) quota
<del> -c, --cpu-shares int CPU shares (relative weight)
<ide> --cpu-rt-period int Limit the CPU real-time period in microseconds
<ide> --cpu-rt-runtime int Limit the CPU real-time runtime in microseconds
<add> -c, --cpu-shares int CPU shares (relative weight)
<add> --cpus decimal Number of CPUs (default 0.000)
<ide> --cpuset-cpus string CPUs in which to allow execution (0-3, 0,1)
<ide> --cpuset-mems string MEMs in which to allow execution (0-3, 0,1)
<ide> --help Print usage
<ide><path>integration-cli/docker_cli_update_unix_test.go
<ide> func (s *DockerSuite) TestUpdateNotAffectMonitorRestartPolicy(c *check.C) {
<ide> c.Assert(err, checker.IsNil)
<ide> c.Assert(waitRun(id), checker.IsNil)
<ide> }
<add>
<add>func (s *DockerSuite) TestUpdateWithNanoCPUs(c *check.C) {
<add> testRequires(c, cpuCfsQuota, cpuCfsPeriod)
<add>
<add> file1 := "/sys/fs/cgroup/cpu/cpu.cfs_quota_us"
<add> file2 := "/sys/fs/cgroup/cpu/cpu.cfs_period_us"
<add>
<add> out, _ := dockerCmd(c, "run", "-d", "--cpus", "0.5", "--name", "top", "busybox", "top")
<add> c.Assert(strings.TrimSpace(out), checker.Not(checker.Equals), "")
<add>
<add> out, _ = dockerCmd(c, "exec", "top", "sh", "-c", fmt.Sprintf("cat %s && cat %s", file1, file2))
<add> c.Assert(strings.TrimSpace(out), checker.Equals, "50000\n100000")
<add>
<add> out = inspectField(c, "top", "HostConfig.NanoCpus")
<add> c.Assert(out, checker.Equals, "5e+08", check.Commentf("setting the Nano CPUs failed"))
<add> out = inspectField(c, "top", "HostConfig.CpuQuota")
<add> c.Assert(out, checker.Equals, "0", check.Commentf("CPU CFS quota should be 0"))
<add> out = inspectField(c, "top", "HostConfig.CpuPeriod")
<add> c.Assert(out, checker.Equals, "0", check.Commentf("CPU CFS period should be 0"))
<add>
<add> out, _, err := dockerCmdWithError("update", "--cpu-quota", "80000", "top")
<add> c.Assert(err, checker.NotNil)
<add> c.Assert(out, checker.Contains, "Conflicting options: CPU Quota cannot be updated as NanoCPUs has already been set")
<add>
<add> out, _ = dockerCmd(c, "update", "--cpus", "0.8", "top")
<add> out = inspectField(c, "top", "HostConfig.NanoCpus")
<add> c.Assert(out, checker.Equals, "8e+08", check.Commentf("updating the Nano CPUs failed"))
<add> out = inspectField(c, "top", "HostConfig.CpuQuota")
<add> c.Assert(out, checker.Equals, "0", check.Commentf("CPU CFS quota should be 0"))
<add> out = inspectField(c, "top", "HostConfig.CpuPeriod")
<add> c.Assert(out, checker.Equals, "0", check.Commentf("CPU CFS period should be 0"))
<add>
<add> out, _ = dockerCmd(c, "exec", "top", "sh", "-c", fmt.Sprintf("cat %s && cat %s", file1, file2))
<add> c.Assert(strings.TrimSpace(out), checker.Equals, "80000\n100000")
<add>} | 6 |
Python | Python | add code for dynamic sorting | 3f228f476dec45858f36b1fe6541193bcebf32e9 | <ide><path>glances/core/glances_processes.py
<ide> def __init__(self, process=None, stats=None, sort_key=None, root=False):
<ide> self.children = []
<ide> self.children_sorted = False
<ide> self.sort_key = sort_key
<add> self.reverse_sorting = True
<ide> self.is_root = root
<ide>
<ide> def __str__(self):
<ide> def __str__(self):
<ide> lines.append(indent_str + child_line)
<ide> return "\n".join(lines)
<ide>
<add> def setSorting(self, key, reverse):
<add> if (self.sort_key != key) or (self.reverse_sorting != reverse):
<add> self.children_sorted = False
<add> self.sort_key = key
<add> self.reverse_sorting = reverse
<add>
<ide> def getRessourceUsage(self):
<ide> """ Return ressource usage for a process and all its children. """
<ide> # special case for root
<ide> def __iter__(self):
<ide> if not self.is_root:
<ide> yield self
<ide> if not self.children_sorted:
<del> self.children.sort(key=__class__.getRessourceUsage, reverse=True)
<add> self.children.sort(key=__class__.getRessourceUsage, reverse=self.reverse_sorting)
<ide> self.children_sorted = True
<ide> for child in self.children:
<ide> yield from iter(child) | 1 |
Go | Go | add container migration to aufs driver | 29f07f854497571db570be79c8df878624f5b41c | <ide><path>graphdriver/aufs/aufs.go
<ide> func (AufsDriver) Status() [][2]string {
<ide> return nil
<ide> }
<ide>
<add>// Exists returns true if the given id is registered with
<add>// this driver
<ide> func (a AufsDriver) Exists(id string) bool {
<ide> if _, err := os.Lstat(path.Join(a.rootPath(), "diff", id)); err != nil {
<ide> return false
<ide><path>graphdriver/aufs/migrate.go
<ide> import (
<ide> "io/ioutil"
<ide> "os"
<ide> "path"
<del> "time"
<ide> )
<ide>
<del>type imageMetadata struct {
<del> ID string `json:"id"`
<del> ParentID string `json:"parent,omitempty"`
<del> Created time.Time `json:"created"`
<del> DockerVersion string `json:"docker_version,omitempty"`
<del> Architecture string `json:"architecture,omitempty"`
<add>type metadata struct {
<add> ID string `json:"id"`
<add> ParentID string `json:"parent,omitempty"`
<add> Image string `json:"Image,omitempty"`
<ide>
<del> parent *imageMetadata
<add> parent *metadata
<ide> }
<ide>
<ide> func pathExists(pth string) bool {
<ide> func pathExists(pth string) bool {
<ide> }
<ide>
<ide> // Migrate existing images and containers from docker < 0.7.x
<del>func (a *AufsDriver) Migrate(pth string) error {
<add>//
<add>// The format pre 0.7 is for docker to store the metadata and filesystem
<add>// content in the same directory. For the migration to work we need to move Image layer
<add>// data from /var/lib/docker/graph/<id>/layers to the diff of the registered id.
<add>//
<add>// Next we need to migrate the container's rw layer to diff of the driver. After the
<add>// contents are migrated we need to register the image and container ids with the
<add>// driver.
<add>//
<add>// For the migration we try to move the folder containing the layer files, if that
<add>// fails because the data is currently mounted we will fallback to creating a
<add>// symlink.
<add>func (a *AufsDriver) Migrate(pth string, setupInit func(p string) error) error {
<add> if pathExists(path.Join(pth, "graph")) {
<add> if err := a.migrateImages(path.Join(pth, "graph")); err != nil {
<add> return err
<add> }
<add> return a.migrateContainers(path.Join(pth, "containers"), setupInit)
<add> }
<add> return nil
<add>}
<add>
<add>func (a *AufsDriver) migrateContainers(pth string, setupInit func(p string) error) error {
<add> fis, err := ioutil.ReadDir(pth)
<add> if err != nil {
<add> return err
<add> }
<add>
<add> for _, fi := range fis {
<add> if id := fi.Name(); fi.IsDir() && pathExists(path.Join(pth, id, "rw")) && !a.Exists(id) {
<add> if err := tryRelocate(path.Join(pth, id, "rw"), path.Join(a.rootPath(), "diff", id)); err != nil {
<add> return err
<add> }
<add>
<add> metadata, err := loadMetadata(path.Join(pth, id, "config.json"))
<add> if err != nil {
<add> return err
<add> }
<add>
<add> initID := fmt.Sprintf("%s-init", id)
<add> if err := a.Create(initID, metadata.Image); err != nil {
<add> return err
<add> }
<add>
<add> initPath, err := a.Get(initID)
<add> if err != nil {
<add> return err
<add> }
<add> // setup init layer
<add> if err := setupInit(initPath); err != nil {
<add> return err
<add> }
<add>
<add> if err := a.Create(id, initID); err != nil {
<add> return err
<add> }
<add> }
<add> }
<add> return nil
<add>}
<add>
<add>func (a *AufsDriver) migrateImages(pth string) error {
<ide> fis, err := ioutil.ReadDir(pth)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> var (
<del> metadata = make(map[string]*imageMetadata)
<del> current *imageMetadata
<del> exists bool
<add> m = make(map[string]*metadata)
<add> current *metadata
<add> exists bool
<ide> )
<ide>
<del> // Load metadata
<ide> for _, fi := range fis {
<ide> if id := fi.Name(); fi.IsDir() && pathExists(path.Join(pth, id, "layer")) && !a.Exists(id) {
<del> if current, exists = metadata[id]; !exists {
<del> current, err = loadMetadata(pth, id)
<add> if current, exists = m[id]; !exists {
<add> current, err = loadMetadata(path.Join(pth, id, "json"))
<ide> if err != nil {
<ide> return err
<ide> }
<del> metadata[id] = current
<add> m[id] = current
<ide> }
<ide> }
<ide> }
<ide>
<del> // Recreate tree
<del> for _, v := range metadata {
<del> v.parent = metadata[v.ParentID]
<add> for _, v := range m {
<add> v.parent = m[v.ParentID]
<ide> }
<ide>
<del> // Perform image migration
<del> for _, v := range metadata {
<del> if err := migrateImage(v, a, pth); err != nil {
<add> for _, v := range m {
<add> if err := a.migrateImage(v, pth); err != nil {
<ide> return err
<ide> }
<ide> }
<ide> return nil
<ide> }
<ide>
<del>func migrateImage(m *imageMetadata, a *AufsDriver, pth string) error {
<add>func (a *AufsDriver) migrateImage(m *metadata, pth string) error {
<ide> if !pathExists(path.Join(a.rootPath(), "diff", m.ID)) {
<ide> if m.parent != nil {
<del> migrateImage(m.parent, a, pth)
<add> a.migrateImage(m.parent, pth)
<ide> }
<ide> if err := tryRelocate(path.Join(pth, m.ID, "layer"), path.Join(a.rootPath(), "diff", m.ID)); err != nil {
<ide> return err
<ide> func tryRelocate(oldPath, newPath string) error {
<ide> return nil
<ide> }
<ide>
<del>func loadMetadata(pth, id string) (*imageMetadata, error) {
<del> f, err := os.Open(path.Join(pth, id, "json"))
<add>func loadMetadata(pth string) (*metadata, error) {
<add> f, err := os.Open(pth)
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> defer f.Close()
<ide>
<ide> var (
<del> out = &imageMetadata{}
<add> out = &metadata{}
<ide> dec = json.NewDecoder(f)
<ide> )
<ide>
<ide><path>runtime.go
<ide> func NewRuntimeFromDirectory(config *DaemonConfig) (*Runtime, error) {
<ide> }
<ide>
<ide> if ad, ok := driver.(*aufs.AufsDriver); ok {
<del> if err := ad.Migrate(path.Join(config.Root, "graph")); err != nil {
<add> if err := ad.Migrate(config.Root, setupInitLayer); err != nil {
<ide> return nil, err
<ide> }
<ide> } | 3 |
Javascript | Javascript | remove superfluous check in while loop | b359be74a1102b7220235b41821a410cd6f3b7a0 | <ide><path>src/renderers/shared/stack/reconciler/ReactUpdates.js
<ide> var flushBatchedUpdates = function() {
<ide> // componentDidUpdate) but we need to check here too in order to catch
<ide> // updates enqueued by setState callbacks.
<ide> while (dirtyComponents.length) {
<del> if (dirtyComponents.length) {
<del> var transaction = ReactUpdatesFlushTransaction.getPooled();
<del> transaction.perform(runBatchedUpdates, null, transaction);
<del> ReactUpdatesFlushTransaction.release(transaction);
<del> }
<add> var transaction = ReactUpdatesFlushTransaction.getPooled();
<add> transaction.perform(runBatchedUpdates, null, transaction);
<add> ReactUpdatesFlushTransaction.release(transaction);
<ide> }
<ide> };
<ide> | 1 |
Ruby | Ruby | fix documentation for actionmailer template names | 94b97f6154e3a86fe4520dc685d010a533105650 | <ide><path>actionmailer/lib/action_mailer/base.rb
<ide> module ActionMailer # :nodoc:
<ide> #
<ide> # For example, if the following templates exist:
<ide> # * signup_notification.text.erb
<del> # * signup_notification.text.html.erb
<del> # * signup_notification.text.xml.builder
<del> # * signup_notification.text.yaml.erb
<add> # * signup_notification.html.erb
<add> # * signup_notification.xml.builder
<add> # * signup_notification.yaml.erb
<ide> #
<ide> # Each would be rendered and added as a separate part to the message, with the corresponding content
<ide> # type. The content type for the entire message is automatically set to <tt>multipart/alternative</tt>,
<ide> module ActionMailer # :nodoc:
<ide> # end
<ide> # end
<ide> #
<del> # Which will (if it had both a <tt>welcome.text.erb</tt> and <tt>welcome.text.html.erb</tt>
<add> # Which will (if it had both a <tt>welcome.text.erb</tt> and <tt>welcome.html.erb</tt>
<ide> # template in the view directory), send a complete <tt>multipart/mixed</tt> email with two parts,
<ide> # the first part being a <tt>multipart/alternative</tt> with the text and HTML email parts inside,
<ide> # and the second being a <tt>application/pdf</tt> with a Base64 encoded copy of the file.pdf book | 1 |
Python | Python | update error code | 872121955c1ba3e8b9d4b2ee9b9ac89b2e85d1d5 | <ide><path>spacy/cli/pretrain.py
<ide> def get_vectors_loss(ops, docs, prediction, objective="L2"):
<ide> elif objective == "cosine":
<ide> loss, d_target = get_cossim_loss(prediction, target)
<ide> else:
<del> raise ValueError(Errors.E139.format(loss_func=objective))
<add> raise ValueError(Errors.E142.format(loss_func=objective))
<ide> return loss, d_target
<ide>
<ide> | 1 |
Python | Python | fix momentum value flag from string to float | ccb463174f17bab2626d9c292e2d82038f07e8a5 | <ide><path>research/attention_ocr/python/common_flags.py
<ide> def define():
<ide> flags.DEFINE_string('optimizer', 'momentum',
<ide> 'the optimizer to use')
<ide>
<del> flags.DEFINE_string('momentum', 0.9,
<add> flags.DEFINE_float('momentum', 0.9,
<ide> 'momentum value for the momentum optimizer if used')
<ide>
<ide> flags.DEFINE_bool('use_augment_input', True, | 1 |
Text | Text | explain arbitrariness of ref name in callback | 79be3543ddf9ea7a12a40411e93daecada108645 | <ide><path>docs/docs/refs-and-the-dom.md
<ide> class CustomTextInput extends React.Component {
<ide>
<ide> render() {
<ide> // Use the `ref` callback to store a reference to the text input DOM
<del> // element in this.textInput.
<add> // element in an instance field (for example, this.textInput).
<ide> return (
<ide> <div>
<ide> <input | 1 |
Text | Text | add parentheses to print commands | d63ea99e3f43c0d39bdd496067e2fc5035919087 | <ide><path>guide/english/python/basic-operators/index.md
<ide> A membership operator is used to identify membership in any sequence (e.g. lists
<ide> a = [1,2,3,4,5]
<ide>
<ide> #Is 3 in the list a?
<del>print 3 in a # prints True
<add>print( 3 in a ) # prints True
<ide>
<ide> #Is 12 not in list a?
<del>print 12 not in a # prints True
<add>print( 12 not in a ) # prints True
<ide>
<ide> str = "Hello World"
<ide>
<ide> #Does the string str contain World?
<del>print "World" in str # prints True
<add>print( "World" in str ) # prints True
<ide>
<ide> #Does the string str contain world? (note: case sensitive)
<del>print "world" in str # prints False
<add>print( "world" in str ) # prints False
<ide>
<del>print "code" not in str # prints True
<add>print( "code" not in str ) # prints True
<ide> ```
<ide> #### Identity Operators
<ide>
<ide> Please note that two values being equal does not necessarily require they be ide
<ide> a = 3
<ide> b = 3
<ide> c = 4
<del>print a is b # prints True
<del>print a is not b # prints False
<del>print a is not c # prints True
<add>print( a is b ) # prints True
<add>print( a is not b ) # prints False
<add>print( a is not c ) # prints True
<ide>
<ide> x = 1
<ide> y = x
<ide> z = y
<del>print z is 1 # prints True
<del>print z is x # prints True
<del>print y is x # prints True
<add>print( z is 1 ) # prints True
<add>print( z is x ) # prints True
<add>print( y is x ) # prints True
<ide>
<ide> str1 = "FreeCodeCamp"
<ide> str2 = "FreeCodeCamp"
<ide>
<del>print str1 is str2 # prints True
<del>print "Code" is str2 # prints False
<add>print( str1 is str2 ) # prints True
<add>print( "Code" is str2 ) # prints False
<ide>
<ide> a = [10,20,30]
<ide> b = [10,20,30]
<ide>
<del>print a is b # prints False (since lists are mutable in Python)
<add>print( a is b ) # prints False (since lists are mutable in Python)
<ide>
<del>print(str1[:4])
<add>print( str1[:4] )
<ide> # Above code gives the output as Free
<del>print(str[4:])
<add>print( str[4:] )
<ide> # Above code gives the output as CodeCamp
<ide> ``` | 1 |
Ruby | Ruby | fix failing ap tests | 0f4da5b39394fe1096e93a7f2600f4d90330d1c0 | <ide><path>actionpack/test/dispatch/routing_test.rb
<ide> def test_nested_resource_constraints
<ide>
<ide> def test_named_routes_collision_is_avoided_unless_explicitly_given_as
<ide> assert_equal "/c/1", routes_collision_path(1)
<del> assert_equal "/forced_collision", routes_forced_collision_path
<add> assert_equal "/fc", routes_forced_collision_path
<ide> end
<ide>
<ide> def test_redirect_argument_error | 1 |
Go | Go | synchronize mkdirall() with latest os.mkdirall() | f058afc861c2f56bf9e97472e99df65c6493e694 | <ide><path>pkg/system/filesys_windows.go
<ide> func mkdirall(path string, perm *windows.SecurityAttributes) error {
<ide> if dir.IsDir() {
<ide> return nil
<ide> }
<del> return &os.PathError{
<del> Op: "mkdir",
<del> Path: path,
<del> Err: syscall.ENOTDIR,
<del> }
<add> return &os.PathError{Op: "mkdir", Path: path, Err: syscall.ENOTDIR}
<ide> }
<ide>
<ide> // Slow path: make sure parent exists and then call Mkdir for path.
<ide> func mkdirall(path string, perm *windows.SecurityAttributes) error {
<ide> }
<ide>
<ide> if j > 1 {
<del> // Create parent
<del> err = mkdirall(path[0:j-1], perm)
<add> // Create parent.
<add> err = mkdirall(fixRootDirectory(path[:j-1]), perm)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> }
<ide>
<del> // Parent now exists; invoke os.Mkdir or mkdirWithACL and use its result.
<add> // Parent now exists; invoke Mkdir and use its result.
<ide> err = mkdirWithACL(path, perm)
<ide> if err != nil {
<ide> // Handle arguments like "foo/." by
<ide> func mkdirWithACL(name string, sa *windows.SecurityAttributes) error {
<ide> return nil
<ide> }
<ide>
<add>// fixRootDirectory fixes a reference to a drive's root directory to
<add>// have the required trailing slash.
<add>func fixRootDirectory(p string) string {
<add> if len(p) == len(`\\?\c:`) {
<add> if os.IsPathSeparator(p[0]) && os.IsPathSeparator(p[1]) && p[2] == '?' && os.IsPathSeparator(p[3]) && p[5] == ':' {
<add> return p + `\`
<add> }
<add> }
<add> return p
<add>}
<add>
<ide> func makeSecurityAttributes(sddl string) (*windows.SecurityAttributes, error) {
<ide> var sa windows.SecurityAttributes
<ide> sa.Length = uint32(unsafe.Sizeof(sa)) | 1 |
Java | Java | add more systrace to rn startup | b1d205a28f7897e582cced36947c84e7bce7dd09 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/ReactInstanceManager.java
<ide> private ReactApplicationContext createReactContext(
<ide> catalystInstance.setGlobalVariable("__RCTProfileIsProfiling", "true");
<ide> }
<ide> ReactMarker.logMarker(ReactMarkerConstants.PRE_RUN_JS_BUNDLE_START);
<add> Systrace.beginSection(TRACE_TAG_REACT_JAVA_BRIDGE, "runJSBundle");
<ide> catalystInstance.runJSBundle();
<add> Systrace.endSection(TRACE_TAG_REACT_JAVA_BRIDGE);
<add>
<ide> reactContext.initializeWithInstance(catalystInstance);
<ide>
<add>
<ide> return reactContext;
<ide> }
<ide>
<ide> private void processPackage(
<ide> if (reactPackage instanceof ReactPackageLogger) {
<ide> ((ReactPackageLogger) reactPackage).endProcessPackage();
<ide> }
<del> SystraceMessage.endSection(TRACE_TAG_REACT_JAVA_BRIDGE).flush();
<add> SystraceMessage.endSection(TRACE_TAG_REACT_JAVA_BRIDGE);
<ide> }
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/react/bridge/CatalystInstanceImpl.java
<ide>
<ide> package com.facebook.react.bridge;
<ide>
<add>import static com.facebook.systrace.Systrace.TRACE_TAG_REACT_JAVA_BRIDGE;
<add>
<ide> import android.content.res.AssetManager;
<ide> import android.os.AsyncTask;
<ide> import android.util.Log;
<ide> private CatalystInstanceImpl(
<ide> final JSBundleLoader jsBundleLoader,
<ide> NativeModuleCallExceptionHandler nativeModuleCallExceptionHandler) {
<ide> Log.d(ReactConstants.TAG, "Initializing React Xplat Bridge.");
<add> Systrace.beginSection(TRACE_TAG_REACT_JAVA_BRIDGE, "createCatalystInstanceImpl");
<add>
<ide> mHybridData = initHybrid();
<ide>
<ide> mReactQueueConfiguration = ReactQueueConfigurationImpl.create(
<ide> private CatalystInstanceImpl(
<ide> mNativeModuleCallExceptionHandler = nativeModuleCallExceptionHandler;
<ide> mNativeModulesQueueThread = mReactQueueConfiguration.getNativeModulesQueueThread();
<ide> mTraceListener = new JSProfilerTraceListener(this);
<add> Systrace.endSection(TRACE_TAG_REACT_JAVA_BRIDGE);
<ide>
<ide> Log.d(ReactConstants.TAG, "Initializing React Xplat Bridge before initializeBridge");
<add> Systrace.beginSection(TRACE_TAG_REACT_JAVA_BRIDGE, "initializeCxxBridge");
<ide> initializeBridge(
<ide> new BridgeCallback(this),
<ide> jsExecutor,
<ide> private CatalystInstanceImpl(
<ide> mNativeModuleRegistry.getJavaModules(this),
<ide> mNativeModuleRegistry.getCxxModules());
<ide> Log.d(ReactConstants.TAG, "Initializing React Xplat Bridge after initializeBridge");
<add> Systrace.endSection(TRACE_TAG_REACT_JAVA_BRIDGE);
<ide>
<ide> mJavaScriptContextHolder = new JavaScriptContextHolder(getJavaScriptContext());
<ide> } | 2 |
Mixed | Javascript | move all helpers to src/helpers | c8bdca62e89b4d6d9f334cec7a8bfa673725014c | <ide><path>docs/getting-started/v3-migration.md
<ide> Chart.js 3.0 introduces a number of breaking changes. Chart.js 2.0 was released
<ide> * `helpers.getValueAtIndexOrDefault` was renamed to `helpers.valueAtIndexOrDefault`
<ide> * `helpers.easingEffects` was renamed to `helpers.easing.effects`
<ide> * `helpers.log10` was renamed to `helpers.math.log10`
<add>* `helpers.almostEquals` was renamed to `helpers.math.almostEquals`
<add>* `helpers.almostWhole` was renamed to `helpers.math.almostWhole`
<add>* `helpers._decimalPlaces` was renamed to `helpers.math._decimalPlaces`
<add>* `helpers.distanceBetweenPoints` was renamed to `helpers.math.distanceBetweenPoints`
<add>* `helpers.isNumber` was renamed to `helpers.math.isNumber`
<add>* `helpers.sign` was renamed to `helpers.math.sign`
<add>* `helpers.toDegrees` was renamed to `helpers.math.toDegrees`
<add>* `helpers.toRadians` was renamed to `helpers.math.toRadians`
<add>* `helpers.getAngleFromPoint` was renamed to `helpers.math.getAngleFromPoint`
<add>* `helpers.splineCurveMonotone` was renamed to `helpers.curve.splineCurveMonotone`
<add>* `helpers.splineCurve` was renamed to `helpers.curve.splineCurve`
<add>* `helpers.retinaScale` was renamed to `helpers.dom.retinaScale`
<add>* `helpers.getMaximumWidth` was renamed to `helpers.dom.getMaximumWidth`
<add>* `helpers.getMaximumHeight` was renamed to `helpers.dom.getMaximumHeight`
<add>* `helpers.getRelativePosition` was renamed to `helpers.dom.getRelativePosition`
<add>* `helpers.getStyle` was renamed to `helpers.dom.getStyle`
<ide> * `Chart.Animation.animationObject` was renamed to `Chart.Animation`
<ide> * `Chart.Animation.chartInstance` was renamed to `Chart.Animation.chart`
<ide> * `DatasetController.updateElement` was renamed to `DatasetController.updateElements`
<ide><path>src/controllers/controller.bar.js
<ide> module.exports = DatasetController.extend({
<ide> value = custom.barStart;
<ide> length = custom.barEnd - custom.barStart;
<ide> // bars crossing origin are not stacked
<del> if (value !== 0 && helpers.sign(value) !== helpers.sign(custom.barEnd)) {
<add> if (value !== 0 && helpers.math.sign(value) !== helpers.math.sign(custom.barEnd)) {
<ide> start = 0;
<ide> }
<ide> start += value;
<ide><path>src/controllers/controller.line.js
<ide> module.exports = DatasetController.extend({
<ide> }
<ide>
<ide> if (lineModel.cubicInterpolationMode === 'monotone') {
<del> helpers.splineCurveMonotone(points);
<add> helpers.curve.splineCurveMonotone(points);
<ide> } else {
<ide> for (i = 0, ilen = points.length; i < ilen; ++i) {
<ide> const model = points[i]._model;
<del> const controlPoints = helpers.splineCurve(
<add> const controlPoints = helpers.curve.splineCurve(
<ide> points[Math.max(0, i - 1)]._model,
<ide> model,
<ide> points[Math.min(i + 1, ilen - 1)]._model,
<ide><path>src/controllers/controller.radar.js
<ide> module.exports = DatasetController.extend({
<ide>
<ide> for (i = 0, ilen = points.length; i < ilen; ++i) {
<ide> model = points[i]._model;
<del> controlPoints = helpers.splineCurve(
<add> controlPoints = helpers.curve.splineCurve(
<ide> previousItem(points, i)._model,
<ide> model,
<ide> nextItem(points, i)._model,
<ide><path>src/core/core.controller.js
<ide> helpers.extend(Chart.prototype, /** @lends Chart */ {
<ide> // Before init plugin notification
<ide> plugins.notify(me, 'beforeInit');
<ide>
<del> helpers.retinaScale(me, me.options.devicePixelRatio);
<add> helpers.dom.retinaScale(me, me.options.devicePixelRatio);
<ide>
<ide> me.bindEvents();
<ide>
<ide> helpers.extend(Chart.prototype, /** @lends Chart */ {
<ide> // the canvas display style uses the same integer values to avoid blurring effect.
<ide>
<ide> // Set to 0 instead of canvas.size because the size defaults to 300x150 if the element is collapsed
<del> var newWidth = Math.max(0, Math.floor(helpers.getMaximumWidth(canvas)));
<del> var newHeight = Math.max(0, Math.floor(aspectRatio ? newWidth / aspectRatio : helpers.getMaximumHeight(canvas)));
<add> var newWidth = Math.max(0, Math.floor(helpers.dom.getMaximumWidth(canvas)));
<add> var newHeight = Math.max(0, Math.floor(aspectRatio ? newWidth / aspectRatio : helpers.dom.getMaximumHeight(canvas)));
<ide>
<ide> if (me.width === newWidth && me.height === newHeight) {
<ide> return;
<ide> helpers.extend(Chart.prototype, /** @lends Chart */ {
<ide> canvas.style.width = newWidth + 'px';
<ide> canvas.style.height = newHeight + 'px';
<ide>
<del> helpers.retinaScale(me, options.devicePixelRatio);
<add> helpers.dom.retinaScale(me, options.devicePixelRatio);
<ide>
<ide> if (!silent) {
<ide> // Notify any plugins about the resize
<ide><path>src/core/core.datasetController.js
<ide> function applyStack(stack, value, dsIndex, allOther) {
<ide> break;
<ide> }
<ide> otherValue = stack.values[datasetIndex];
<del> if (!isNaN(otherValue) && (value === 0 || helpers.sign(value) === helpers.sign(otherValue))) {
<add> if (!isNaN(otherValue) && (value === 0 || helpers.math.sign(value) === helpers.math.sign(otherValue))) {
<ide> value += otherValue;
<ide> }
<ide> }
<ide><path>src/core/core.element.js
<ide> 'use strict';
<ide>
<del>const color = require('chartjs-color');
<del>const helpers = require('../helpers/index');
<add>import color from 'chartjs-color';
<add>import helpers from '../helpers/index';
<add>import {isNumber} from '../helpers/helpers.math';
<ide>
<ide> function interpolate(start, view, model, ease) {
<ide> var keys = Object.keys(model);
<ide> class Element {
<ide> }
<ide>
<ide> hasValue() {
<del> return helpers.isNumber(this._model.x) && helpers.isNumber(this._model.y);
<add> return isNumber(this._model.x) && isNumber(this._model.y);
<ide> }
<ide> }
<ide>
<ide> Element.extend = helpers.inherits;
<del>
<del>module.exports = Element;
<add>export default Element;
<ide><path>src/core/core.helpers.js
<del>'use strict';
<del>
<del>var color = require('chartjs-color');
<del>var defaults = require('./core.defaults');
<del>var helpers = require('../helpers/index');
<del>
<del>module.exports = function() {
<del>
<del> // -- Basic js utility methods
<del>
<del> helpers.where = function(collection, filterCallback) {
<del> if (helpers.isArray(collection) && Array.prototype.filter) {
<del> return collection.filter(filterCallback);
<del> }
<del> var filtered = [];
<del>
<del> helpers.each(collection, function(item) {
<del> if (filterCallback(item)) {
<del> filtered.push(item);
<del> }
<del> });
<del>
<del> return filtered;
<del> };
<del> helpers.findIndex = Array.prototype.findIndex ?
<del> function(array, callback, scope) {
<del> return array.findIndex(callback, scope);
<del> } :
<del> function(array, callback, scope) {
<del> scope = scope === undefined ? array : scope;
<del> for (var i = 0, ilen = array.length; i < ilen; ++i) {
<del> if (callback.call(scope, array[i], i, array)) {
<del> return i;
<del> }
<del> }
<del> return -1;
<del> };
<del> helpers.findNextWhere = function(arrayToSearch, filterCallback, startIndex) {
<del> // Default to start of the array
<del> if (helpers.isNullOrUndef(startIndex)) {
<del> startIndex = -1;
<del> }
<del> for (var i = startIndex + 1; i < arrayToSearch.length; i++) {
<del> var currentItem = arrayToSearch[i];
<del> if (filterCallback(currentItem)) {
<del> return currentItem;
<del> }
<del> }
<del> };
<del> helpers.findPreviousWhere = function(arrayToSearch, filterCallback, startIndex) {
<del> // Default to end of the array
<del> if (helpers.isNullOrUndef(startIndex)) {
<del> startIndex = arrayToSearch.length;
<del> }
<del> for (var i = startIndex - 1; i >= 0; i--) {
<del> var currentItem = arrayToSearch[i];
<del> if (filterCallback(currentItem)) {
<del> return currentItem;
<del> }
<del> }
<del> };
<del>
<del> // -- Math methods
<del> helpers.isNumber = function(n) {
<del> return !isNaN(parseFloat(n)) && isFinite(n);
<del> };
<del> helpers.almostEquals = function(x, y, epsilon) {
<del> return Math.abs(x - y) < epsilon;
<del> };
<del> helpers.almostWhole = function(x, epsilon) {
<del> var rounded = Math.round(x);
<del> return ((rounded - epsilon) <= x) && ((rounded + epsilon) >= x);
<del> };
<del> helpers._setMinAndMax = function(array, target) {
<del> var i, ilen, value;
<del>
<del> for (i = 0, ilen = array.length; i < ilen; i++) {
<del> value = array[i];
<del> if (!isNaN(value)) {
<del> target.min = Math.min(target.min, value);
<del> target.max = Math.max(target.max, value);
<del> }
<del> }
<del> };
<del> helpers._setMinAndMaxByKey = function(array, target, property) {
<del> var i, ilen, value;
<del>
<del> for (i = 0, ilen = array.length; i < ilen; i++) {
<del> value = array[i][property];
<del> if (!isNaN(value)) {
<del> target.min = Math.min(target.min, value);
<del> target.max = Math.max(target.max, value);
<del> }
<del> }
<del> };
<del> helpers.sign = Math.sign ?
<del> function(x) {
<del> return Math.sign(x);
<del> } :
<del> function(x) {
<del> x = +x; // convert to a number
<del> if (x === 0 || isNaN(x)) {
<del> return x;
<del> }
<del> return x > 0 ? 1 : -1;
<del> };
<del> helpers.toRadians = function(degrees) {
<del> return degrees * (Math.PI / 180);
<del> };
<del> helpers.toDegrees = function(radians) {
<del> return radians * (180 / Math.PI);
<del> };
<del>
<del> /**
<del> * Returns the number of decimal places
<del> * i.e. the number of digits after the decimal point, of the value of this Number.
<del> * @param {number} x - A number.
<del> * @returns {number} The number of decimal places.
<del> * @private
<del> */
<del> helpers._decimalPlaces = function(x) {
<del> if (!helpers.isFinite(x)) {
<del> return;
<del> }
<del> var e = 1;
<del> var p = 0;
<del> while (Math.round(x * e) / e !== x) {
<del> e *= 10;
<del> p++;
<del> }
<del> return p;
<del> };
<del>
<del> // Gets the angle from vertical upright to the point about a centre.
<del> helpers.getAngleFromPoint = function(centrePoint, anglePoint) {
<del> var distanceFromXCenter = anglePoint.x - centrePoint.x;
<del> var distanceFromYCenter = anglePoint.y - centrePoint.y;
<del> var radialDistanceFromCenter = Math.sqrt(distanceFromXCenter * distanceFromXCenter + distanceFromYCenter * distanceFromYCenter);
<del>
<del> var angle = Math.atan2(distanceFromYCenter, distanceFromXCenter);
<del>
<del> if (angle < (-0.5 * Math.PI)) {
<del> angle += 2.0 * Math.PI; // make sure the returned angle is in the range of (-PI/2, 3PI/2]
<del> }
<del>
<del> return {
<del> angle: angle,
<del> distance: radialDistanceFromCenter
<del> };
<del> };
<del> helpers.distanceBetweenPoints = function(pt1, pt2) {
<del> return Math.sqrt(Math.pow(pt2.x - pt1.x, 2) + Math.pow(pt2.y - pt1.y, 2));
<del> };
<del>
<del> helpers.splineCurve = function(firstPoint, middlePoint, afterPoint, t) {
<del> // Props to Rob Spencer at scaled innovation for his post on splining between points
<del> // http://scaledinnovation.com/analytics/splines/aboutSplines.html
<del>
<del> // This function must also respect "skipped" points
<del>
<del> var previous = firstPoint.skip ? middlePoint : firstPoint;
<del> var current = middlePoint;
<del> var next = afterPoint.skip ? middlePoint : afterPoint;
<del>
<del> var d01 = Math.sqrt(Math.pow(current.x - previous.x, 2) + Math.pow(current.y - previous.y, 2));
<del> var d12 = Math.sqrt(Math.pow(next.x - current.x, 2) + Math.pow(next.y - current.y, 2));
<del>
<del> var s01 = d01 / (d01 + d12);
<del> var s12 = d12 / (d01 + d12);
<del>
<del> // If all points are the same, s01 & s02 will be inf
<del> s01 = isNaN(s01) ? 0 : s01;
<del> s12 = isNaN(s12) ? 0 : s12;
<del>
<del> var fa = t * s01; // scaling factor for triangle Ta
<del> var fb = t * s12;
<del>
<del> return {
<del> previous: {
<del> x: current.x - fa * (next.x - previous.x),
<del> y: current.y - fa * (next.y - previous.y)
<del> },
<del> next: {
<del> x: current.x + fb * (next.x - previous.x),
<del> y: current.y + fb * (next.y - previous.y)
<del> }
<del> };
<del> };
<del> helpers.EPSILON = Number.EPSILON || 1e-14;
<del> helpers.splineCurveMonotone = function(points) {
<del> // This function calculates Bézier control points in a similar way than |splineCurve|,
<del> // but preserves monotonicity of the provided data and ensures no local extremums are added
<del> // between the dataset discrete points due to the interpolation.
<del> // See : https://en.wikipedia.org/wiki/Monotone_cubic_interpolation
<del>
<del> var pointsWithTangents = (points || []).map(function(point) {
<del> return {
<del> model: point._model,
<del> deltaK: 0,
<del> mK: 0
<del> };
<del> });
<del>
<del> // Calculate slopes (deltaK) and initialize tangents (mK)
<del> var pointsLen = pointsWithTangents.length;
<del> var i, pointBefore, pointCurrent, pointAfter;
<del> for (i = 0; i < pointsLen; ++i) {
<del> pointCurrent = pointsWithTangents[i];
<del> if (pointCurrent.model.skip) {
<del> continue;
<del> }
<del>
<del> pointBefore = i > 0 ? pointsWithTangents[i - 1] : null;
<del> pointAfter = i < pointsLen - 1 ? pointsWithTangents[i + 1] : null;
<del> if (pointAfter && !pointAfter.model.skip) {
<del> var slopeDeltaX = (pointAfter.model.x - pointCurrent.model.x);
<del>
<del> // In the case of two points that appear at the same x pixel, slopeDeltaX is 0
<del> pointCurrent.deltaK = slopeDeltaX !== 0 ? (pointAfter.model.y - pointCurrent.model.y) / slopeDeltaX : 0;
<del> }
<del>
<del> if (!pointBefore || pointBefore.model.skip) {
<del> pointCurrent.mK = pointCurrent.deltaK;
<del> } else if (!pointAfter || pointAfter.model.skip) {
<del> pointCurrent.mK = pointBefore.deltaK;
<del> } else if (this.sign(pointBefore.deltaK) !== this.sign(pointCurrent.deltaK)) {
<del> pointCurrent.mK = 0;
<del> } else {
<del> pointCurrent.mK = (pointBefore.deltaK + pointCurrent.deltaK) / 2;
<del> }
<del> }
<del>
<del> // Adjust tangents to ensure monotonic properties
<del> var alphaK, betaK, tauK, squaredMagnitude;
<del> for (i = 0; i < pointsLen - 1; ++i) {
<del> pointCurrent = pointsWithTangents[i];
<del> pointAfter = pointsWithTangents[i + 1];
<del> if (pointCurrent.model.skip || pointAfter.model.skip) {
<del> continue;
<del> }
<del>
<del> if (helpers.almostEquals(pointCurrent.deltaK, 0, this.EPSILON)) {
<del> pointCurrent.mK = pointAfter.mK = 0;
<del> continue;
<del> }
<del>
<del> alphaK = pointCurrent.mK / pointCurrent.deltaK;
<del> betaK = pointAfter.mK / pointCurrent.deltaK;
<del> squaredMagnitude = Math.pow(alphaK, 2) + Math.pow(betaK, 2);
<del> if (squaredMagnitude <= 9) {
<del> continue;
<del> }
<del>
<del> tauK = 3 / Math.sqrt(squaredMagnitude);
<del> pointCurrent.mK = alphaK * tauK * pointCurrent.deltaK;
<del> pointAfter.mK = betaK * tauK * pointCurrent.deltaK;
<del> }
<del>
<del> // Compute control points
<del> var deltaX;
<del> for (i = 0; i < pointsLen; ++i) {
<del> pointCurrent = pointsWithTangents[i];
<del> if (pointCurrent.model.skip) {
<del> continue;
<del> }
<del>
<del> pointBefore = i > 0 ? pointsWithTangents[i - 1] : null;
<del> pointAfter = i < pointsLen - 1 ? pointsWithTangents[i + 1] : null;
<del> if (pointBefore && !pointBefore.model.skip) {
<del> deltaX = (pointCurrent.model.x - pointBefore.model.x) / 3;
<del> pointCurrent.model.controlPointPreviousX = pointCurrent.model.x - deltaX;
<del> pointCurrent.model.controlPointPreviousY = pointCurrent.model.y - deltaX * pointCurrent.mK;
<del> }
<del> if (pointAfter && !pointAfter.model.skip) {
<del> deltaX = (pointAfter.model.x - pointCurrent.model.x) / 3;
<del> pointCurrent.model.controlPointNextX = pointCurrent.model.x + deltaX;
<del> pointCurrent.model.controlPointNextY = pointCurrent.model.y + deltaX * pointCurrent.mK;
<del> }
<del> }
<del> };
<del> // Implementation of the nice number algorithm used in determining where axis labels will go
<del> helpers.niceNum = function(range, round) {
<del> var exponent = Math.floor(helpers.math.log10(range));
<del> var fraction = range / Math.pow(10, exponent);
<del> var niceFraction;
<del>
<del> if (round) {
<del> if (fraction < 1.5) {
<del> niceFraction = 1;
<del> } else if (fraction < 3) {
<del> niceFraction = 2;
<del> } else if (fraction < 7) {
<del> niceFraction = 5;
<del> } else {
<del> niceFraction = 10;
<del> }
<del> } else if (fraction <= 1.0) {
<del> niceFraction = 1;
<del> } else if (fraction <= 2) {
<del> niceFraction = 2;
<del> } else if (fraction <= 5) {
<del> niceFraction = 5;
<del> } else {
<del> niceFraction = 10;
<del> }
<del>
<del> return niceFraction * Math.pow(10, exponent);
<del> };
<del> // Request animation polyfill - https://www.paulirish.com/2011/requestanimationframe-for-smart-animating/
<del> helpers.requestAnimFrame = (function() {
<del> if (typeof window === 'undefined') {
<del> return function(callback) {
<del> callback();
<del> };
<del> }
<del> return window.requestAnimationFrame ||
<del> window.webkitRequestAnimationFrame ||
<del> window.mozRequestAnimationFrame ||
<del> window.oRequestAnimationFrame ||
<del> window.msRequestAnimationFrame ||
<del> function(callback) {
<del> return window.setTimeout(callback, 1000 / 60);
<del> };
<del> }());
<del> // -- DOM methods
<del> helpers.getRelativePosition = function(evt, chart) {
<del> var mouseX, mouseY;
<del> var e = evt.originalEvent || evt;
<del> var canvas = evt.target || evt.srcElement;
<del> var boundingRect = canvas.getBoundingClientRect();
<del>
<del> var touches = e.touches;
<del> if (touches && touches.length > 0) {
<del> mouseX = touches[0].clientX;
<del> mouseY = touches[0].clientY;
<del>
<del> } else {
<del> mouseX = e.clientX;
<del> mouseY = e.clientY;
<del> }
<del>
<del> // Scale mouse coordinates into canvas coordinates
<del> // by following the pattern laid out by 'jerryj' in the comments of
<del> // https://www.html5canvastutorials.com/advanced/html5-canvas-mouse-coordinates/
<del> var paddingLeft = parseFloat(helpers.getStyle(canvas, 'padding-left'));
<del> var paddingTop = parseFloat(helpers.getStyle(canvas, 'padding-top'));
<del> var paddingRight = parseFloat(helpers.getStyle(canvas, 'padding-right'));
<del> var paddingBottom = parseFloat(helpers.getStyle(canvas, 'padding-bottom'));
<del> var width = boundingRect.right - boundingRect.left - paddingLeft - paddingRight;
<del> var height = boundingRect.bottom - boundingRect.top - paddingTop - paddingBottom;
<del>
<del> // We divide by the current device pixel ratio, because the canvas is scaled up by that amount in each direction. However
<del> // the backend model is in unscaled coordinates. Since we are going to deal with our model coordinates, we go back here
<del> mouseX = Math.round((mouseX - boundingRect.left - paddingLeft) / (width) * canvas.width / chart.currentDevicePixelRatio);
<del> mouseY = Math.round((mouseY - boundingRect.top - paddingTop) / (height) * canvas.height / chart.currentDevicePixelRatio);
<del>
<del> return {
<del> x: mouseX,
<del> y: mouseY
<del> };
<del>
<del> };
<del>
<del> // Private helper function to convert max-width/max-height values that may be percentages into a number
<del> function parseMaxStyle(styleValue, node, parentProperty) {
<del> var valueInPixels;
<del> if (typeof styleValue === 'string') {
<del> valueInPixels = parseInt(styleValue, 10);
<del>
<del> if (styleValue.indexOf('%') !== -1) {
<del> // percentage * size in dimension
<del> valueInPixels = valueInPixels / 100 * node.parentNode[parentProperty];
<del> }
<del> } else {
<del> valueInPixels = styleValue;
<del> }
<del>
<del> return valueInPixels;
<del> }
<del>
<del> /**
<del> * Returns if the given value contains an effective constraint.
<del> * @private
<del> */
<del> function isConstrainedValue(value) {
<del> return value !== undefined && value !== null && value !== 'none';
<del> }
<del>
<del> /**
<del> * Returns the max width or height of the given DOM node in a cross-browser compatible fashion
<del> * @param {HTMLElement} domNode - the node to check the constraint on
<del> * @param {string} maxStyle - the style that defines the maximum for the direction we are using ('max-width' / 'max-height')
<del> * @param {string} percentageProperty - property of parent to use when calculating width as a percentage
<del> * @see {@link https://www.nathanaeljones.com/blog/2013/reading-max-width-cross-browser}
<del> */
<del> function getConstraintDimension(domNode, maxStyle, percentageProperty) {
<del> var view = document.defaultView;
<del> var parentNode = helpers._getParentNode(domNode);
<del> var constrainedNode = view.getComputedStyle(domNode)[maxStyle];
<del> var constrainedContainer = view.getComputedStyle(parentNode)[maxStyle];
<del> var hasCNode = isConstrainedValue(constrainedNode);
<del> var hasCContainer = isConstrainedValue(constrainedContainer);
<del> var infinity = Number.POSITIVE_INFINITY;
<del>
<del> if (hasCNode || hasCContainer) {
<del> return Math.min(
<del> hasCNode ? parseMaxStyle(constrainedNode, domNode, percentageProperty) : infinity,
<del> hasCContainer ? parseMaxStyle(constrainedContainer, parentNode, percentageProperty) : infinity);
<del> }
<del>
<del> return 'none';
<del> }
<del> // returns Number or undefined if no constraint
<del> helpers.getConstraintWidth = function(domNode) {
<del> return getConstraintDimension(domNode, 'max-width', 'clientWidth');
<del> };
<del> // returns Number or undefined if no constraint
<del> helpers.getConstraintHeight = function(domNode) {
<del> return getConstraintDimension(domNode, 'max-height', 'clientHeight');
<del> };
<del> /**
<del> * @private
<del> */
<del> helpers._calculatePadding = function(container, padding, parentDimension) {
<del> padding = helpers.getStyle(container, padding);
<del>
<del> return padding.indexOf('%') > -1 ? parentDimension * parseInt(padding, 10) / 100 : parseInt(padding, 10);
<del> };
<del> /**
<del> * @private
<del> */
<del> helpers._getParentNode = function(domNode) {
<del> var parent = domNode.parentNode;
<del> if (parent && parent.toString() === '[object ShadowRoot]') {
<del> parent = parent.host;
<del> }
<del> return parent;
<del> };
<del> helpers.getMaximumWidth = function(domNode) {
<del> var container = helpers._getParentNode(domNode);
<del> if (!container) {
<del> return domNode.clientWidth;
<del> }
<del>
<del> var clientWidth = container.clientWidth;
<del> var paddingLeft = helpers._calculatePadding(container, 'padding-left', clientWidth);
<del> var paddingRight = helpers._calculatePadding(container, 'padding-right', clientWidth);
<del>
<del> var w = clientWidth - paddingLeft - paddingRight;
<del> var cw = helpers.getConstraintWidth(domNode);
<del> return isNaN(cw) ? w : Math.min(w, cw);
<del> };
<del> helpers.getMaximumHeight = function(domNode) {
<del> var container = helpers._getParentNode(domNode);
<del> if (!container) {
<del> return domNode.clientHeight;
<del> }
<del>
<del> var clientHeight = container.clientHeight;
<del> var paddingTop = helpers._calculatePadding(container, 'padding-top', clientHeight);
<del> var paddingBottom = helpers._calculatePadding(container, 'padding-bottom', clientHeight);
<del>
<del> var h = clientHeight - paddingTop - paddingBottom;
<del> var ch = helpers.getConstraintHeight(domNode);
<del> return isNaN(ch) ? h : Math.min(h, ch);
<del> };
<del> helpers.getStyle = function(el, property) {
<del> return el.currentStyle ?
<del> el.currentStyle[property] :
<del> document.defaultView.getComputedStyle(el, null).getPropertyValue(property);
<del> };
<del> helpers.retinaScale = function(chart, forceRatio) {
<del> var pixelRatio = chart.currentDevicePixelRatio = forceRatio || (typeof window !== 'undefined' && window.devicePixelRatio) || 1;
<del> if (pixelRatio === 1) {
<del> return;
<del> }
<del>
<del> var canvas = chart.canvas;
<del> var height = chart.height;
<del> var width = chart.width;
<del>
<del> canvas.height = height * pixelRatio;
<del> canvas.width = width * pixelRatio;
<del> chart.ctx.scale(pixelRatio, pixelRatio);
<del>
<del> // If no style has been set on the canvas, the render size is used as display size,
<del> // making the chart visually bigger, so let's enforce it to the "correct" values.
<del> // See https://github.com/chartjs/Chart.js/issues/3575
<del> if (!canvas.style.height && !canvas.style.width) {
<del> canvas.style.height = height + 'px';
<del> canvas.style.width = width + 'px';
<del> }
<del> };
<del> // -- Canvas methods
<del> helpers.fontString = function(pixelSize, fontStyle, fontFamily) {
<del> return fontStyle + ' ' + pixelSize + 'px ' + fontFamily;
<del> };
<del> helpers.longestText = function(ctx, font, arrayOfThings, cache) {
<del> cache = cache || {};
<del> var data = cache.data = cache.data || {};
<del> var gc = cache.garbageCollect = cache.garbageCollect || [];
<del>
<del> if (cache.font !== font) {
<del> data = cache.data = {};
<del> gc = cache.garbageCollect = [];
<del> cache.font = font;
<del> }
<del>
<del> ctx.font = font;
<del> var longest = 0;
<del> var ilen = arrayOfThings.length;
<del> var i, j, jlen, thing, nestedThing;
<del> for (i = 0; i < ilen; i++) {
<del> thing = arrayOfThings[i];
<del>
<del> // Undefined strings and arrays should not be measured
<del> if (thing !== undefined && thing !== null && helpers.isArray(thing) !== true) {
<del> longest = helpers.measureText(ctx, data, gc, longest, thing);
<del> } else if (helpers.isArray(thing)) {
<del> // if it is an array lets measure each element
<del> // to do maybe simplify this function a bit so we can do this more recursively?
<del> for (j = 0, jlen = thing.length; j < jlen; j++) {
<del> nestedThing = thing[j];
<del> // Undefined strings and arrays should not be measured
<del> if (nestedThing !== undefined && nestedThing !== null && !helpers.isArray(nestedThing)) {
<del> longest = helpers.measureText(ctx, data, gc, longest, nestedThing);
<del> }
<del> }
<del> }
<del> }
<del>
<del> var gcLen = gc.length / 2;
<del> if (gcLen > arrayOfThings.length) {
<del> for (i = 0; i < gcLen; i++) {
<del> delete data[gc[i]];
<del> }
<del> gc.splice(0, gcLen);
<del> }
<del> return longest;
<del> };
<del> helpers.measureText = function(ctx, data, gc, longest, string) {
<del> var textWidth = data[string];
<del> if (!textWidth) {
<del> textWidth = data[string] = ctx.measureText(string).width;
<del> gc.push(string);
<del> }
<del> if (textWidth > longest) {
<del> longest = textWidth;
<del> }
<del> return longest;
<del> };
<del>
<del> helpers.color = !color ?
<del> function(value) {
<del> console.error('Color.js not found!');
<del> return value;
<del> } :
<del> function(value) {
<del> if (value instanceof CanvasGradient) {
<del> value = defaults.global.defaultColor;
<del> }
<del>
<del> return color(value);
<del> };
<del>
<del> helpers.getHoverColor = function(colorValue) {
<del> return (colorValue instanceof CanvasPattern || colorValue instanceof CanvasGradient) ?
<del> colorValue :
<del> helpers.color(colorValue).saturate(0.5).darken(0.1).rgbString();
<del> };
<del>};
<ide><path>src/core/core.interaction.js
<ide> 'use strict';
<ide>
<del>var helpers = require('../helpers/index');
<add>import helpers from '../helpers/index';
<add>import {isNumber} from '../helpers/helpers.math';
<ide>
<ide> /**
<ide> * Helper function to get relative position for an event
<ide> function getRelativePosition(e, chart) {
<ide> };
<ide> }
<ide>
<del> return helpers.getRelativePosition(e, chart);
<add> return helpers.dom.getRelativePosition(e, chart);
<ide> }
<ide>
<ide> /**
<ide> function evaluateItemsAtIndex(chart, axis, position, handler) {
<ide> return false;
<ide> }
<ide> const index = iScale.getIndexForPixel(position[axis]);
<del> if (!helpers.isNumber(index)) {
<add> if (!isNumber(index)) {
<ide> return false;
<ide> }
<ide> indices.push(index);
<ide> function getNearestItems(chart, position, axis, intersect) {
<ide> * Contains interaction related functions
<ide> * @namespace Chart.Interaction
<ide> */
<del>module.exports = {
<add>export default {
<ide> // Helper function for different modes
<ide> modes: {
<ide> /**
<ide><path>src/core/core.scale.js
<ide> class Scale extends Element {
<ide> maxHeight = me.maxHeight - getTickMarkLength(options.gridLines)
<ide> - tickOpts.padding - getScaleLabelHeight(options.scaleLabel);
<ide> maxLabelDiagonal = Math.sqrt(maxLabelWidth * maxLabelWidth + maxLabelHeight * maxLabelHeight);
<del> labelRotation = helpers.toDegrees(Math.min(
<add> labelRotation = helpers.math.toDegrees(Math.min(
<ide> Math.asin(Math.min((labelSizes.highest.height + 6) / tickWidth, 1)),
<ide> Math.asin(Math.min(maxHeight / maxLabelDiagonal, 1)) - Math.asin(maxLabelHeight / maxLabelDiagonal)
<ide> ));
<ide> class Scale extends Element {
<ide> if (isHorizontal) {
<ide> // A horizontal axis is more constrained by the height.
<ide> var isRotated = me.labelRotation !== 0;
<del> var angleRadians = helpers.toRadians(me.labelRotation);
<add> var angleRadians = helpers.math.toRadians(me.labelRotation);
<ide> var cosRotation = Math.cos(angleRadians);
<ide> var sinRotation = Math.sin(angleRadians);
<ide>
<ide> class Scale extends Element {
<ide> var optionTicks = me.options.ticks;
<ide>
<ide> // Calculate space needed by label in axis direction.
<del> var rot = helpers.toRadians(me.labelRotation);
<add> var rot = helpers.math.toRadians(me.labelRotation);
<ide> var cos = Math.abs(Math.cos(rot));
<ide> var sin = Math.abs(Math.sin(rot));
<ide>
<ide> class Scale extends Element {
<ide> const fonts = parseTickFontOptions(optionTicks);
<ide> const tickPadding = optionTicks.padding;
<ide> const tl = getTickMarkLength(options.gridLines);
<del> const rotation = -helpers.toRadians(me.labelRotation);
<add> const rotation = -helpers.math.toRadians(me.labelRotation);
<ide> const items = [];
<ide> let i, ilen, tick, label, x, y, textAlign, pixel, font, lineHeight, lineCount, textOffset;
<ide>
<ide><path>src/elements/element.arc.js
<ide>
<ide> import defaults from '../core/core.defaults';
<ide> import Element from '../core/core.element';
<del>import helpers from '../helpers';
<add>import {getAngleFromPoint} from '../helpers/helpers.math';
<ide> const TAU = Math.PI * 2;
<ide>
<ide> defaults._set('global', {
<ide> class Arc extends Element {
<ide> var vm = this._view;
<ide>
<ide> if (vm) {
<del> var pointRelativePosition = helpers.getAngleFromPoint(vm, {x: chartX, y: chartY});
<add> var pointRelativePosition = getAngleFromPoint(vm, {x: chartX, y: chartY});
<ide> var angle = pointRelativePosition.angle;
<ide> var distance = pointRelativePosition.distance;
<ide>
<ide><path>src/helpers/helpers.curve.js
<add>import {almostEquals, sign} from './helpers.math';
<add>
<add>const EPSILON = Number.EPSILON || 1e-14;
<add>
<add>export function splineCurve(firstPoint, middlePoint, afterPoint, t) {
<add> // Props to Rob Spencer at scaled innovation for his post on splining between points
<add> // http://scaledinnovation.com/analytics/splines/aboutSplines.html
<add>
<add> // This function must also respect "skipped" points
<add>
<add> var previous = firstPoint.skip ? middlePoint : firstPoint;
<add> var current = middlePoint;
<add> var next = afterPoint.skip ? middlePoint : afterPoint;
<add>
<add> var d01 = Math.sqrt(Math.pow(current.x - previous.x, 2) + Math.pow(current.y - previous.y, 2));
<add> var d12 = Math.sqrt(Math.pow(next.x - current.x, 2) + Math.pow(next.y - current.y, 2));
<add>
<add> var s01 = d01 / (d01 + d12);
<add> var s12 = d12 / (d01 + d12);
<add>
<add>	// If all points are the same, s01 & s12 will be inf
<add> s01 = isNaN(s01) ? 0 : s01;
<add> s12 = isNaN(s12) ? 0 : s12;
<add>
<add> var fa = t * s01; // scaling factor for triangle Ta
<add> var fb = t * s12;
<add>
<add> return {
<add> previous: {
<add> x: current.x - fa * (next.x - previous.x),
<add> y: current.y - fa * (next.y - previous.y)
<add> },
<add> next: {
<add> x: current.x + fb * (next.x - previous.x),
<add> y: current.y + fb * (next.y - previous.y)
<add> }
<add> };
<add>}
<add>
<add>export function splineCurveMonotone(points) {
<add>	// This function calculates Bézier control points in a similar way to |splineCurve|,
<add> // but preserves monotonicity of the provided data and ensures no local extremums are added
<add> // between the dataset discrete points due to the interpolation.
<add> // See : https://en.wikipedia.org/wiki/Monotone_cubic_interpolation
<add>
<add> var pointsWithTangents = (points || []).map(function(point) {
<add> return {
<add> model: point._model,
<add> deltaK: 0,
<add> mK: 0
<add> };
<add> });
<add>
<add> // Calculate slopes (deltaK) and initialize tangents (mK)
<add> var pointsLen = pointsWithTangents.length;
<add> var i, pointBefore, pointCurrent, pointAfter;
<add> for (i = 0; i < pointsLen; ++i) {
<add> pointCurrent = pointsWithTangents[i];
<add> if (pointCurrent.model.skip) {
<add> continue;
<add> }
<add>
<add> pointBefore = i > 0 ? pointsWithTangents[i - 1] : null;
<add> pointAfter = i < pointsLen - 1 ? pointsWithTangents[i + 1] : null;
<add> if (pointAfter && !pointAfter.model.skip) {
<add> var slopeDeltaX = (pointAfter.model.x - pointCurrent.model.x);
<add>
<add> // In the case of two points that appear at the same x pixel, slopeDeltaX is 0
<add> pointCurrent.deltaK = slopeDeltaX !== 0 ? (pointAfter.model.y - pointCurrent.model.y) / slopeDeltaX : 0;
<add> }
<add>
<add> if (!pointBefore || pointBefore.model.skip) {
<add> pointCurrent.mK = pointCurrent.deltaK;
<add> } else if (!pointAfter || pointAfter.model.skip) {
<add> pointCurrent.mK = pointBefore.deltaK;
<add> } else if (sign(pointBefore.deltaK) !== sign(pointCurrent.deltaK)) {
<add> pointCurrent.mK = 0;
<add> } else {
<add> pointCurrent.mK = (pointBefore.deltaK + pointCurrent.deltaK) / 2;
<add> }
<add> }
<add>
<add> // Adjust tangents to ensure monotonic properties
<add> var alphaK, betaK, tauK, squaredMagnitude;
<add> for (i = 0; i < pointsLen - 1; ++i) {
<add> pointCurrent = pointsWithTangents[i];
<add> pointAfter = pointsWithTangents[i + 1];
<add> if (pointCurrent.model.skip || pointAfter.model.skip) {
<add> continue;
<add> }
<add>
<add> if (almostEquals(pointCurrent.deltaK, 0, EPSILON)) {
<add> pointCurrent.mK = pointAfter.mK = 0;
<add> continue;
<add> }
<add>
<add> alphaK = pointCurrent.mK / pointCurrent.deltaK;
<add> betaK = pointAfter.mK / pointCurrent.deltaK;
<add> squaredMagnitude = Math.pow(alphaK, 2) + Math.pow(betaK, 2);
<add> if (squaredMagnitude <= 9) {
<add> continue;
<add> }
<add>
<add> tauK = 3 / Math.sqrt(squaredMagnitude);
<add> pointCurrent.mK = alphaK * tauK * pointCurrent.deltaK;
<add> pointAfter.mK = betaK * tauK * pointCurrent.deltaK;
<add> }
<add>
<add> // Compute control points
<add> var deltaX;
<add> for (i = 0; i < pointsLen; ++i) {
<add> pointCurrent = pointsWithTangents[i];
<add> if (pointCurrent.model.skip) {
<add> continue;
<add> }
<add>
<add> pointBefore = i > 0 ? pointsWithTangents[i - 1] : null;
<add> pointAfter = i < pointsLen - 1 ? pointsWithTangents[i + 1] : null;
<add> if (pointBefore && !pointBefore.model.skip) {
<add> deltaX = (pointCurrent.model.x - pointBefore.model.x) / 3;
<add> pointCurrent.model.controlPointPreviousX = pointCurrent.model.x - deltaX;
<add> pointCurrent.model.controlPointPreviousY = pointCurrent.model.y - deltaX * pointCurrent.mK;
<add> }
<add> if (pointAfter && !pointAfter.model.skip) {
<add> deltaX = (pointAfter.model.x - pointCurrent.model.x) / 3;
<add> pointCurrent.model.controlPointNextX = pointCurrent.model.x + deltaX;
<add> pointCurrent.model.controlPointNextY = pointCurrent.model.y + deltaX * pointCurrent.mK;
<add> }
<add> }
<add>}
<ide><path>src/helpers/helpers.dom.js
<add>/**
<add> * Returns if the given value contains an effective constraint.
<add> * @private
<add> */
<add>function isConstrainedValue(value) {
<add> return value !== undefined && value !== null && value !== 'none';
<add>}
<add>
<add>/**
<add> * @private
<add> */
<add>function _getParentNode(domNode) {
<add> var parent = domNode.parentNode;
<add> if (parent && parent.toString() === '[object ShadowRoot]') {
<add> parent = parent.host;
<add> }
<add> return parent;
<add>}
<add>
<add>// Private helper function to convert max-width/max-height values that may be percentages into a number
<add>function parseMaxStyle(styleValue, node, parentProperty) {
<add> var valueInPixels;
<add> if (typeof styleValue === 'string') {
<add> valueInPixels = parseInt(styleValue, 10);
<add>
<add> if (styleValue.indexOf('%') !== -1) {
<add> // percentage * size in dimension
<add> valueInPixels = valueInPixels / 100 * node.parentNode[parentProperty];
<add> }
<add> } else {
<add> valueInPixels = styleValue;
<add> }
<add>
<add> return valueInPixels;
<add>}
<add>
<add>/**
<add> * Returns the max width or height of the given DOM node in a cross-browser compatible fashion
<add> * @param {HTMLElement} domNode - the node to check the constraint on
<add> * @param {string} maxStyle - the style that defines the maximum for the direction we are using ('max-width' / 'max-height')
<add> * @param {string} percentageProperty - property of parent to use when calculating width as a percentage
<add> * @see {@link https://www.nathanaeljones.com/blog/2013/reading-max-width-cross-browser}
<add> */
<add>function getConstraintDimension(domNode, maxStyle, percentageProperty) {
<add> var view = document.defaultView;
<add> var parentNode = _getParentNode(domNode);
<add> var constrainedNode = view.getComputedStyle(domNode)[maxStyle];
<add> var constrainedContainer = view.getComputedStyle(parentNode)[maxStyle];
<add> var hasCNode = isConstrainedValue(constrainedNode);
<add> var hasCContainer = isConstrainedValue(constrainedContainer);
<add> var infinity = Number.POSITIVE_INFINITY;
<add>
<add> if (hasCNode || hasCContainer) {
<add> return Math.min(
<add> hasCNode ? parseMaxStyle(constrainedNode, domNode, percentageProperty) : infinity,
<add> hasCContainer ? parseMaxStyle(constrainedContainer, parentNode, percentageProperty) : infinity);
<add> }
<add>
<add> return 'none';
<add>}
<add>
<add>export function getStyle(el, property) {
<add> return el.currentStyle ?
<add> el.currentStyle[property] :
<add> document.defaultView.getComputedStyle(el, null).getPropertyValue(property);
<add>}
<add>
<add>// returns Number or undefined if no constraint
<add>function getConstraintWidth(domNode) {
<add> return getConstraintDimension(domNode, 'max-width', 'clientWidth');
<add>}
<add>
<add>// returns Number or undefined if no constraint
<add>function getConstraintHeight(domNode) {
<add> return getConstraintDimension(domNode, 'max-height', 'clientHeight');
<add>}
<add>
<add>/**
<add> * @private
<add> */
<add>function _calculatePadding(container, padding, parentDimension) {
<add> padding = getStyle(container, padding);
<add>
<add> return padding.indexOf('%') > -1 ? parentDimension * parseInt(padding, 10) / 100 : parseInt(padding, 10);
<add>}
<add>
<add>export function getRelativePosition(evt, chart) {
<add> var mouseX, mouseY;
<add> var e = evt.originalEvent || evt;
<add> var canvasElement = evt.target || evt.srcElement;
<add> var boundingRect = canvasElement.getBoundingClientRect();
<add>
<add> var touches = e.touches;
<add> if (touches && touches.length > 0) {
<add> mouseX = touches[0].clientX;
<add> mouseY = touches[0].clientY;
<add>
<add> } else {
<add> mouseX = e.clientX;
<add> mouseY = e.clientY;
<add> }
<add>
<add> // Scale mouse coordinates into canvas coordinates
<add> // by following the pattern laid out by 'jerryj' in the comments of
<add> // https://www.html5canvastutorials.com/advanced/html5-canvas-mouse-coordinates/
<add> var paddingLeft = parseFloat(getStyle(canvasElement, 'padding-left'));
<add> var paddingTop = parseFloat(getStyle(canvasElement, 'padding-top'));
<add> var paddingRight = parseFloat(getStyle(canvasElement, 'padding-right'));
<add> var paddingBottom = parseFloat(getStyle(canvasElement, 'padding-bottom'));
<add> var width = boundingRect.right - boundingRect.left - paddingLeft - paddingRight;
<add> var height = boundingRect.bottom - boundingRect.top - paddingTop - paddingBottom;
<add>
<add> // We divide by the current device pixel ratio, because the canvas is scaled up by that amount in each direction. However
<add> // the backend model is in unscaled coordinates. Since we are going to deal with our model coordinates, we go back here
<add> mouseX = Math.round((mouseX - boundingRect.left - paddingLeft) / (width) * canvasElement.width / chart.currentDevicePixelRatio);
<add> mouseY = Math.round((mouseY - boundingRect.top - paddingTop) / (height) * canvasElement.height / chart.currentDevicePixelRatio);
<add>
<add> return {
<add> x: mouseX,
<add> y: mouseY
<add> };
<add>}
<add>
<add>export function getMaximumWidth(domNode) {
<add> var container = _getParentNode(domNode);
<add> if (!container) {
<add> return domNode.clientWidth;
<add> }
<add>
<add> var clientWidth = container.clientWidth;
<add> var paddingLeft = _calculatePadding(container, 'padding-left', clientWidth);
<add> var paddingRight = _calculatePadding(container, 'padding-right', clientWidth);
<add>
<add> var w = clientWidth - paddingLeft - paddingRight;
<add> var cw = getConstraintWidth(domNode);
<add> return isNaN(cw) ? w : Math.min(w, cw);
<add>}
<add>
<add>export function getMaximumHeight(domNode) {
<add> var container = _getParentNode(domNode);
<add> if (!container) {
<add> return domNode.clientHeight;
<add> }
<add>
<add> var clientHeight = container.clientHeight;
<add> var paddingTop = _calculatePadding(container, 'padding-top', clientHeight);
<add> var paddingBottom = _calculatePadding(container, 'padding-bottom', clientHeight);
<add>
<add> var h = clientHeight - paddingTop - paddingBottom;
<add> var ch = getConstraintHeight(domNode);
<add> return isNaN(ch) ? h : Math.min(h, ch);
<add>}
<add>
<add>export function retinaScale(chart, forceRatio) {
<add> var pixelRatio = chart.currentDevicePixelRatio = forceRatio || (typeof window !== 'undefined' && window.devicePixelRatio) || 1;
<add> if (pixelRatio === 1) {
<add> return;
<add> }
<add>
<add> var canvasElement = chart.canvas;
<add> var height = chart.height;
<add> var width = chart.width;
<add>
<add> canvasElement.height = height * pixelRatio;
<add> canvasElement.width = width * pixelRatio;
<add> chart.ctx.scale(pixelRatio, pixelRatio);
<add>
<add> // If no style has been set on the canvas, the render size is used as display size,
<add> // making the chart visually bigger, so let's enforce it to the "correct" values.
<add> // See https://github.com/chartjs/Chart.js/issues/3575
<add> if (!canvasElement.style.height && !canvasElement.style.width) {
<add> canvasElement.style.height = height + 'px';
<add> canvasElement.style.width = width + 'px';
<add> }
<add>}
<ide><path>src/helpers/helpers.math.js
<ide> 'use strict';
<ide>
<add>import {isFinite as isFiniteNumber} from './helpers.core';
<add>
<ide> /**
<ide> * @alias Chart.helpers.math
<ide> * @namespace
<ide> export const log10 = Math.log10 || function(x) {
<ide>
<ide> return isPowerOf10 ? powerOf10 : exponent;
<ide> };
<add>
<add>
<add>export function isNumber(n) {
<add> return !isNaN(parseFloat(n)) && isFinite(n);
<add>}
<add>
<add>export function almostEquals(x, y, epsilon) {
<add> return Math.abs(x - y) < epsilon;
<add>}
<add>
<add>export function almostWhole(x, epsilon) {
<add> var rounded = Math.round(x);
<add> return ((rounded - epsilon) <= x) && ((rounded + epsilon) >= x);
<add>}
<add>
<add>export function _setMinAndMax(array, target) {
<add> var i, ilen, value;
<add>
<add> for (i = 0, ilen = array.length; i < ilen; i++) {
<add> value = array[i];
<add> if (!isNaN(value)) {
<add> target.min = Math.min(target.min, value);
<add> target.max = Math.max(target.max, value);
<add> }
<add> }
<add>}
<add>
<add>export function _setMinAndMaxByKey(array, target, property) {
<add> var i, ilen, value;
<add>
<add> for (i = 0, ilen = array.length; i < ilen; i++) {
<add> value = array[i][property];
<add> if (!isNaN(value)) {
<add> target.min = Math.min(target.min, value);
<add> target.max = Math.max(target.max, value);
<add> }
<add> }
<add>}
<add>
<add>export const sign = Math.sign ?
<add> function(x) {
<add> return Math.sign(x);
<add> } :
<add> function(x) {
<add> x = +x; // convert to a number
<add> if (x === 0 || isNaN(x)) {
<add> return x;
<add> }
<add> return x > 0 ? 1 : -1;
<add> };
<add>
<add>export function toRadians(degrees) {
<add> return degrees * (Math.PI / 180);
<add>}
<add>
<add>export function toDegrees(radians) {
<add> return radians * (180 / Math.PI);
<add>}
<add>
<add>/**
<add> * Returns the number of decimal places
<add> * i.e. the number of digits after the decimal point, of the value of this Number.
<add> * @param {number} x - A number.
<add> * @returns {number} The number of decimal places.
<add> * @private
<add> */
<add>export function _decimalPlaces(x) {
<add> if (!isFiniteNumber(x)) {
<add> return;
<add> }
<add> var e = 1;
<add> var p = 0;
<add> while (Math.round(x * e) / e !== x) {
<add> e *= 10;
<add> p++;
<add> }
<add> return p;
<add>}
<add>
<add>// Gets the angle from vertical upright to the point about a centre.
<add>export function getAngleFromPoint(centrePoint, anglePoint) {
<add> var distanceFromXCenter = anglePoint.x - centrePoint.x;
<add> var distanceFromYCenter = anglePoint.y - centrePoint.y;
<add> var radialDistanceFromCenter = Math.sqrt(distanceFromXCenter * distanceFromXCenter + distanceFromYCenter * distanceFromYCenter);
<add>
<add> var angle = Math.atan2(distanceFromYCenter, distanceFromXCenter);
<add>
<add> if (angle < (-0.5 * Math.PI)) {
<add> angle += 2.0 * Math.PI; // make sure the returned angle is in the range of (-PI/2, 3PI/2]
<add> }
<add>
<add> return {
<add> angle: angle,
<add> distance: radialDistanceFromCenter
<add> };
<add>}
<add>
<add>export function distanceBetweenPoints(pt1, pt2) {
<add> return Math.sqrt(Math.pow(pt2.x - pt1.x, 2) + Math.pow(pt2.y - pt1.y, 2));
<add>}
<ide><path>src/helpers/index.js
<ide> 'use strict';
<ide>
<add>import color from 'chartjs-color';
<add>
<ide> import * as coreHelpers from './helpers.core';
<ide> import * as canvas from './helpers.canvas';
<add>import * as curve from './helpers.curve';
<add>import * as dom from './helpers.dom';
<ide> import * as easing from './helpers.easing';
<ide> import * as options from './helpers.options';
<ide> import * as math from './helpers.math';
<ide> import * as rtl from './helpers.rtl';
<ide>
<add>const colorHelper = !color ?
<add> function(value) {
<add> console.error('Color.js not found!');
<add> return value;
<add> } :
<add> function(value) {
<add> if (value instanceof CanvasGradient || value instanceof CanvasPattern) {
<add> // TODO: figure out what this should be. Previously returned
<add> // the default color
<add> return value;
<add> }
<add>
<add> return color(value);
<add> };
<add>
<add>function measureText(ctx, data, gc, longest, string) {
<add> var textWidth = data[string];
<add> if (!textWidth) {
<add> textWidth = data[string] = ctx.measureText(string).width;
<add> gc.push(string);
<add> }
<add> if (textWidth > longest) {
<add> longest = textWidth;
<add> }
<add> return longest;
<add>}
<add>
<ide> export default {
<ide> ...coreHelpers,
<ide> canvas,
<add> curve,
<add> dom,
<ide> easing,
<ide> options,
<ide> math,
<ide> rtl,
<add>
<add> where: function(collection, filterCallback) {
<add> if (coreHelpers.isArray(collection) && Array.prototype.filter) {
<add> return collection.filter(filterCallback);
<add> }
<add> var filtered = [];
<add>
<add> coreHelpers.each(collection, function(item) {
<add> if (filterCallback(item)) {
<add> filtered.push(item);
<add> }
<add> });
<add>
<add> return filtered;
<add> },
<add> findIndex: Array.prototype.findIndex ?
<add> function(array, callback, scope) {
<add> return array.findIndex(callback, scope);
<add> } :
<add> function(array, callback, scope) {
<add> scope = scope === undefined ? array : scope;
<add> for (var i = 0, ilen = array.length; i < ilen; ++i) {
<add> if (callback.call(scope, array[i], i, array)) {
<add> return i;
<add> }
<add> }
<add> return -1;
<add> },
<add> findNextWhere: function(arrayToSearch, filterCallback, startIndex) {
<add> // Default to start of the array
<add> if (coreHelpers.isNullOrUndef(startIndex)) {
<add> startIndex = -1;
<add> }
<add> for (var i = startIndex + 1; i < arrayToSearch.length; i++) {
<add> var currentItem = arrayToSearch[i];
<add> if (filterCallback(currentItem)) {
<add> return currentItem;
<add> }
<add> }
<add> },
<add> findPreviousWhere: function(arrayToSearch, filterCallback, startIndex) {
<add> // Default to end of the array
<add> if (coreHelpers.isNullOrUndef(startIndex)) {
<add> startIndex = arrayToSearch.length;
<add> }
<add> for (var i = startIndex - 1; i >= 0; i--) {
<add> var currentItem = arrayToSearch[i];
<add> if (filterCallback(currentItem)) {
<add> return currentItem;
<add> }
<add> }
<add> },
<add> // Implementation of the nice number algorithm used in determining where axis labels will go
<add> niceNum: function(range, round) {
<add> var exponent = Math.floor(math.log10(range));
<add> var fraction = range / Math.pow(10, exponent);
<add> var niceFraction;
<add>
<add> if (round) {
<add> if (fraction < 1.5) {
<add> niceFraction = 1;
<add> } else if (fraction < 3) {
<add> niceFraction = 2;
<add> } else if (fraction < 7) {
<add> niceFraction = 5;
<add> } else {
<add> niceFraction = 10;
<add> }
<add> } else if (fraction <= 1.0) {
<add> niceFraction = 1;
<add> } else if (fraction <= 2) {
<add> niceFraction = 2;
<add> } else if (fraction <= 5) {
<add> niceFraction = 5;
<add> } else {
<add> niceFraction = 10;
<add> }
<add>
<add> return niceFraction * Math.pow(10, exponent);
<add> },
<add> // Request animation polyfill - https://www.paulirish.com/2011/requestanimationframe-for-smart-animating/
<add> requestAnimFrame: (function() {
<add> if (typeof window === 'undefined') {
<add> return function(callback) {
<add> callback();
<add> };
<add> }
<add> return window.requestAnimationFrame ||
<add> window.webkitRequestAnimationFrame ||
<add> window.mozRequestAnimationFrame ||
<add> window.oRequestAnimationFrame ||
<add> window.msRequestAnimationFrame ||
<add> function(callback) {
<add> return window.setTimeout(callback, 1000 / 60);
<add> };
<add> }()),
<add> // -- Canvas methods
<add> fontString: function(pixelSize, fontStyle, fontFamily) {
<add> return fontStyle + ' ' + pixelSize + 'px ' + fontFamily;
<add> },
<add> longestText: function(ctx, font, arrayOfThings, cache) {
<add> cache = cache || {};
<add> var data = cache.data = cache.data || {};
<add> var gc = cache.garbageCollect = cache.garbageCollect || [];
<add>
<add> if (cache.font !== font) {
<add> data = cache.data = {};
<add> gc = cache.garbageCollect = [];
<add> cache.font = font;
<add> }
<add>
<add> ctx.font = font;
<add> var longest = 0;
<add> var ilen = arrayOfThings.length;
<add> var i, j, jlen, thing, nestedThing;
<add> for (i = 0; i < ilen; i++) {
<add> thing = arrayOfThings[i];
<add>
<add> // Undefined strings and arrays should not be measured
<add> if (thing !== undefined && thing !== null && coreHelpers.isArray(thing) !== true) {
<add> longest = measureText(ctx, data, gc, longest, thing);
<add> } else if (coreHelpers.isArray(thing)) {
<add> // if it is an array lets measure each element
<add> // to do maybe simplify this function a bit so we can do this more recursively?
<add> for (j = 0, jlen = thing.length; j < jlen; j++) {
<add> nestedThing = thing[j];
<add> // Undefined strings and arrays should not be measured
<add> if (nestedThing !== undefined && nestedThing !== null && !coreHelpers.isArray(nestedThing)) {
<add> longest = measureText(ctx, data, gc, longest, nestedThing);
<add> }
<add> }
<add> }
<add> }
<add>
<add> var gcLen = gc.length / 2;
<add> if (gcLen > arrayOfThings.length) {
<add> for (i = 0; i < gcLen; i++) {
<add> delete data[gc[i]];
<add> }
<add> gc.splice(0, gcLen);
<add> }
<add> return longest;
<add> },
<add> measureText,
<add> color: colorHelper,
<add> getHoverColor: function(colorValue) {
<add> return (colorValue instanceof CanvasPattern || colorValue instanceof CanvasGradient) ?
<add> colorValue :
<add> colorHelper(colorValue).saturate(0.5).darken(0.1).rgbString();
<add> }
<ide> };
<ide><path>src/index.js
<ide> var Chart = require('./core/core.controller');
<ide>
<ide> Chart.helpers = require('./helpers/index');
<del>
<del>// @todo dispatch these helpers into appropriated helpers/helpers.* file and write unit tests!
<del>require('./core/core.helpers')(Chart);
<del>
<ide> Chart._adapters = require('./core/core.adapters');
<ide> Chart.Animation = require('./core/core.animation');
<ide> Chart.animationService = require('./core/core.animations');
<ide><path>src/platforms/platform.dom.js
<ide> var EVENT_TYPES = {
<ide> * @returns {number} Size in pixels or undefined if unknown.
<ide> */
<ide> function readUsedSize(element, property) {
<del> var value = helpers.getStyle(element, property);
<add> var value = helpers.dom.getStyle(element, property);
<ide> var matches = value && value.match(/^(\d+)(\.\d+)?px$/);
<ide> return matches ? Number(matches[1]) : undefined;
<ide> }
<ide> function createEvent(type, chart, x, y, nativeEvent) {
<ide>
<ide> function fromNativeEvent(event, chart) {
<ide> var type = EVENT_TYPES[event.type] || event.type;
<del> var pos = helpers.getRelativePosition(event, chart);
<add> var pos = helpers.dom.getRelativePosition(event, chart);
<ide> return createEvent(type, chart, pos.x, pos.y, event);
<ide> }
<ide>
<ide><path>src/scales/scale.linearbase.js
<ide> 'use strict';
<ide>
<ide> import helpers from '../helpers/index';
<add>import {almostEquals, almostWhole, _decimalPlaces, _setMinAndMaxByKey, sign} from '../helpers/helpers.math';
<ide> import Scale from '../core/core.scale';
<ide>
<ide> const isNullOrUndef = helpers.isNullOrUndef;
<ide> function generateTicks(generationOptions, dataRange) {
<ide>
<ide> if (stepSize || isNullOrUndef(precision)) {
<ide> // If a precision is not specified, calculate factor based on spacing
<del> factor = Math.pow(10, helpers._decimalPlaces(spacing));
<add> factor = Math.pow(10, _decimalPlaces(spacing));
<ide> } else {
<ide> // If the user specified a precision, round to that number of decimal places
<ide> factor = Math.pow(10, precision);
<ide> function generateTicks(generationOptions, dataRange) {
<ide> // If min, max and stepSize is set and they make an evenly spaced scale use it.
<ide> if (stepSize) {
<ide> // If very close to our whole number, use it.
<del> if (!isNullOrUndef(min) && helpers.almostWhole(min / spacing, spacing / 1000)) {
<add> if (!isNullOrUndef(min) && almostWhole(min / spacing, spacing / 1000)) {
<ide> niceMin = min;
<ide> }
<del> if (!isNullOrUndef(max) && helpers.almostWhole(max / spacing, spacing / 1000)) {
<add> if (!isNullOrUndef(max) && almostWhole(max / spacing, spacing / 1000)) {
<ide> niceMax = max;
<ide> }
<ide> }
<ide>
<ide> numSpaces = (niceMax - niceMin) / spacing;
<ide> // If very close to our rounded value, use it.
<del> if (helpers.almostEquals(numSpaces, Math.round(numSpaces), spacing / 1000)) {
<add> if (almostEquals(numSpaces, Math.round(numSpaces), spacing / 1000)) {
<ide> numSpaces = Math.round(numSpaces);
<ide> } else {
<ide> numSpaces = Math.ceil(numSpaces);
<ide> class LinearScaleBase extends Scale {
<ide> // do nothing since that would make the chart weird. If the user really wants a weird chart
<ide> // axis, they can manually override it
<ide> if (opts.beginAtZero) {
<del> var minSign = helpers.sign(me.min);
<del> var maxSign = helpers.sign(me.max);
<add> var minSign = sign(me.min);
<add> var maxSign = sign(me.max);
<ide>
<ide> if (minSign < 0 && maxSign < 0) {
<ide> // move the top up to 0
<ide> class LinearScaleBase extends Scale {
<ide>
<ide> // At this point, we need to update our max and min given the tick values since we have expanded the
<ide> // range of the scale
<del> helpers._setMinAndMaxByKey(ticks, me, 'value');
<add> _setMinAndMaxByKey(ticks, me, 'value');
<ide>
<ide> if (opts.reverse) {
<ide> ticks.reverse();
<ide><path>src/scales/scale.logarithmic.js
<ide>
<ide> import defaults from '../core/core.defaults';
<ide> import helpers from '../helpers/index';
<add>import {_setMinAndMaxByKey} from '../helpers/helpers.math';
<ide> import Scale from '../core/core.scale';
<ide> import LinearScaleBase from './scale.linearbase';
<ide> import Ticks from '../core/core.ticks';
<ide> class LogarithmicScale extends Scale {
<ide>
<ide> // At this point, we need to update our max and min given the tick values since we have expanded the
<ide> // range of the scale
<del> helpers._setMinAndMaxByKey(ticks, me, 'value');
<add> _setMinAndMaxByKey(ticks, me, 'value');
<ide>
<ide> if (opts.reverse) {
<ide> reverse = !reverse;
<ide><path>src/scales/scale.radialLinear.js
<ide>
<ide> import defaults from '../core/core.defaults';
<ide> import helpers from '../helpers/index';
<add>import {isNumber, toDegrees} from '../helpers/helpers.math';
<ide> import LinearScaleBase from './scale.linearbase';
<ide> import Ticks from '../core/core.ticks';
<ide>
<ide> function fitWithPointLabels(scale) {
<ide>
<ide> // Add quarter circle to make degree 0 mean top of circle
<ide> var angleRadians = scale.getIndexAngle(i);
<del> var angle = helpers.toDegrees(angleRadians) % 360;
<add> var angle = toDegrees(angleRadians) % 360;
<ide> var hLimits = determineLimits(angle, pointPosition.x, textSize.w, 0, 180);
<ide> var vLimits = determineLimits(angle, pointPosition.y, textSize.h, 90, 270);
<ide>
<ide> function drawPointLabels(scale) {
<ide> ctx.fillStyle = pointLabelFontColor;
<ide>
<ide> var angleRadians = scale.getIndexAngle(i);
<del> var angle = helpers.toDegrees(angleRadians);
<add> var angle = toDegrees(angleRadians);
<ide> ctx.textAlign = getTextAlignForAngle(angle);
<ide> adjustPointPositionForLabelHeight(angle, scale._pointLabelSizes[i], pointLabelPosition);
<ide> fillText(ctx, scale.pointLabels[i], pointLabelPosition, plFont.lineHeight);
<ide> function drawRadiusLine(scale, gridLineOpts, radius, index) {
<ide> }
<ide>
<ide> function numberOrZero(param) {
<del> return helpers.isNumber(param) ? param : 0;
<add> return isNumber(param) ? param : 0;
<ide> }
<ide>
<ide> class RadialLinearScale extends LinearScaleBase {
<ide><path>src/scales/scale.time.js
<ide> import adapters from '../core/core.adapters';
<ide> import defaults from '../core/core.defaults';
<ide> import helpers from '../helpers/index';
<add>import {toRadians} from '../helpers/helpers.math';
<ide> import Scale from '../core/core.scale';
<ide>
<ide> const resolve = helpers.options.resolve;
<ide> class TimeScale extends Scale {
<ide> const me = this;
<ide> const ticksOpts = me.options.ticks;
<ide> const tickLabelWidth = me.ctx.measureText(label).width;
<del> const angle = helpers.toRadians(me.isHorizontal() ? ticksOpts.maxRotation : ticksOpts.minRotation);
<add> const angle = toRadians(me.isHorizontal() ? ticksOpts.maxRotation : ticksOpts.minRotation);
<ide> const cosRotation = Math.cos(angle);
<ide> const sinRotation = Math.sin(angle);
<ide> const tickFontSize = valueOrDefault(ticksOpts.fontSize, defaults.global.defaultFontSize);
<ide><path>test/specs/core.helpers.tests.js
<ide> describe('Core helper tests', function() {
<ide> expect(helpers.findPreviousWhere(data, callback, 0)).toBe(undefined);
<ide> });
<ide>
<del> it('should get the correct sign', function() {
<del> expect(helpers.sign(0)).toBe(0);
<del> expect(helpers.sign(10)).toBe(1);
<del> expect(helpers.sign(-5)).toBe(-1);
<del> });
<del>
<del> it('should correctly determine if two numbers are essentially equal', function() {
<del> expect(helpers.almostEquals(0, Number.EPSILON, 2 * Number.EPSILON)).toBe(true);
<del> expect(helpers.almostEquals(1, 1.1, 0.0001)).toBe(false);
<del> expect(helpers.almostEquals(1e30, 1e30 + Number.EPSILON, 0)).toBe(false);
<del> expect(helpers.almostEquals(1e30, 1e30 + Number.EPSILON, 2 * Number.EPSILON)).toBe(true);
<del> });
<del>
<del> it('should correctly determine if a numbers are essentially whole', function() {
<del> expect(helpers.almostWhole(0.99999, 0.0001)).toBe(true);
<del> expect(helpers.almostWhole(0.9, 0.0001)).toBe(false);
<del> expect(helpers.almostWhole(1234567890123, 0.0001)).toBe(true);
<del> expect(helpers.almostWhole(1234567890123.001, 0.0001)).toBe(false);
<del> });
<del>
<ide> it('should generate integer ids', function() {
<ide> var uid = helpers.uid();
<ide> expect(uid).toEqual(jasmine.any(Number));
<ide> describe('Core helper tests', function() {
<ide> expect(helpers.uid()).toBe(uid + 3);
<ide> });
<ide>
<del> it('should detect a number', function() {
<del> expect(helpers.isNumber(123)).toBe(true);
<del> expect(helpers.isNumber('123')).toBe(true);
<del> expect(helpers.isNumber(null)).toBe(false);
<del> expect(helpers.isNumber(NaN)).toBe(false);
<del> expect(helpers.isNumber(undefined)).toBe(false);
<del> expect(helpers.isNumber('cbc')).toBe(false);
<del> });
<del>
<del> it('should convert between radians and degrees', function() {
<del> expect(helpers.toRadians(180)).toBe(Math.PI);
<del> expect(helpers.toRadians(90)).toBe(0.5 * Math.PI);
<del> expect(helpers.toDegrees(Math.PI)).toBe(180);
<del> expect(helpers.toDegrees(Math.PI * 3 / 2)).toBe(270);
<del> });
<del>
<del> it('should get the correct number of decimal places', function() {
<del> expect(helpers._decimalPlaces(100)).toBe(0);
<del> expect(helpers._decimalPlaces(1)).toBe(0);
<del> expect(helpers._decimalPlaces(0)).toBe(0);
<del> expect(helpers._decimalPlaces(0.01)).toBe(2);
<del> expect(helpers._decimalPlaces(-0.01)).toBe(2);
<del> expect(helpers._decimalPlaces('1')).toBe(undefined);
<del> expect(helpers._decimalPlaces('')).toBe(undefined);
<del> expect(helpers._decimalPlaces(undefined)).toBe(undefined);
<del> expect(helpers._decimalPlaces(12345678.1234)).toBe(4);
<del> expect(helpers._decimalPlaces(1234567890.1234567)).toBe(7);
<del> });
<del>
<del> it('should get an angle from a point', function() {
<del> var center = {
<del> x: 0,
<del> y: 0
<del> };
<del>
<del> expect(helpers.getAngleFromPoint(center, {
<del> x: 0,
<del> y: 10
<del> })).toEqual({
<del> angle: Math.PI / 2,
<del> distance: 10,
<del> });
<del>
<del> expect(helpers.getAngleFromPoint(center, {
<del> x: Math.sqrt(2),
<del> y: Math.sqrt(2)
<del> })).toEqual({
<del> angle: Math.PI / 4,
<del> distance: 2
<del> });
<del>
<del> expect(helpers.getAngleFromPoint(center, {
<del> x: -1.0 * Math.sqrt(2),
<del> y: -1.0 * Math.sqrt(2)
<del> })).toEqual({
<del> angle: Math.PI * 1.25,
<del> distance: 2
<del> });
<del> });
<del>
<del> it('should spline curves', function() {
<del> expect(helpers.splineCurve({
<del> x: 0,
<del> y: 0
<del> }, {
<del> x: 1,
<del> y: 1
<del> }, {
<del> x: 2,
<del> y: 0
<del> }, 0)).toEqual({
<del> previous: {
<del> x: 1,
<del> y: 1,
<del> },
<del> next: {
<del> x: 1,
<del> y: 1,
<del> }
<del> });
<del>
<del> expect(helpers.splineCurve({
<del> x: 0,
<del> y: 0
<del> }, {
<del> x: 1,
<del> y: 1
<del> }, {
<del> x: 2,
<del> y: 0
<del> }, 1)).toEqual({
<del> previous: {
<del> x: 0,
<del> y: 1,
<del> },
<del> next: {
<del> x: 2,
<del> y: 1,
<del> }
<del> });
<del> });
<del>
<del> it('should spline curves with monotone cubic interpolation', function() {
<del> var dataPoints = [
<del> {_model: {x: 0, y: 0, skip: false}},
<del> {_model: {x: 3, y: 6, skip: false}},
<del> {_model: {x: 9, y: 6, skip: false}},
<del> {_model: {x: 12, y: 60, skip: false}},
<del> {_model: {x: 15, y: 60, skip: false}},
<del> {_model: {x: 18, y: 120, skip: false}},
<del> {_model: {x: null, y: null, skip: true}},
<del> {_model: {x: 21, y: 180, skip: false}},
<del> {_model: {x: 24, y: 120, skip: false}},
<del> {_model: {x: 27, y: 125, skip: false}},
<del> {_model: {x: 30, y: 105, skip: false}},
<del> {_model: {x: 33, y: 110, skip: false}},
<del> {_model: {x: 33, y: 110, skip: false}},
<del> {_model: {x: 36, y: 170, skip: false}}
<del> ];
<del> helpers.splineCurveMonotone(dataPoints);
<del> expect(dataPoints).toEqual([{
<del> _model: {
<del> x: 0,
<del> y: 0,
<del> skip: false,
<del> controlPointNextX: 1,
<del> controlPointNextY: 2
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 3,
<del> y: 6,
<del> skip: false,
<del> controlPointPreviousX: 2,
<del> controlPointPreviousY: 6,
<del> controlPointNextX: 5,
<del> controlPointNextY: 6
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 9,
<del> y: 6,
<del> skip: false,
<del> controlPointPreviousX: 7,
<del> controlPointPreviousY: 6,
<del> controlPointNextX: 10,
<del> controlPointNextY: 6
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 12,
<del> y: 60,
<del> skip: false,
<del> controlPointPreviousX: 11,
<del> controlPointPreviousY: 60,
<del> controlPointNextX: 13,
<del> controlPointNextY: 60
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 15,
<del> y: 60,
<del> skip: false,
<del> controlPointPreviousX: 14,
<del> controlPointPreviousY: 60,
<del> controlPointNextX: 16,
<del> controlPointNextY: 60
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 18,
<del> y: 120,
<del> skip: false,
<del> controlPointPreviousX: 17,
<del> controlPointPreviousY: 100
<del> }
<del> },
<del> {
<del> _model: {
<del> x: null,
<del> y: null,
<del> skip: true
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 21,
<del> y: 180,
<del> skip: false,
<del> controlPointNextX: 22,
<del> controlPointNextY: 160
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 24,
<del> y: 120,
<del> skip: false,
<del> controlPointPreviousX: 23,
<del> controlPointPreviousY: 120,
<del> controlPointNextX: 25,
<del> controlPointNextY: 120
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 27,
<del> y: 125,
<del> skip: false,
<del> controlPointPreviousX: 26,
<del> controlPointPreviousY: 125,
<del> controlPointNextX: 28,
<del> controlPointNextY: 125
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 30,
<del> y: 105,
<del> skip: false,
<del> controlPointPreviousX: 29,
<del> controlPointPreviousY: 105,
<del> controlPointNextX: 31,
<del> controlPointNextY: 105
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 33,
<del> y: 110,
<del> skip: false,
<del> controlPointPreviousX: 32,
<del> controlPointPreviousY: 110,
<del> controlPointNextX: 33,
<del> controlPointNextY: 110
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 33,
<del> y: 110,
<del> skip: false,
<del> controlPointPreviousX: 33,
<del> controlPointPreviousY: 110,
<del> controlPointNextX: 34,
<del> controlPointNextY: 110
<del> }
<del> },
<del> {
<del> _model: {
<del> x: 36,
<del> y: 170,
<del> skip: false,
<del> controlPointPreviousX: 35,
<del> controlPointPreviousY: 150
<del> }
<del> }]);
<del> });
<del>
<ide> it('should return the width of the longest text in an Array and 2D Array', function() {
<ide> var context = window.createMockContext();
<ide> var font = "normal 12px 'Helvetica Neue', 'Helvetica', 'Arial', sans-serif";
<ide> describe('Core helper tests', function() {
<ide> }]);
<ide> });
<ide>
<del> it ('should get the maximum width and height for a node', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> // Create the div we want to get the max size for
<del> var innerDiv = document.createElement('div');
<del> div.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumWidth(innerDiv)).toBe(200);
<del> expect(helpers.getMaximumHeight(innerDiv)).toBe(300);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should get the maximum width and height for a node in a ShadowRoot', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> if (!div.attachShadow) {
<del> // Shadow DOM is not natively supported
<del> return;
<del> }
<del>
<del> var shadow = div.attachShadow({mode: 'closed'});
<del>
<del> // Create the div we want to get the max size for
<del> var innerDiv = document.createElement('div');
<del> shadow.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumWidth(innerDiv)).toBe(200);
<del> expect(helpers.getMaximumHeight(innerDiv)).toBe(300);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should get the maximum width of a node that has a max-width style', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> // Create the div we want to get the max size for and set a max-width style
<del> var innerDiv = document.createElement('div');
<del> innerDiv.style.maxWidth = '150px';
<del> div.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumWidth(innerDiv)).toBe(150);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should get the maximum height of a node that has a max-height style', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> // Create the div we want to get the max size for and set a max-height style
<del> var innerDiv = document.createElement('div');
<del> innerDiv.style.maxHeight = '150px';
<del> div.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumHeight(innerDiv)).toBe(150);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should get the maximum width of a node when the parent has a max-width style', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> // Create an inner wrapper around our div we want to size and give that a max-width style
<del> var parentDiv = document.createElement('div');
<del> parentDiv.style.maxWidth = '150px';
<del> div.appendChild(parentDiv);
<del>
<del> // Create the div we want to get the max size for
<del> var innerDiv = document.createElement('div');
<del> parentDiv.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumWidth(innerDiv)).toBe(150);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should get the maximum height of a node when the parent has a max-height style', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> // Create an inner wrapper around our div we want to size and give that a max-height style
<del> var parentDiv = document.createElement('div');
<del> parentDiv.style.maxHeight = '150px';
<del> div.appendChild(parentDiv);
<del>
<del> // Create the div we want to get the max size for
<del> var innerDiv = document.createElement('div');
<del> innerDiv.style.height = '300px'; // make it large
<del> parentDiv.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumHeight(innerDiv)).toBe(150);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should get the maximum width of a node that has a percentage max-width style', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> // Create the div we want to get the max size for and set a max-width style
<del> var innerDiv = document.createElement('div');
<del> innerDiv.style.maxWidth = '50%';
<del> div.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumWidth(innerDiv)).toBe(100);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should get the maximum height of a node that has a percentage max-height style', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> // Create the div we want to get the max size for and set a max-height style
<del> var innerDiv = document.createElement('div');
<del> innerDiv.style.maxHeight = '50%';
<del> div.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumHeight(innerDiv)).toBe(150);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should get the maximum width of a node when the parent has a percentage max-width style', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> // Create an inner wrapper around our div we want to size and give that a max-width style
<del> var parentDiv = document.createElement('div');
<del> parentDiv.style.maxWidth = '50%';
<del> div.appendChild(parentDiv);
<del>
<del> // Create the div we want to get the max size for
<del> var innerDiv = document.createElement('div');
<del> parentDiv.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumWidth(innerDiv)).toBe(100);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should get the maximum height of a node when the parent has a percentage max-height style', function() {
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '200px';
<del> div.style.height = '300px';
<del>
<del> document.body.appendChild(div);
<del>
<del> // Create an inner wrapper around our div we want to size and give that a max-height style
<del> var parentDiv = document.createElement('div');
<del> parentDiv.style.maxHeight = '50%';
<del> div.appendChild(parentDiv);
<del>
<del> var innerDiv = document.createElement('div');
<del> innerDiv.style.height = '300px'; // make it large
<del> parentDiv.appendChild(innerDiv);
<del>
<del> expect(helpers.getMaximumHeight(innerDiv)).toBe(150);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<del> it ('should leave styled height and width on canvas if explicitly set', function() {
<del> var chart = window.acquireChart({}, {
<del> canvas: {
<del> height: 200,
<del> width: 200,
<del> style: 'height: 400px; width: 400px;'
<del> }
<del> });
<del>
<del> helpers.retinaScale(chart, true);
<del>
<del> var canvas = chart.canvas;
<del>
<del> expect(canvas.style.height).toBe('400px');
<del> expect(canvas.style.width).toBe('400px');
<del> });
<del>
<del> it ('Should get padding of parent as number (pixels) when defined as percent (returns incorrectly in IE11)', function() {
<del>
<del> // Create div with fixed size as a test bed
<del> var div = document.createElement('div');
<del> div.style.width = '300px';
<del> div.style.height = '300px';
<del> document.body.appendChild(div);
<del>
<del> // Inner DIV to have 5% padding of parent
<del> var innerDiv = document.createElement('div');
<del>
<del> div.appendChild(innerDiv);
<del>
<del> var canvas = document.createElement('canvas');
<del> innerDiv.appendChild(canvas);
<del>
<del> // No padding
<del> expect(helpers.getMaximumWidth(canvas)).toBe(300);
<del>
<del> // test with percentage
<del> innerDiv.style.padding = '5%';
<del> expect(helpers.getMaximumWidth(canvas)).toBe(270);
<del>
<del> // test with pixels
<del> innerDiv.style.padding = '10px';
<del> expect(helpers.getMaximumWidth(canvas)).toBe(280);
<del>
<del> document.body.removeChild(div);
<del> });
<del>
<ide> describe('Color helper', function() {
<ide> function isColorInstance(obj) {
<ide> return typeof obj === 'object' && Object.prototype.hasOwnProperty.call(obj, 'values') && Object.prototype.hasOwnProperty.call(obj.values, 'rgb');
<ide> describe('Core helper tests', function() {
<ide> it('should return a color when called with a color', function() {
<ide> expect(isColorInstance(helpers.color('rgb(1, 2, 3)'))).toBe(true);
<ide> });
<del>
<del> it('should return a color when called with a CanvasGradient instance', function() {
<del> var context = document.createElement('canvas').getContext('2d');
<del> var gradient = context.createLinearGradient(0, 1, 2, 3);
<del>
<del> expect(isColorInstance(helpers.color(gradient))).toBe(true);
<del> });
<ide> });
<ide>
<ide> describe('Background hover color helper', function() {
<ide><path>test/specs/helpers.curve.tests.js
<add>describe('Curve helper tests', function() {
<add> let helpers;
<add>
<add> beforeAll(function() {
<add> helpers = window.Chart.helpers.curve;
<add> });
<add>
<add> it('should spline curves', function() {
<add> expect(helpers.splineCurve({
<add> x: 0,
<add> y: 0
<add> }, {
<add> x: 1,
<add> y: 1
<add> }, {
<add> x: 2,
<add> y: 0
<add> }, 0)).toEqual({
<add> previous: {
<add> x: 1,
<add> y: 1,
<add> },
<add> next: {
<add> x: 1,
<add> y: 1,
<add> }
<add> });
<add>
<add> expect(helpers.splineCurve({
<add> x: 0,
<add> y: 0
<add> }, {
<add> x: 1,
<add> y: 1
<add> }, {
<add> x: 2,
<add> y: 0
<add> }, 1)).toEqual({
<add> previous: {
<add> x: 0,
<add> y: 1,
<add> },
<add> next: {
<add> x: 2,
<add> y: 1,
<add> }
<add> });
<add> });
<add>
<add> it('should spline curves with monotone cubic interpolation', function() {
<add> var dataPoints = [
<add> {_model: {x: 0, y: 0, skip: false}},
<add> {_model: {x: 3, y: 6, skip: false}},
<add> {_model: {x: 9, y: 6, skip: false}},
<add> {_model: {x: 12, y: 60, skip: false}},
<add> {_model: {x: 15, y: 60, skip: false}},
<add> {_model: {x: 18, y: 120, skip: false}},
<add> {_model: {x: null, y: null, skip: true}},
<add> {_model: {x: 21, y: 180, skip: false}},
<add> {_model: {x: 24, y: 120, skip: false}},
<add> {_model: {x: 27, y: 125, skip: false}},
<add> {_model: {x: 30, y: 105, skip: false}},
<add> {_model: {x: 33, y: 110, skip: false}},
<add> {_model: {x: 33, y: 110, skip: false}},
<add> {_model: {x: 36, y: 170, skip: false}}
<add> ];
<add> helpers.splineCurveMonotone(dataPoints);
<add> expect(dataPoints).toEqual([{
<add> _model: {
<add> x: 0,
<add> y: 0,
<add> skip: false,
<add> controlPointNextX: 1,
<add> controlPointNextY: 2
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 3,
<add> y: 6,
<add> skip: false,
<add> controlPointPreviousX: 2,
<add> controlPointPreviousY: 6,
<add> controlPointNextX: 5,
<add> controlPointNextY: 6
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 9,
<add> y: 6,
<add> skip: false,
<add> controlPointPreviousX: 7,
<add> controlPointPreviousY: 6,
<add> controlPointNextX: 10,
<add> controlPointNextY: 6
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 12,
<add> y: 60,
<add> skip: false,
<add> controlPointPreviousX: 11,
<add> controlPointPreviousY: 60,
<add> controlPointNextX: 13,
<add> controlPointNextY: 60
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 15,
<add> y: 60,
<add> skip: false,
<add> controlPointPreviousX: 14,
<add> controlPointPreviousY: 60,
<add> controlPointNextX: 16,
<add> controlPointNextY: 60
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 18,
<add> y: 120,
<add> skip: false,
<add> controlPointPreviousX: 17,
<add> controlPointPreviousY: 100
<add> }
<add> },
<add> {
<add> _model: {
<add> x: null,
<add> y: null,
<add> skip: true
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 21,
<add> y: 180,
<add> skip: false,
<add> controlPointNextX: 22,
<add> controlPointNextY: 160
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 24,
<add> y: 120,
<add> skip: false,
<add> controlPointPreviousX: 23,
<add> controlPointPreviousY: 120,
<add> controlPointNextX: 25,
<add> controlPointNextY: 120
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 27,
<add> y: 125,
<add> skip: false,
<add> controlPointPreviousX: 26,
<add> controlPointPreviousY: 125,
<add> controlPointNextX: 28,
<add> controlPointNextY: 125
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 30,
<add> y: 105,
<add> skip: false,
<add> controlPointPreviousX: 29,
<add> controlPointPreviousY: 105,
<add> controlPointNextX: 31,
<add> controlPointNextY: 105
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 33,
<add> y: 110,
<add> skip: false,
<add> controlPointPreviousX: 32,
<add> controlPointPreviousY: 110,
<add> controlPointNextX: 33,
<add> controlPointNextY: 110
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 33,
<add> y: 110,
<add> skip: false,
<add> controlPointPreviousX: 33,
<add> controlPointPreviousY: 110,
<add> controlPointNextX: 34,
<add> controlPointNextY: 110
<add> }
<add> },
<add> {
<add> _model: {
<add> x: 36,
<add> y: 170,
<add> skip: false,
<add> controlPointPreviousX: 35,
<add> controlPointPreviousY: 150
<add> }
<add> }]);
<add> });
<add>});
<ide><path>test/specs/helpers.dom.tests.js
<add>describe('DOM helpers tests', function() {
<add> let helpers;
<add>
<add> beforeAll(function() {
<add> helpers = window.Chart.helpers.dom;
<add> });
<add>
<add> it ('should get the maximum width and height for a node', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> // Create the div we want to get the max size for
<add> var innerDiv = document.createElement('div');
<add> div.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumWidth(innerDiv)).toBe(200);
<add> expect(helpers.getMaximumHeight(innerDiv)).toBe(300);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should get the maximum width and height for a node in a ShadowRoot', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> if (!div.attachShadow) {
<add> // Shadow DOM is not natively supported
<add> return;
<add> }
<add>
<add> var shadow = div.attachShadow({mode: 'closed'});
<add>
<add> // Create the div we want to get the max size for
<add> var innerDiv = document.createElement('div');
<add> shadow.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumWidth(innerDiv)).toBe(200);
<add> expect(helpers.getMaximumHeight(innerDiv)).toBe(300);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should get the maximum width of a node that has a max-width style', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> // Create the div we want to get the max size for and set a max-width style
<add> var innerDiv = document.createElement('div');
<add> innerDiv.style.maxWidth = '150px';
<add> div.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumWidth(innerDiv)).toBe(150);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should get the maximum height of a node that has a max-height style', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> // Create the div we want to get the max size for and set a max-height style
<add> var innerDiv = document.createElement('div');
<add> innerDiv.style.maxHeight = '150px';
<add> div.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumHeight(innerDiv)).toBe(150);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should get the maximum width of a node when the parent has a max-width style', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> // Create an inner wrapper around our div we want to size and give that a max-width style
<add> var parentDiv = document.createElement('div');
<add> parentDiv.style.maxWidth = '150px';
<add> div.appendChild(parentDiv);
<add>
<add> // Create the div we want to get the max size for
<add> var innerDiv = document.createElement('div');
<add> parentDiv.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumWidth(innerDiv)).toBe(150);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should get the maximum height of a node when the parent has a max-height style', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> // Create an inner wrapper around our div we want to size and give that a max-height style
<add> var parentDiv = document.createElement('div');
<add> parentDiv.style.maxHeight = '150px';
<add> div.appendChild(parentDiv);
<add>
<add> // Create the div we want to get the max size for
<add> var innerDiv = document.createElement('div');
<add> innerDiv.style.height = '300px'; // make it large
<add> parentDiv.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumHeight(innerDiv)).toBe(150);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should get the maximum width of a node that has a percentage max-width style', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> // Create the div we want to get the max size for and set a max-width style
<add> var innerDiv = document.createElement('div');
<add> innerDiv.style.maxWidth = '50%';
<add> div.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumWidth(innerDiv)).toBe(100);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should get the maximum height of a node that has a percentage max-height style', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> // Create the div we want to get the max size for and set a max-height style
<add> var innerDiv = document.createElement('div');
<add> innerDiv.style.maxHeight = '50%';
<add> div.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumHeight(innerDiv)).toBe(150);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should get the maximum width of a node when the parent has a percentage max-width style', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> // Create an inner wrapper around our div we want to size and give that a max-width style
<add> var parentDiv = document.createElement('div');
<add> parentDiv.style.maxWidth = '50%';
<add> div.appendChild(parentDiv);
<add>
<add> // Create the div we want to get the max size for
<add> var innerDiv = document.createElement('div');
<add> parentDiv.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumWidth(innerDiv)).toBe(100);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should get the maximum height of a node when the parent has a percentage max-height style', function() {
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '200px';
<add> div.style.height = '300px';
<add>
<add> document.body.appendChild(div);
<add>
<add> // Create an inner wrapper around our div we want to size and give that a max-height style
<add> var parentDiv = document.createElement('div');
<add> parentDiv.style.maxHeight = '50%';
<add> div.appendChild(parentDiv);
<add>
<add> var innerDiv = document.createElement('div');
<add> innerDiv.style.height = '300px'; // make it large
<add> parentDiv.appendChild(innerDiv);
<add>
<add> expect(helpers.getMaximumHeight(innerDiv)).toBe(150);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('Should get padding of parent as number (pixels) when defined as percent (returns incorrectly in IE11)', function() {
<add>
<add> // Create div with fixed size as a test bed
<add> var div = document.createElement('div');
<add> div.style.width = '300px';
<add> div.style.height = '300px';
<add> document.body.appendChild(div);
<add>
<add> // Inner DIV to have 5% padding of parent
<add> var innerDiv = document.createElement('div');
<add>
<add> div.appendChild(innerDiv);
<add>
<add> var canvas = document.createElement('canvas');
<add> innerDiv.appendChild(canvas);
<add>
<add> // No padding
<add> expect(helpers.getMaximumWidth(canvas)).toBe(300);
<add>
<add> // test with percentage
<add> innerDiv.style.padding = '5%';
<add> expect(helpers.getMaximumWidth(canvas)).toBe(270);
<add>
<add> // test with pixels
<add> innerDiv.style.padding = '10px';
<add> expect(helpers.getMaximumWidth(canvas)).toBe(280);
<add>
<add> document.body.removeChild(div);
<add> });
<add>
<add> it ('should leave styled height and width on canvas if explicitly set', function() {
<add> var chart = window.acquireChart({}, {
<add> canvas: {
<add> height: 200,
<add> width: 200,
<add> style: 'height: 400px; width: 400px;'
<add> }
<add> });
<add>
<add> helpers.retinaScale(chart, true);
<add>
<add> var canvas = chart.canvas;
<add>
<add> expect(canvas.style.height).toBe('400px');
<add> expect(canvas.style.width).toBe('400px');
<add> });
<add>
<add>});
<ide><path>test/specs/helpers.math.tests.js
<ide> describe('Chart.helpers.math', function() {
<ide> var math = Chart.helpers.math;
<ide> var factorize = math._factorize;
<add> var decimalPlaces = math._decimalPlaces;
<ide>
<ide> it('should factorize', function() {
<ide> expect(factorize(1000)).toEqual([1, 2, 4, 5, 8, 10, 20, 25, 40, 50, 100, 125, 200, 250, 500]);
<ide> describe('Chart.helpers.math', function() {
<ide> expect(math.log10(Math.pow(10, i))).toBe(i);
<ide> }
<ide> });
<add>
<add> it('should get the correct number of decimal places', function() {
<add> expect(decimalPlaces(100)).toBe(0);
<add> expect(decimalPlaces(1)).toBe(0);
<add> expect(decimalPlaces(0)).toBe(0);
<add> expect(decimalPlaces(0.01)).toBe(2);
<add> expect(decimalPlaces(-0.01)).toBe(2);
<add> expect(decimalPlaces('1')).toBe(undefined);
<add> expect(decimalPlaces('')).toBe(undefined);
<add> expect(decimalPlaces(undefined)).toBe(undefined);
<add> expect(decimalPlaces(12345678.1234)).toBe(4);
<add> expect(decimalPlaces(1234567890.1234567)).toBe(7);
<add> });
<add>
<add> it('should get an angle from a point', function() {
<add> var center = {
<add> x: 0,
<add> y: 0
<add> };
<add>
<add> expect(math.getAngleFromPoint(center, {
<add> x: 0,
<add> y: 10
<add> })).toEqual({
<add> angle: Math.PI / 2,
<add> distance: 10,
<add> });
<add>
<add> expect(math.getAngleFromPoint(center, {
<add> x: Math.sqrt(2),
<add> y: Math.sqrt(2)
<add> })).toEqual({
<add> angle: Math.PI / 4,
<add> distance: 2
<add> });
<add>
<add> expect(math.getAngleFromPoint(center, {
<add> x: -1.0 * Math.sqrt(2),
<add> y: -1.0 * Math.sqrt(2)
<add> })).toEqual({
<add> angle: Math.PI * 1.25,
<add> distance: 2
<add> });
<add> });
<add>
<add> it('should convert between radians and degrees', function() {
<add> expect(math.toRadians(180)).toBe(Math.PI);
<add> expect(math.toRadians(90)).toBe(0.5 * Math.PI);
<add> expect(math.toDegrees(Math.PI)).toBe(180);
<add> expect(math.toDegrees(Math.PI * 3 / 2)).toBe(270);
<add> });
<add>
<add> it('should correctly determine if two numbers are essentially equal', function() {
<add> expect(math.almostEquals(0, Number.EPSILON, 2 * Number.EPSILON)).toBe(true);
<add> expect(math.almostEquals(1, 1.1, 0.0001)).toBe(false);
<add> expect(math.almostEquals(1e30, 1e30 + Number.EPSILON, 0)).toBe(false);
<add> expect(math.almostEquals(1e30, 1e30 + Number.EPSILON, 2 * Number.EPSILON)).toBe(true);
<add> });
<add>
<add> it('should get the correct sign', function() {
<add> expect(math.sign(0)).toBe(0);
<add> expect(math.sign(10)).toBe(1);
<add> expect(math.sign(-5)).toBe(-1);
<add> });
<add>
<add> it('should correctly determine if a numbers are essentially whole', function() {
<add> expect(math.almostWhole(0.99999, 0.0001)).toBe(true);
<add> expect(math.almostWhole(0.9, 0.0001)).toBe(false);
<add> expect(math.almostWhole(1234567890123, 0.0001)).toBe(true);
<add> expect(math.almostWhole(1234567890123.001, 0.0001)).toBe(false);
<add> });
<add>
<add> it('should detect a number', function() {
<add> expect(math.isNumber(123)).toBe(true);
<add> expect(math.isNumber('123')).toBe(true);
<add> expect(math.isNumber(null)).toBe(false);
<add> expect(math.isNumber(NaN)).toBe(false);
<add> expect(math.isNumber(undefined)).toBe(false);
<add> expect(math.isNumber('cbc')).toBe(false);
<add> });
<ide> }); | 25 |
PHP | PHP | add missing return datatype | a1b0a996ab9ab04702a407169c29a34a17173c97 | <ide><path>src/Illuminate/Queue/Console/ListFailedCommand.php
<ide> private function extractJobName($payload)
<ide> * Match the job name from the payload.
<ide> *
<ide> * @param array $payload
<del> * @return string
<add> * @return string|null
<ide> */
<ide> protected function matchJobName($payload)
<ide> {
<ide><path>src/Illuminate/Queue/Jobs/RedisJob.php
<ide> public function attempts()
<ide> /**
<ide> * Get the job identifier.
<ide> *
<del> * @return string
<add> * @return string|null
<ide> */
<ide> public function getJobId()
<ide> {
<ide><path>src/Illuminate/Routing/Route.php
<ide> public function getDomain()
<ide> /**
<ide> * Get the prefix of the route instance.
<ide> *
<del> * @return string
<add> * @return string|null
<ide> */
<ide> public function getPrefix()
<ide> {
<ide> public function setUri($uri)
<ide> /**
<ide> * Get the name of the route instance.
<ide> *
<del> * @return string
<add> * @return string|null
<ide> */
<ide> public function getName()
<ide> { | 3 |
Go | Go | move "commit" to daemon/commit.go | fdad41f5b921dfa06eec613b86147a22aeea5835 | <ide><path>daemon/commit.go
<add>package daemon
<add>
<add>import (
<add> "github.com/docker/docker/engine"
<add> "github.com/docker/docker/image"
<add> "github.com/docker/docker/runconfig"
<add>)
<add>
<add>func (daemon *Daemon) ContainerCommit(job *engine.Job) engine.Status {
<add> if len(job.Args) != 1 {
<add> return job.Errorf("Not enough arguments. Usage: %s CONTAINER\n", job.Name)
<add> }
<add> name := job.Args[0]
<add>
<add> container := daemon.Get(name)
<add> if container == nil {
<add> return job.Errorf("No such container: %s", name)
<add> }
<add>
<add> var (
<add> config = container.Config
<add> newConfig runconfig.Config
<add> )
<add>
<add> if err := job.GetenvJson("config", &newConfig); err != nil {
<add> return job.Error(err)
<add> }
<add>
<add> if err := runconfig.Merge(&newConfig, config); err != nil {
<add> return job.Error(err)
<add> }
<add>
<add> img, err := daemon.Commit(container, job.Getenv("repo"), job.Getenv("tag"), job.Getenv("comment"), job.Getenv("author"), job.GetenvBool("pause"), &newConfig)
<add> if err != nil {
<add> return job.Error(err)
<add> }
<add> job.Printf("%s\n", img.ID)
<add> return engine.StatusOK
<add>}
<add>
<add>// Commit creates a new filesystem image from the current state of a container.
<add>// The image can optionally be tagged into a repository
<add>func (daemon *Daemon) Commit(container *Container, repository, tag, comment, author string, pause bool, config *runconfig.Config) (*image.Image, error) {
<add> if pause {
<add> container.Pause()
<add> defer container.Unpause()
<add> }
<add>
<add> if err := container.Mount(); err != nil {
<add> return nil, err
<add> }
<add> defer container.Unmount()
<add>
<add> rwTar, err := container.ExportRw()
<add> if err != nil {
<add> return nil, err
<add> }
<add> defer rwTar.Close()
<add>
<add> // Create a new image from the container's base layers + a new layer from container changes
<add> var (
<add> containerID, containerImage string
<add> containerConfig *runconfig.Config
<add> )
<add>
<add> if container != nil {
<add> containerID = container.ID
<add> containerImage = container.Image
<add> containerConfig = container.Config
<add> }
<add>
<add> img, err := daemon.graph.Create(rwTar, containerID, containerImage, comment, author, containerConfig, config)
<add> if err != nil {
<add> return nil, err
<add> }
<add>
<add> // Register the image if needed
<add> if repository != "" {
<add> if err := daemon.repositories.Set(repository, tag, img.ID, true); err != nil {
<add> return img, err
<add> }
<add> }
<add> return img, nil
<add>}
<ide><path>daemon/daemon.go
<ide> func (daemon *Daemon) Install(eng *engine.Engine) error {
<ide> if err := eng.Register("resize", daemon.ContainerResize); err != nil {
<ide> return err
<ide> }
<add> if err := eng.Register("commit", daemon.ContainerCommit); err != nil {
<add> return err
<add> }
<ide> return nil
<ide> }
<ide>
<ide> func (daemon *Daemon) createRootfs(container *Container, img *image.Image) error
<ide> return nil
<ide> }
<ide>
<del>// Commit creates a new filesystem image from the current state of a container.
<del>// The image can optionally be tagged into a repository
<del>func (daemon *Daemon) Commit(container *Container, repository, tag, comment, author string, pause bool, config *runconfig.Config) (*image.Image, error) {
<del> if pause {
<del> container.Pause()
<del> defer container.Unpause()
<del> }
<del>
<del> if err := container.Mount(); err != nil {
<del> return nil, err
<del> }
<del> defer container.Unmount()
<del>
<del> rwTar, err := container.ExportRw()
<del> if err != nil {
<del> return nil, err
<del> }
<del> defer rwTar.Close()
<del>
<del> // Create a new image from the container's base layers + a new layer from container changes
<del> var (
<del> containerID, containerImage string
<del> containerConfig *runconfig.Config
<del> )
<del>
<del> if container != nil {
<del> containerID = container.ID
<del> containerImage = container.Image
<del> containerConfig = container.Config
<del> }
<del>
<del> img, err := daemon.graph.Create(rwTar, containerID, containerImage, comment, author, containerConfig, config)
<del> if err != nil {
<del> return nil, err
<del> }
<del>
<del> // Register the image if needed
<del> if repository != "" {
<del> if err := daemon.repositories.Set(repository, tag, img.ID, true); err != nil {
<del> return img, err
<del> }
<del> }
<del> return img, nil
<del>}
<del>
<ide> func GetFullContainerName(name string) (string, error) {
<ide> if name == "" {
<ide> return "", fmt.Errorf("Container name cannot be empty")
<ide><path>server/container.go
<ide> import (
<ide> "github.com/docker/docker/engine"
<ide> "github.com/docker/docker/pkg/graphdb"
<ide> "github.com/docker/docker/pkg/tailfile"
<del> "github.com/docker/docker/runconfig"
<ide> "github.com/docker/docker/utils"
<ide> )
<ide>
<ide> func (srv *Server) Containers(job *engine.Job) engine.Status {
<ide> return engine.StatusOK
<ide> }
<ide>
<del>func (srv *Server) ContainerCommit(job *engine.Job) engine.Status {
<del> if len(job.Args) != 1 {
<del> return job.Errorf("Not enough arguments. Usage: %s CONTAINER\n", job.Name)
<del> }
<del> name := job.Args[0]
<del>
<del> container := srv.daemon.Get(name)
<del> if container == nil {
<del> return job.Errorf("No such container: %s", name)
<del> }
<del>
<del> var (
<del> config = container.Config
<del> newConfig runconfig.Config
<del> )
<del>
<del> if err := job.GetenvJson("config", &newConfig); err != nil {
<del> return job.Error(err)
<del> }
<del>
<del> if err := runconfig.Merge(&newConfig, config); err != nil {
<del> return job.Error(err)
<del> }
<del>
<del> img, err := srv.daemon.Commit(container, job.Getenv("repo"), job.Getenv("tag"), job.Getenv("comment"), job.Getenv("author"), job.GetenvBool("pause"), &newConfig)
<del> if err != nil {
<del> return job.Error(err)
<del> }
<del> job.Printf("%s\n", img.ID)
<del> return engine.StatusOK
<del>}
<del>
<ide> func (srv *Server) ContainerDestroy(job *engine.Job) engine.Status {
<ide> if len(job.Args) != 1 {
<ide> return job.Errorf("Not enough arguments. Usage: %s CONTAINER\n", job.Name)
<ide><path>server/init.go
<ide> func InitServer(job *engine.Job) engine.Status {
<ide>
<ide> for name, handler := range map[string]engine.Handler{
<ide> "tag": srv.ImageTag, // FIXME merge with "image_tag"
<del> "commit": srv.ContainerCommit,
<ide> "info": srv.DockerInfo,
<ide> "container_delete": srv.ContainerDestroy,
<ide> "image_export": srv.ImageExport, | 4 |
Javascript | Javascript | use global.gc() instead of gc() | 715e3c6226aa2bd5ec6ed00db1185674e4989e07 | <ide><path>test/parallel/test-fs-filehandle.js
<ide> common.expectWarning({
<ide> ]
<ide> });
<ide>
<del>gc(); // eslint-disable-line no-undef
<add>global.gc();
<ide>
<ide> setTimeout(() => {}, 10); | 1 |
Javascript | Javascript | create source with buffers for wasm | d5ad14813fac3e86523fadbb1c10b0edae27ea91 | <ide><path>lib/wasm/WebAssemblyGenerator.js
<ide> class WebAssemblyGenerator extends Generator {
<ide> // TODO remove this casts when webpack-sources is fixed
<ide> // source() should have return type (string | Buffer)
<ide> const sourceAsAny = /** @type {TODO} */ (source);
<del> let bin = /** @type {ArrayBuffer} */ (sourceAsAny);
<add> const buf = /** @type {Buffer} */ (sourceAsAny);
<add> const bin = buf.buffer;
<ide>
<ide> const usedExports = moduleGraph.getUsedExports(module);
<ide> const initFuncId = t.identifier(
<ide> class WebAssemblyGenerator extends Generator {
<ide>
<ide> const newBin = transform(bin);
<ide>
<del> return new RawSource(newBin);
<add> const newBuf = Buffer.from(newBin);
<add>
<add> return new RawSource(/** @type {TODO} */ (newBuf));
<ide> }
<ide> }
<ide> | 1 |
Python | Python | remove unnecessary imports | 3184aab941f28dc6c5c305fbac43263bc66f4d1a | <ide><path>numpy/distutils/command/build_ext.py
<ide> msvc_version
<ide> from numpy.distutils.command.config_compiler import show_fortran_compilers
<ide>
<del>try:
<del> set
<del>except NameError:
<del> from sets import Set as set
<ide>
<ide> class build_ext (old_build_ext):
<ide>
<ide><path>numpy/distutils/fcompiler/__init__.py
<ide> import sys
<ide> import re
<ide> import types
<del>try:
<del> set
<del>except NameError:
<del> from sets import Set as set
<ide>
<ide> from numpy.compat import open_latin1
<ide>
<ide><path>numpy/distutils/misc_util.py
<ide> def clean_up_temporary_directory():
<ide>
<ide> atexit.register(clean_up_temporary_directory)
<ide>
<del>try:
<del> set
<del>except NameError:
<del> from sets import Set as set
<ide>
<ide> from numpy.distutils.compat import get_exception
<ide> from numpy.compat import basestring | 3 |
Mixed | Javascript | allow specifying labels in time scale options | abbddd1298c6dee2e294bf6689b4e62160087347 | <ide><path>docs/axes/cartesian/time.md
<ide> The `ticks.source` property controls the ticks generation.
<ide>
<ide> * `'auto'`: generates "optimal" ticks based on scale size and time options
<ide> * `'data'`: generates ticks from data (including labels from data `{t|x|y}` objects)
<del>* `'labels'`: generates ticks from user given `data.labels` values ONLY
<add>* `'labels'`: generates ticks from user given `labels` ONLY
<ide>
<ide> ### Parser
<ide> If this property is defined as a string, it is interpreted as a custom format to be used by Moment.js to parse the date.
<ide><path>src/core/core.scale.js
<ide> var Scale = Element.extend({
<ide> return this._ticks;
<ide> },
<ide>
<add> /**
<add> * @private
<add> */
<add> _getLabels: function() {
<add> var data = this.chart.data;
<add> return this.options.labels || (this.isHorizontal() ? data.xLabels : data.yLabels) || data.labels;
<add> },
<add>
<ide> // These methods are ordered by lifecyle. Utilities then follow.
<ide> // Any function defined here is inherited by all scale types.
<ide> // Any function can be extended by the scale type
<ide><path>src/scales/scale.category.js
<ide> var defaultConfig = {
<ide> };
<ide>
<ide> module.exports = Scale.extend({
<del> /**
<del> * Internal function to get the correct labels. If data.xLabels or data.yLabels are defined, use those
<del> * else fall back to data.labels
<del> * @private
<del> */
<del> getLabels: function() {
<del> var data = this.chart.data;
<del> return this.options.labels || (this.isHorizontal() ? data.xLabels : data.yLabels) || data.labels;
<del> },
<del>
<ide> determineDataLimits: function() {
<ide> var me = this;
<del> var labels = me.getLabels();
<add> var labels = me._getLabels();
<ide> me.minIndex = 0;
<ide> me.maxIndex = labels.length - 1;
<ide> var findIndex;
<ide> module.exports = Scale.extend({
<ide>
<ide> buildTicks: function() {
<ide> var me = this;
<del> var labels = me.getLabels();
<add> var labels = me._getLabels();
<ide> // If we are viewing some subset of labels, slice the original array
<ide> me.ticks = (me.minIndex === 0 && me.maxIndex === labels.length - 1) ? labels : labels.slice(me.minIndex, me.maxIndex + 1);
<ide> },
<ide> module.exports = Scale.extend({
<ide> valueCategory = me.isHorizontal() ? value.x : value.y;
<ide> }
<ide> if (valueCategory !== undefined || (value !== undefined && isNaN(index))) {
<del> var labels = me.getLabels();
<add> var labels = me._getLabels();
<ide> value = valueCategory || value;
<ide> var idx = labels.indexOf(value);
<ide> index = idx !== -1 ? idx : index;
<ide><path>src/scales/scale.time.js
<ide> module.exports = Scale.extend({
<ide> var datasets = [];
<ide> var labels = [];
<ide> var i, j, ilen, jlen, data, timestamp;
<del> var dataLabels = chart.data.labels || [];
<add> var dataLabels = me._getLabels();
<ide>
<ide> // Convert labels to timestamps
<ide> for (i = 0, ilen = dataLabels.length; i < ilen; ++i) {
<ide><path>test/specs/scale.time.tests.js
<ide> describe('Time scale tests', function() {
<ide> });
<ide> });
<ide>
<add> describe('labels', function() {
<add> it('should read labels from scale / xLabels / yLabels', function() {
<add> var timeOpts = {
<add> parser: 'YYYY',
<add> unit: 'year',
<add> displayFormats: {
<add> year: 'YYYY'
<add> }
<add> };
<add> var chart = window.acquireChart({
<add> type: 'line',
<add> data: {
<add> labels: ['1975', '1976', '1977'],
<add> xLabels: ['1985', '1986', '1987'],
<add> yLabels: ['1995', '1996', '1997']
<add> },
<add> options: {
<add> scales: {
<add> xAxes: [{
<add> id: 'x',
<add> type: 'time',
<add> labels: ['2015', '2016', '2017'],
<add> time: timeOpts
<add> },
<add> {
<add> id: 'x2',
<add> type: 'time',
<add> time: timeOpts
<add> }],
<add> yAxes: [{
<add> id: 'y',
<add> type: 'time',
<add> time: timeOpts
<add> },
<add> {
<add> id: 'y2',
<add> type: 'time',
<add> labels: ['2005', '2006', '2007'],
<add> time: timeOpts
<add> }]
<add> }
<add> }
<add> });
<add>
<add> expect(getTicksLabels(chart.scales.x)).toEqual(['2015', '2016', '2017']);
<add> expect(getTicksLabels(chart.scales.x2)).toEqual(['1985', '1986', '1987']);
<add> expect(getTicksLabels(chart.scales.y)).toEqual(['1995', '1996', '1997']);
<add> expect(getTicksLabels(chart.scales.y2)).toEqual(['2005', '2006', '2007']);
<add> });
<add> });
<add>
<ide> describe('Deprecations', function() {
<ide> describe('options.time.displayFormats', function() {
<ide> it('should generate defaults from adapter presets', function() { | 5 |
Ruby | Ruby | fix another leftover | 4b606d07137aab247552313c6041bdca89456604 | <ide><path>railties/lib/rails/cli.rb
<del>require 'rails/app_rails_loader'
<add>require 'rails/app_loader'
<ide>
<ide> # If we are inside a Rails application this method performs an exec and thus
<ide> # the rest of this script is not run. | 1 |
Ruby | Ruby | remove unncessary arguments passed to arel_table | fc94c03c1d509eda7f87ab216e8d1115afbfd1ed | <ide><path>activerecord/lib/active_record/associations.rb
<ide> def select_limited_ids_array(options, join_dependency)
<ide> def construct_finder_sql_for_association_limiting(options, join_dependency)
<ide> scope = scope(:find)
<ide>
<del> relation = arel_table(options[:from])
<add> relation = arel_table
<ide>
<ide> for association in join_dependency.join_associations
<ide> relation = association.join_relation(relation)
<ide> def construct_finder_sql_for_association_limiting(options, join_dependency)
<ide> order(construct_order(options[:order], scope)).
<ide> limit(construct_limit(options[:limit], scope)).
<ide> offset(construct_limit(options[:offset], scope)).
<add> from(options[:from]).
<ide> select(connection.distinct("#{connection.quote_table_name table_name}.#{primary_key}", construct_order(options[:order], scope(:find)).join(",")))
<ide>
<ide> relation.to_sql
<ide><path>activerecord/lib/active_record/locking/optimistic.rb
<ide> def update_with_lock(attribute_names = @attributes.keys) #:nodoc:
<ide> attribute_names.uniq!
<ide>
<ide> begin
<del> arel_table = self.class.arel_table(self.class.table_name)
<add> arel_table = self.class.arel_table
<ide>
<ide> affected_rows = arel_table.where(
<ide> arel_table[self.class.primary_key].eq(quoted_id).and( | 2 |
Java | Java | update copyright date in contextloaderutils | dc6eaad2e96fac35df9ef2595d5acf7379a59525 | <ide><path>spring-test/src/main/java/org/springframework/test/context/ContextLoaderUtils.java
<ide> /*
<del> * Copyright 2002-2013 the original author or authors.
<add> * Copyright 2002-2014 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License. | 1 |
Javascript | Javascript | correct the tests | 8fe4295a064caa699edd902711d396164cf092f4 | <ide><path>test/ng/directive/ngIncludeSpec.js
<ide> describe('ng-include', function() {
<ide> $rootScope.url = 'url1';
<ide> $rootScope.$digest();
<ide> $httpBackend.flush();
<del> expect(element.children().scope()).toBeTruthy();
<add> expect(element.children().scope().$parent).toBe($rootScope);
<ide> expect(element.text()).toBe('partial url1');
<ide>
<ide> $rootScope.url = 'url2';
<ide> $rootScope.$digest();
<ide> $httpBackend.flush();
<del> expect(element.children().scope()).toBeFalsy();
<add> expect($rootScope.$$childHead).toBeFalsy();
<ide> expect(element.text()).toBe('');
<ide>
<ide> $rootScope.url = 'url1';
<ide> $rootScope.$digest();
<del> expect(element.children().scope()).toBeTruthy();
<add> expect(element.children().scope().$parent).toBe($rootScope);
<ide>
<ide> $rootScope.url = null;
<ide> $rootScope.$digest();
<del> expect(element.children().scope()).toBeFalsy();
<add> expect($rootScope.$$childHead).toBeFalsy();
<ide> }));
<ide>
<ide> | 1 |
PHP | PHP | add ssl option | 76d2075ffc137b882df833d61f4eab544dee4cae | <ide><path>lib/Cake/Routing/Router.php
<ide> public static function promote($which = null) {
<ide> *
<ide> * There are a few 'special' parameters that can change the final URL string that is generated
<ide> *
<del> * - `base` - Set to false to remove the base path from the generated url. If your application
<add> * - `_base` - Set to false to remove the base path from the generated url. If your application
<ide> * is not in the root directory, this can be used to generate urls that are 'cake relative'.
<ide> * cake relative urls are required when using requestAction.
<del> * - `?` - Takes an array of query string parameters. (Deprecated)
<add> * - `_full` - If true the `FULL_BASE_URL` constant will be prepended to generated urls.
<ide> * - `#` - Allows you to set url hash fragments.
<del> * - `full_base` - If true the `FULL_BASE_URL` constant will be prepended to generated urls.
<add> * - `ssl` - Set to true to convert the generated url to https, or false to force http.
<ide> *
<ide> * @param string|array $url Cake-relative URL, like "/products/edit/92" or "/presidents/elect/4"
<ide> * or an array specifying any of the following: 'controller', 'action',
<ide> public static function url($url = null, $full = false) {
<ide> $here = null;
<ide> }
<ide>
<del> $extension = $output = $q = $frag = null;
<add> $extension = $output = $frag = null;
<ide>
<ide> if (empty($url)) {
<ide> $output = isset($here) ? $here : '/';
<ide> public static function url($url = null, $full = false) {
<ide> $extension = '.' . $url['ext'];
<ide> unset($url['ext']);
<ide> }
<add> if (isset($url['ssl'])) {
<add> $url['_scheme'] = ($url['ssl'] == true) ? 'https' : 'http';
<add> unset($url['ssl']);
<add> }
<ide>
<ide> // Copy the current action if the controller is the current one.
<ide> if (empty($url['action'])) {
<ide><path>lib/Cake/Test/TestCase/Routing/RouterTest.php
<ide> public function testUrlGenerationWithAutoPrefixes() {
<ide> $this->assertEquals($expected, $result);
<ide> }
<ide>
<add>/**
<add> * Test that the ssl option works.
<add> *
<add> * @return void
<add> */
<add> public function testGenerationWithSslOption() {
<add> Router::connect('/:controller/:action/*');
<add> $_SERVER['HTTP_HOST'] = 'localhost';
<add>
<add> $request = new Request();
<add> Router::pushRequest(
<add> $request->addParams(array(
<add> 'plugin' => null, 'controller' => 'images', 'action' => 'index'
<add> ))->addPaths(array(
<add> 'base' => '',
<add> 'here' => '/images/index',
<add> 'webroot' => '/',
<add> ))
<add> );
<add>
<add> $result = Router::url(array(
<add> 'ssl' => true
<add> ));
<add> $this->assertEquals('https://localhost/images/index', $result);
<add>
<add> $result = Router::url(array(
<add> 'ssl' => false
<add> ));
<add> $this->assertEquals('http://localhost/images/index', $result);
<add> }
<add>
<add>/**
<add> * Test ssl option when the current request is ssl.
<add> *
<add> * @return void
<add> */
<add> public function testGenerateWithSslInSsl() {
<add> Router::connect('/:controller/:action/*');
<add> $_SERVER['HTTP_HOST'] = 'localhost';
<add> $_SERVER['HTTPS'] = 'on';
<add>
<add> $request = new Request();
<add> Router::pushRequest(
<add> $request->addParams(array(
<add> 'plugin' => null,
<add> 'controller' => 'images',
<add> 'action' => 'index'
<add> ))->addPaths(array(
<add> 'base' => '',
<add> 'here' => '/images/index',
<add> 'webroot' => '/',
<add> ))
<add> );
<add>
<add> $result = Router::url(array(
<add> 'ssl' => false
<add> ));
<add> $this->assertEquals('http://localhost/images/index', $result);
<add>
<add> $result = Router::url(array(
<add> 'ssl' => true
<add> ));
<add> $this->assertEquals('https://localhost/images/index', $result);
<add> }
<add>
<ide> /**
<ide> * test that auto-generated prefix routes persist
<ide> * | 2 |
Mixed | Javascript | support all arraybufferview types | a8f460f12d81f63d95b3f1bc12a89e36cae2b271 | <ide><path>doc/api/crypto.md
<ide> const cert2 = crypto.Certificate();
<ide> <!-- YAML
<ide> added: v0.11.8
<ide> -->
<del>- `spkac` {string | Buffer | Uint8Array}
<add>- `spkac` {string | Buffer | TypedArray | DataView}
<ide> - Returns {Buffer} The challenge component of the `spkac` data structure, which
<ide> includes a public key and a challenge.
<ide>
<ide> console.log(challenge.toString('utf8'));
<ide> <!-- YAML
<ide> added: v0.11.8
<ide> -->
<del>- `spkac` {string | Buffer | Uint8Array}
<add>- `spkac` {string | Buffer | TypedArray | DataView}
<ide> - Returns {Buffer} The public key component of the `spkac` data structure,
<ide> which includes a public key and a challenge.
<ide>
<ide> console.log(publicKey);
<ide> <!-- YAML
<ide> added: v0.11.8
<ide> -->
<del>- `spkac` {Buffer | Uint8Array}
<add>- `spkac` {Buffer | TypedArray | DataView}
<ide> - Returns {boolean} `true` if the given `spkac` data structure is valid, `false`
<ide> otherwise.
<ide>
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/5522
<ide> description: The default `input_encoding` changed from `binary` to `utf8`.
<ide> -->
<del>- `data` {string | Buffer | Uint8Array}
<add>- `data` {string | Buffer | TypedArray | DataView}
<ide> - `input_encoding` {string}
<ide> - `output_encoding` {string}
<ide>
<ide> Updates the cipher with `data`. If the `input_encoding` argument is given,
<ide> its value must be one of `'utf8'`, `'ascii'`, or `'latin1'` and the `data`
<ide> argument is a string using the specified encoding. If the `input_encoding`
<del>argument is not given, `data` must be a [`Buffer`][] or `Uint8Array`.
<del>If `data` is a [`Buffer`][] or `Uint8Array`, then `input_encoding` is ignored.
<add>argument is not given, `data` must be a [`Buffer`][], `TypedArray`, or
<add>`DataView`. If `data` is a [`Buffer`][], `TypedArray`, or `DataView`, then
<add>`input_encoding` is ignored.
<ide>
<ide> The `output_encoding` specifies the output format of the enciphered
<ide> data, and can be `'latin1'`, `'base64'` or `'hex'`. If the `output_encoding`
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/9398
<ide> description: This method now returns a reference to `decipher`.
<ide> -->
<del>- `buffer` {Buffer | Uint8Array}
<add>- `buffer` {Buffer | TypedArray | DataView}
<ide> - Returns the {Cipher} for method chaining.
<ide>
<ide> When using an authenticated encryption mode (only `GCM` is currently
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/9398
<ide> description: This method now returns a reference to `decipher`.
<ide> -->
<del>- `buffer` {Buffer | Uint8Array}
<add>- `buffer` {Buffer | TypedArray | DataView}
<ide> - Returns the {Cipher} for method chaining.
<ide>
<ide> When using an authenticated encryption mode (only `GCM` is currently
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/5522
<ide> description: The default `input_encoding` changed from `binary` to `utf8`.
<ide> -->
<del>- `data` {string | Buffer | Uint8Array}
<add>- `data` {string | Buffer | TypedArray | DataView}
<ide> - `input_encoding` {string}
<ide> - `output_encoding` {string}
<ide>
<ide> assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex'));
<ide> <!-- YAML
<ide> added: v0.5.0
<ide> -->
<del>- `other_public_key` {string | Buffer | Uint8Array}
<add>- `other_public_key` {string | Buffer | TypedArray | DataView}
<ide> - `input_encoding` {string}
<ide> - `output_encoding` {string}
<ide>
<ide> party's public key and returns the computed shared secret. The supplied
<ide> key is interpreted using the specified `input_encoding`, and secret is
<ide> encoded using specified `output_encoding`. Encodings can be
<ide> `'latin1'`, `'hex'`, or `'base64'`. If the `input_encoding` is not
<del>provided, `other_public_key` is expected to be a [`Buffer`][] or `Uint8Array`.
<add>provided, `other_public_key` is expected to be a [`Buffer`][],
<add>`TypedArray`, or `DataView`.
<ide>
<ide> If `output_encoding` is given a string is returned; otherwise, a
<ide> [`Buffer`][] is returned.
<ide> string is returned; otherwise a [`Buffer`][] is returned.
<ide> <!-- YAML
<ide> added: v0.5.0
<ide> -->
<del>- `private_key` {string | Buffer | Uint8Array}
<add>- `private_key` {string | Buffer | TypedArray | DataView}
<ide> - `encoding` {string}
<ide>
<ide> Sets the Diffie-Hellman private key. If the `encoding` argument is provided
<ide> and is either `'latin1'`, `'hex'`, or `'base64'`, `private_key` is expected
<ide> to be a string. If no `encoding` is provided, `private_key` is expected
<del>to be a [`Buffer`][] or `Uint8Array`.
<add>to be a [`Buffer`][], `TypedArray`, or `DataView`.
<ide>
<ide> ### diffieHellman.setPublicKey(public_key[, encoding])
<ide> <!-- YAML
<ide> added: v0.5.0
<ide> -->
<del>- `public_key` {string | Buffer | Uint8Array}
<add>- `public_key` {string | Buffer | TypedArray | DataView}
<ide> - `encoding` {string}
<ide>
<ide> Sets the Diffie-Hellman public key. If the `encoding` argument is provided
<ide> and is either `'latin1'`, `'hex'` or `'base64'`, `public_key` is expected
<ide> to be a string. If no `encoding` is provided, `public_key` is expected
<del>to be a [`Buffer`][] or `Uint8Array`.
<add>to be a [`Buffer`][], `TypedArray`, or `DataView`.
<ide>
<ide> ### diffieHellman.verifyError
<ide> <!-- YAML
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/5522
<ide> description: The default `input_encoding` changed from `binary` to `utf8`.
<ide> -->
<del>- `other_public_key` {string | Buffer | Uint8Array}
<add>- `other_public_key` {string | Buffer | TypedArray | DataView}
<ide> - `input_encoding` {string}
<ide> - `output_encoding` {string}
<ide>
<ide> party's public key and returns the computed shared secret. The supplied
<ide> key is interpreted using specified `input_encoding`, and the returned secret
<ide> is encoded using the specified `output_encoding`. Encodings can be
<ide> `'latin1'`, `'hex'`, or `'base64'`. If the `input_encoding` is not
<del>provided, `other_public_key` is expected to be a [`Buffer`][] or `Uint8Array`.
<add>provided, `other_public_key` is expected to be a [`Buffer`][], `TypedArray`, or
<add>`DataView`.
<ide>
<ide> If `output_encoding` is given a string will be returned; otherwise a
<ide> [`Buffer`][] is returned.
<ide> returned.
<ide> <!-- YAML
<ide> added: v0.11.14
<ide> -->
<del>- `private_key` {string | Buffer | Uint8Array}
<add>- `private_key` {string | Buffer | TypedArray | DataView}
<ide> - `encoding` {string}
<ide>
<ide> Sets the EC Diffie-Hellman private key. The `encoding` can be `'latin1'`,
<ide> `'hex'` or `'base64'`. If `encoding` is provided, `private_key` is expected
<del>to be a string; otherwise `private_key` is expected to be a [`Buffer`][]
<del>or `Uint8Array`.
<add>to be a string; otherwise `private_key` is expected to be a [`Buffer`][],
<add>`TypedArray`, or `DataView`.
<add>
<ide> If `private_key` is not valid for the curve specified when the `ECDH` object was
<ide> created, an error is thrown. Upon setting the private key, the associated
<ide> public point (key) is also generated and set in the ECDH object.
<ide> deprecated: v5.2.0
<ide>
<ide> > Stability: 0 - Deprecated
<ide>
<del>- `public_key` {string | Buffer | Uint8Array}
<add>- `public_key` {string | Buffer | TypedArray | DataView}
<ide> - `encoding` {string}
<ide>
<ide> Sets the EC Diffie-Hellman public key. Key encoding can be `'latin1'`,
<ide> `'hex'` or `'base64'`. If `encoding` is provided `public_key` is expected to
<del>be a string; otherwise a [`Buffer`][] or `Uint8Array` is expected.
<add>be a string; otherwise a [`Buffer`][], `TypedArray`, or `DataView` is expected.
<ide>
<ide> Note that there is not normally a reason to call this method because `ECDH`
<ide> only requires a private key and the other party's public key to compute the
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/5522
<ide> description: The default `input_encoding` changed from `binary` to `utf8`.
<ide> -->
<del>- `data` {string | Buffer | Uint8Array}
<add>- `data` {string | Buffer | TypedArray | DataView}
<ide> - `input_encoding` {string}
<ide>
<ide> Updates the hash content with the given `data`, the encoding of which
<ide> is given in `input_encoding` and can be `'utf8'`, `'ascii'` or
<ide> `'latin1'`. If `encoding` is not provided, and the `data` is a string, an
<del>encoding of `'utf8'` is enforced. If `data` is a [`Buffer`][] or `Uint8Array`
<del>then `input_encoding` is ignored.
<add>encoding of `'utf8'` is enforced. If `data` is a [`Buffer`][], `TypedArray`, or
<add>`DataView`, then `input_encoding` is ignored.
<ide>
<ide> This can be called many times with new data as it is streamed.
<ide>
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/5522
<ide> description: The default `input_encoding` changed from `binary` to `utf8`.
<ide> -->
<del>- `data` {string | Buffer | Uint8Array}
<add>- `data` {string | Buffer | TypedArray | DataView}
<ide> - `input_encoding` {string}
<ide>
<ide> Updates the `Hmac` content with the given `data`, the encoding of which
<ide> is given in `input_encoding` and can be `'utf8'`, `'ascii'` or
<ide> `'latin1'`. If `encoding` is not provided, and the `data` is a string, an
<del>encoding of `'utf8'` is enforced. If `data` is a [`Buffer`][] or `Uint8Array`
<del>then `input_encoding` is ignored.
<add>encoding of `'utf8'` is enforced. If `data` is a [`Buffer`][], `TypedArray`, or
<add>`DataView`, then `input_encoding` is ignored.
<ide>
<ide> This can be called many times with new data as it is streamed.
<ide>
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/5522
<ide> description: The default `input_encoding` changed from `binary` to `utf8`.
<ide> -->
<del>- `data` {string | Buffer | Uint8Array}
<add>- `data` {string | Buffer | TypedArray | DataView}
<ide> - `input_encoding` {string}
<ide>
<ide> Updates the `Sign` content with the given `data`, the encoding of which
<ide> is given in `input_encoding` and can be `'utf8'`, `'ascii'` or
<ide> `'latin1'`. If `encoding` is not provided, and the `data` is a string, an
<del>encoding of `'utf8'` is enforced. If `data` is a [`Buffer`][] or `Uint8Array`
<del>then `input_encoding` is ignored.
<add>encoding of `'utf8'` is enforced. If `data` is a [`Buffer`][], `TypedArray`, or
<add>`DataView`, then `input_encoding` is ignored.
<ide>
<ide> This can be called many times with new data as it is streamed.
<ide>
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/5522
<ide> description: The default `input_encoding` changed from `binary` to `utf8`.
<ide> -->
<del>- `data` {string | Buffer | Uint8Array}
<add>- `data` {string | Buffer | TypedArray | DataView}
<ide> - `input_encoding` {string}
<ide>
<ide> Updates the `Verify` content with the given `data`, the encoding of which
<ide> is given in `input_encoding` and can be `'utf8'`, `'ascii'` or
<ide> `'latin1'`. If `encoding` is not provided, and the `data` is a string, an
<del>encoding of `'utf8'` is enforced. If `data` is a [`Buffer`][] or `Uint8Array`
<del>then `input_encoding` is ignored.
<add>encoding of `'utf8'` is enforced. If `data` is a [`Buffer`][], `TypedArray`, or
<add>`DataView`, then `input_encoding` is ignored.
<ide>
<ide> This can be called many times with new data as it is streamed.
<ide>
<ide> changes:
<ide> description: Support for RSASSA-PSS and additional options was added.
<ide> -->
<ide> - `object` {string | Object}
<del>- `signature` {string | Buffer | Uint8Array}
<add>- `signature` {string | Buffer | TypedArray | DataView}
<ide> - `signature_format` {string}
<ide>
<ide> Verifies the provided data using the given `object` and `signature`.
<ide> or an object with one or more of the following properties:
<ide> The `signature` argument is the previously calculated signature for the data, in
<ide> the `signature_format` which can be `'latin1'`, `'hex'` or `'base64'`.
<ide> If a `signature_format` is specified, the `signature` is expected to be a
<del>string; otherwise `signature` is expected to be a [`Buffer`][] or
<del>`Uint8Array`.
<add>string; otherwise `signature` is expected to be a [`Buffer`][],
<add>`TypedArray`, or `DataView`.
<ide>
<ide> Returns `true` or `false` depending on the validity of the signature for
<ide> the data and public key.
<ide> currently in use. Setting to true requires a FIPS build of Node.js.
<ide> added: v0.1.94
<ide> -->
<ide> - `algorithm` {string}
<del>- `password` {string | Buffer | Uint8Array}
<add>- `password` {string | Buffer | TypedArray | DataView}
<ide>
<ide> Creates and returns a `Cipher` object that uses the given `algorithm` and
<ide> `password`.
<ide> recent OpenSSL releases, `openssl list-cipher-algorithms` will display the
<ide> available cipher algorithms.
<ide>
<ide> The `password` is used to derive the cipher key and initialization vector (IV).
<del>The value must be either a `'latin1'` encoded string, a [`Buffer`][] or a
<del>`Uint8Array`.
<add>The value must be either a `'latin1'` encoded string, a [`Buffer`][], a
<add>`TypedArray`, or a `DataView`.
<ide>
<ide> The implementation of `crypto.createCipher()` derives keys using the OpenSSL
<ide> function [`EVP_BytesToKey`][] with the digest algorithm set to MD5, one
<ide> to create the `Cipher` object.
<ide>
<ide> ### crypto.createCipheriv(algorithm, key, iv)
<ide> - `algorithm` {string}
<del>- `key` {string | Buffer | Uint8Array}
<del>- `iv` {string | Buffer | Uint8Array}
<add>- `key` {string | Buffer | TypedArray | DataView}
<add>- `iv` {string | Buffer | TypedArray | DataView}
<ide>
<ide> Creates and returns a `Cipher` object, with the given `algorithm`, `key` and
<ide> initialization vector (`iv`).
<ide> available cipher algorithms.
<ide>
<ide> The `key` is the raw key used by the `algorithm` and `iv` is an
<ide> [initialization vector][]. Both arguments must be `'utf8'` encoded strings,
<del>[Buffers][`Buffer`] or `Uint8Array`s.
<add>[Buffers][`Buffer`], `TypedArray`, or `DataView`s.
<ide>
<ide> ### crypto.createCredentials(details)
<ide> <!-- YAML
<ide> called.
<ide> added: v0.1.94
<ide> -->
<ide> - `algorithm` {string}
<del>- `password` {string | Buffer | Uint8Array}
<add>- `password` {string | Buffer | TypedArray | DataView}
<ide>
<ide> Creates and returns a `Decipher` object that uses the given `algorithm` and
<ide> `password` (key).
<ide> to create the `Decipher` object.
<ide> added: v0.1.94
<ide> -->
<ide> - `algorithm` {string}
<del>- `key` {string | Buffer | Uint8Array}
<del>- `iv` {string | Buffer | Uint8Array}
<add>- `key` {string | Buffer | TypedArray | DataView}
<add>- `iv` {string | Buffer | TypedArray | DataView}
<ide>
<ide> Creates and returns a `Decipher` object that uses the given `algorithm`, `key`
<ide> and initialization vector (`iv`).
<ide> The `key` is the raw key used by the `algorithm` and `iv` is an
<ide> <!-- YAML
<ide> added: v0.11.12
<ide> changes:
<add> - version: REPLACEME
<add> pr-url: REPLACEME
<add> description: The `prime` argument can be any `TypedArray` or `DataView` now.
<ide> - version: REPLACEME
<ide> pr-url: https://github.com/nodejs/node/pull/11983
<ide> description: The `prime` argument can be a `Uint8Array` now.
<ide> changes:
<ide> description: The default for the encoding parameters changed
<ide> from `binary` to `utf8`.
<ide> -->
<del>- `prime` {string | Buffer | Uint8Array}
<add>- `prime` {string | Buffer | TypedArray | DataView}
<ide> - `prime_encoding` {string}
<del>- `generator` {number | string | Buffer | Uint8Array} Defaults to `2`.
<add>- `generator` {number | string | Buffer | TypedArray | DataView} Defaults to `2`.
<ide> - `generator_encoding` {string}
<ide>
<ide> Creates a `DiffieHellman` key exchange object using the supplied `prime` and an
<ide> The `prime_encoding` and `generator_encoding` arguments can be `'latin1'`,
<ide> `'hex'`, or `'base64'`.
<ide>
<ide> If `prime_encoding` is specified, `prime` is expected to be a string; otherwise
<del>a [`Buffer`][] or `Uint8Array` is expected.
<add>a [`Buffer`][], `TypedArray`, or `DataView` is expected.
<ide>
<ide> If `generator_encoding` is specified, `generator` is expected to be a string;
<del>otherwise either a number or [`Buffer`][] or `Uint8Array` is expected.
<add>otherwise a number, [`Buffer`][], `TypedArray`, or `DataView` is expected.
<ide>
<ide> ### crypto.createDiffieHellman(prime_length[, generator])
<ide> <!-- YAML
<ide> added: v0.5.0
<ide> -->
<ide> - `prime_length` {number}
<del>- `generator` {number | string | Buffer | Uint8Array} Defaults to `2`.
<add>- `generator` {number | string | Buffer | TypedArray | DataView} Defaults to `2`.
<ide>
<ide> Creates a `DiffieHellman` key exchange object and generates a prime of
<ide> `prime_length` bits using an optional specific numeric `generator`.
<ide> input.on('readable', () => {
<ide> added: v0.1.94
<ide> -->
<ide> - `algorithm` {string}
<del>- `key` {string | Buffer | Uint8Array}
<add>- `key` {string | Buffer | TypedArray | DataView}
<ide>
<ide> Creates and returns an `Hmac` object that uses the given `algorithm` and `key`.
<ide>
<ide> added: v0.11.14
<ide> - `padding` {crypto.constants} An optional padding value defined in
<ide> `crypto.constants`, which may be: `crypto.constants.RSA_NO_PADDING`,
<ide> `RSA_PKCS1_PADDING`, or `crypto.constants.RSA_PKCS1_OAEP_PADDING`.
<del>- `buffer` {Buffer | Uint8Array}
<add>- `buffer` {Buffer | TypedArray | DataView}
<ide>
<ide> Decrypts `buffer` with `private_key`.
<ide>
<ide> added: v1.1.0
<ide> - `padding` {crypto.constants} An optional padding value defined in
<ide> `crypto.constants`, which may be: `crypto.constants.RSA_NO_PADDING` or
<ide> `RSA_PKCS1_PADDING`.
<del>- `buffer` {Buffer | Uint8Array}
<add>- `buffer` {Buffer | TypedArray | DataView}
<ide>
<ide> Encrypts `buffer` with `private_key`.
<ide>
<ide> added: v1.1.0
<ide> - `padding` {crypto.constants} An optional padding value defined in
<ide> `crypto.constants`, which may be: `crypto.constants.RSA_NO_PADDING`,
<ide> `RSA_PKCS1_PADDING`, or `crypto.constants.RSA_PKCS1_OAEP_PADDING`.
<del>- `buffer` {Buffer | Uint8Array}
<add>- `buffer` {Buffer | TypedArray | DataView}
<ide>
<ide> Decrypts `buffer` with `public_key`.
<ide>
<ide> added: v0.11.14
<ide> - `padding` {crypto.constants} An optional padding value defined in
<ide> `crypto.constants`, which may be: `crypto.constants.RSA_NO_PADDING`,
<ide> `RSA_PKCS1_PADDING`, or `crypto.constants.RSA_PKCS1_OAEP_PADDING`.
<del>- `buffer` {Buffer | Uint8Array}
<add>- `buffer` {Buffer | TypedArray | DataView}
<ide>
<ide> Encrypts `buffer` with `public_key`.
<ide>
<ide> is a bit field taking one of or a mix of the following flags (defined in
<ide> <!-- YAML
<ide> added: v6.6.0
<ide> -->
<del>- `a` {Buffer | Uint8Array}
<del>- `b` {Buffer | Uint8Array}
<add>- `a` {Buffer | TypedArray | DataView}
<add>- `b` {Buffer | TypedArray | DataView}
<ide>
<ide> Returns true if `a` is equal to `b`, without leaking timing information that
<ide> would allow an attacker to guess one of the values. This is suitable for
<ide> comparing HMAC digests or secret values like authentication cookies or
<ide> [capability urls](https://www.w3.org/TR/capability-urls/).
<ide>
<del>`a` and `b` must both be `Buffer`s or `Uint8Array`s, and they must have the
<del>same length.
<add>`a` and `b` must both be `Buffer`s, `TypedArray`s, or `DataView`s, and they
<add>must have the same length.
<ide>
<ide> **Note**: Use of `crypto.timingSafeEqual` does not guarantee that the
<ide> *surrounding* code is timing-safe. Care should be taken to ensure that the
<ide><path>lib/crypto.js
<ide> const timingSafeEqual = binding.timingSafeEqual;
<ide> const Buffer = require('buffer').Buffer;
<ide> const stream = require('stream');
<ide> const util = require('util');
<del>const { isUint8Array } = process.binding('util');
<ide> const LazyTransform = require('internal/streams/lazy_transform');
<ide>
<ide> const DH_GENERATOR = 2;
<ide> function DiffieHellman(sizeOrKey, keyEncoding, generator, genEncoding) {
<ide>
<ide> if (typeof sizeOrKey !== 'number' &&
<ide> typeof sizeOrKey !== 'string' &&
<del> !isUint8Array(sizeOrKey)) {
<add> !ArrayBuffer.isView(sizeOrKey)) {
<ide> throw new TypeError('First argument should be number, string, ' +
<del> 'Uint8Array or Buffer');
<add> 'Buffer, TypedArray, or DataView');
<ide> }
<ide>
<ide> if (keyEncoding) {
<ide><path>test/parallel/test-crypto-dh.js
<ide> assert.strictEqual(secret2.toString('base64'), secret1);
<ide> assert.strictEqual(dh1.verifyError, 0);
<ide> assert.strictEqual(dh2.verifyError, 0);
<ide>
<del>const argumentsError =
<del> /^TypeError: First argument should be number, string, Uint8Array or Buffer$/;
<add>const argumentsError = new RegExp('^TypeError: First argument should be ' +
<add> 'number, string, Buffer, TypedArray, or ' +
<add> 'DataView$');
<ide>
<ide> assert.throws(() => {
<ide> crypto.createDiffieHellman([0x1, 0x2]);
<ide> const modp2buf = Buffer.from([
<ide> assert.strictEqual(exmodp2.verifyError, DH_NOT_SUITABLE_GENERATOR);
<ide> }
<ide>
<del>{
<del> // Ensure specific generator (string with encoding) works as expected.
<del> const exmodp2 = crypto.createDiffieHellman(modp2buf, '02', 'hex');
<del> exmodp2.generateKeys();
<del> const modp2Secret = modp2.computeSecret(exmodp2.getPublicKey())
<del> .toString('hex');
<del> const exmodp2Secret = exmodp2.computeSecret(modp2.getPublicKey())
<del> .toString('hex');
<del> assert.strictEqual(modp2Secret, exmodp2Secret);
<del> assert.strictEqual(exmodp2.verifyError, DH_NOT_SUITABLE_GENERATOR);
<del>}
<del>
<del>{
<del> // Ensure specific generator (string with encoding) works as expected,
<del> // with a Uint8Array as the first argument to createDiffieHellman().
<del> const exmodp2 = crypto.createDiffieHellman(new Uint8Array(modp2buf),
<del> '02', 'hex');
<add>for (const buf of [modp2buf, ...common.getArrayBufferViews(modp2buf)]) {
<add> // Ensure specific generator (string with encoding) works as expected with
<add> // any ArrayBufferViews as the first argument to createDiffieHellman().
<add> const exmodp2 = crypto.createDiffieHellman(buf, '02', 'hex');
<ide> exmodp2.generateKeys();
<ide> const modp2Secret = modp2.computeSecret(exmodp2.getPublicKey())
<ide> .toString('hex'); | 3 |
Go | Go | fix grammatical problems in several annotations | 3c376a882b89bc1ff4f7b47667f49440a516bd70 | <ide><path>api/errdefs/is.go
<ide> func getImplementer(err error) error {
<ide> }
<ide> }
<ide>
<del>// IsNotFound returns if the passed in error is a ErrNotFound
<add>// IsNotFound returns if the passed in error is an ErrNotFound
<ide> func IsNotFound(err error) bool {
<ide> _, ok := getImplementer(err).(ErrNotFound)
<ide> return ok
<ide> func IsInvalidParameter(err error) bool {
<ide> return ok
<ide> }
<ide>
<del>// IsConflict returns if the passed in error is a ErrConflict
<add>// IsConflict returns if the passed in error is an ErrConflict
<ide> func IsConflict(err error) bool {
<ide> _, ok := getImplementer(err).(ErrConflict)
<ide> return ok
<ide> func IsUnavailable(err error) bool {
<ide> return ok
<ide> }
<ide>
<del>// IsForbidden returns if the passed in error is a ErrForbidden
<add>// IsForbidden returns if the passed in error is an ErrForbidden
<ide> func IsForbidden(err error) bool {
<ide> _, ok := getImplementer(err).(ErrForbidden)
<ide> return ok
<ide> }
<ide>
<del>// IsSystem returns if the passed in error is a ErrSystem
<add>// IsSystem returns if the passed in error is an ErrSystem
<ide> func IsSystem(err error) bool {
<ide> _, ok := getImplementer(err).(ErrSystem)
<ide> return ok
<ide> func IsNotModified(err error) bool {
<ide> return ok
<ide> }
<ide>
<del>// IsNotImplemented returns if the passed in error is a ErrNotImplemented
<add>// IsNotImplemented returns if the passed in error is an ErrNotImplemented
<ide> func IsNotImplemented(err error) bool {
<ide> _, ok := getImplementer(err).(ErrNotImplemented)
<ide> return ok | 1 |
Go | Go | remove plugin root from filesystem | 5690730a7471726bbaf813e634a7117a562dfb8c | <ide><path>integration-cli/docker_cli_plugins_test.go
<ide> import (
<ide> "github.com/docker/docker/pkg/integration/checker"
<ide> "github.com/go-check/check"
<ide>
<add> "io/ioutil"
<add> "os"
<add> "os/exec"
<add> "path/filepath"
<ide> "strings"
<ide> )
<ide>
<ide> func (s *DockerSuite) TestPluginBasicOps(c *check.C) {
<ide>
<ide> out, _, err = dockerCmdWithError("plugin", "inspect", pNameWithTag)
<ide> c.Assert(err, checker.IsNil)
<del> c.Assert(out, checker.Contains, "A test plugin for Docker")
<add> tmpFile, err := ioutil.TempFile("", "inspect.json")
<add> c.Assert(err, checker.IsNil)
<add> defer tmpFile.Close()
<add>
<add> if _, err := tmpFile.Write([]byte(out)); err != nil {
<add> c.Fatal(err)
<add> }
<add> // FIXME: When `docker plugin inspect` takes a format as input, jq can be replaced.
<add> id, err := exec.Command("jq", ".Id", "--raw-output", tmpFile.Name()).CombinedOutput()
<add> c.Assert(err, checker.IsNil)
<ide>
<ide> out, _, err = dockerCmdWithError("plugin", "remove", pNameWithTag)
<ide> c.Assert(out, checker.Contains, "is active")
<ide> func (s *DockerSuite) TestPluginBasicOps(c *check.C) {
<ide> out, _, err = dockerCmdWithError("plugin", "remove", pNameWithTag)
<ide> c.Assert(err, checker.IsNil)
<ide> c.Assert(out, checker.Contains, pNameWithTag)
<add>
<add> _, err = os.Stat(filepath.Join(dockerBasePath, "plugins", string(id)))
<add> if !os.IsNotExist(err) {
<add> c.Fatal(err)
<add> }
<ide> }
<ide>
<ide> func (s *DockerSuite) TestPluginInstallDisable(c *check.C) {
<ide><path>plugin/manager.go
<ide> func (pm *Manager) remove(p *plugin) error {
<ide> delete(pm.plugins, p.PluginObj.ID)
<ide> delete(pm.nameToID, p.Name())
<ide> pm.save()
<del> return nil
<add> return os.RemoveAll(filepath.Join(pm.libRoot, p.PluginObj.ID))
<ide> }
<ide>
<ide> func (pm *Manager) set(p *plugin, args []string) error { | 2 |
Text | Text | add single arg scenario for util.format | 78af0df25ed803a59ba39cd7c869bac8d87f0548 | <ide><path>doc/api/util.md
<ide> Each argument is converted to a string using `util.inspect()`.
<ide> util.format(1, 2, 3); // '1 2 3'
<ide> ```
<ide>
<add>If only one argument is passed to `util.format()`, it is returned as it is
<add>without any formatting.
<add>
<add>```js
<add>util.format('%% %s'); // '%% %s'
<add>```
<add>
<ide> ## util.inherits(constructor, superConstructor)
<ide> <!-- YAML
<ide> added: v0.3.0 | 1 |
Python | Python | add tests to check labels, kind and annotations | 62aa7965a32f1f8dde83cb9c763deef5b234092b | <ide><path>chart/tests/test_basic_helm_chart.py
<ide> import unittest
<ide> from subprocess import CalledProcessError
<ide> from typing import Any, Dict, List, Union
<add>from unittest import mock
<ide>
<ide> import jmespath
<ide> from parameterized import parameterized
<ide> def test_network_policies_are_valid(self):
<ide> for kind_name in expected_kind_names:
<ide> assert kind_name in kind_names_tuples
<ide>
<add> def test_labels_are_valid(self):
<add> """Test labels are correctly applied on all objects created by this chart"""
<add> release_name = "TEST-BASIC"
<add> k8s_objects = render_chart(
<add> name=release_name,
<add> values={
<add> "labels": {"label1": "value1", "label2": "value2"},
<add> "executor": "CeleryExecutor",
<add> "pgbouncer": {"enabled": True},
<add> "redis": {"enabled": True},
<add> "networkPolicies": {"enabled": True},
<add> },
<add> )
<add> kind_k8s_obj_labels_tuples = {
<add> (k8s_object['metadata']['name'], k8s_object['kind']): k8s_object['metadata']['labels']
<add> for k8s_object in k8s_objects
<add> }
<add>
<add> kind_names_tuples = [
<add> (f"{release_name}-airflow-config", "ConfigMap", "config"),
<add> (f"{release_name}-create-user", "Job", "create-user-job"),
<add> (f"{release_name}-flower", "Deployment", "flower"),
<add> (f"{release_name}-flower", "Service", "flower"),
<add> (f"{release_name}-flower-policy", "NetworkPolicy", "airflow-flower-policy"),
<add> (f"{release_name}-pgbouncer", "Deployment", "pgbouncer"),
<add> (f"{release_name}-pgbouncer", "Service", "pgbouncer"),
<add> (f"{release_name}-pgbouncer-policy", "NetworkPolicy", "airflow-pgbouncer-policy"),
<add> (f"{release_name}-redis", "Service", "redis"),
<add> (f"{release_name}-redis", "StatefulSet", "redis"),
<add> (f"{release_name}-redis-policy", "NetworkPolicy", "redis-policy"),
<add> (f"{release_name}-run-airflow-migrations", "Job", "run-airflow-migrations"),
<add> (f"{release_name}-scheduler", "Deployment", "scheduler"),
<add> (f"{release_name}-scheduler-policy", "NetworkPolicy", "airflow-scheduler-policy"),
<add> (f"{release_name}-statsd", "Deployment", "statsd"),
<add> (f"{release_name}-statsd", "Service", "statsd"),
<add> (f"{release_name}-statsd-policy", "NetworkPolicy", "statsd-policy"),
<add> (f"{release_name}-webserver", "Deployment", "webserver"),
<add> (f"{release_name}-webserver", "Service", "webserver"),
<add> (f"{release_name}-webserver-policy", "NetworkPolicy", "airflow-webserver-policy"),
<add> (f"{release_name}-worker", "Service", "worker"),
<add> (f"{release_name}-worker", "StatefulSet", "worker"),
<add> (f"{release_name}-worker-policy", "NetworkPolicy", "airflow-worker-policy"),
<add> ]
<add> for k8s_object_name, kind, component in kind_names_tuples:
<add> assert kind_k8s_obj_labels_tuples[(k8s_object_name, kind)] == {
<add> "label1": "value1",
<add> "label2": "value2",
<add> "tier": "airflow",
<add> "release": release_name,
<add> "component": component,
<add> "heritage": "Helm",
<add> "chart": mock.ANY,
<add> }
<add>
<add> def test_annotations_on_airflow_pods_in_deployment(self):
<add> """
<add> Test Annotations are correctly applied on all pods created Scheduler, Webserver & Worker
<add> deployments.
<add> """
<add> release_name = "TEST-BASIC"
<add> k8s_objects = render_chart(
<add> name=release_name,
<add> values={
<add> "airflowPodAnnotations": {"test-annotation/safe-to-evict": "true"},
<add> "executor": "CeleryExecutor",
<add> },
<add> show_only=[
<add> "templates/scheduler/scheduler-deployment.yaml",
<add> "templates/workers/worker-deployment.yaml",
<add> "templates/webserver/webserver-deployment.yaml",
<add> ],
<add> )
<add>
<add> assert len(k8s_objects) == 3
<add>
<add> for k8s_object in k8s_objects:
<add> annotations = k8s_object["spec"]["template"]["metadata"]["annotations"]
<add> assert "test-annotation/safe-to-evict" in annotations
<add> assert "true" in annotations.get("test-annotation/safe-to-evict")
<add>
<ide> def test_chart_is_consistent_with_official_airflow_image(self):
<ide> def get_k8s_objs_with_image(obj: Union[List[Any], Dict[str, Any]]) -> List[Dict[str, Any]]:
<ide> """
<ide><path>chart/tests/test_scheduler.py
<ide> import unittest
<ide>
<ide> import jmespath
<add>from parameterized import parameterized
<ide>
<ide> from tests.helm_template_generator import render_chart
<ide>
<ide>
<ide> class SchedulerTest(unittest.TestCase):
<add> @parameterized.expand(
<add> [
<add> ("CeleryExecutor", False, "Deployment"),
<add> ("CeleryExecutor", True, "Deployment"),
<add> ("CeleryKubernetesExecutor", True, "Deployment"),
<add> ("KubernetesExecutor", True, "Deployment"),
<add> ("LocalExecutor", True, "StatefulSet"),
<add> ("LocalExecutor", False, "Deployment"),
<add> ]
<add> )
<add> def test_scheduler_kind(self, executor, persistence, kind):
<add> """
<add> Test scheduler kind is StatefulSet only when using LocalExecutor &
<add> worker persistence is enabled.
<add> """
<add> docs = render_chart(
<add> values={
<add> "executor": executor,
<add> "workers": {"persistence": {"enabled": persistence}},
<add> },
<add> show_only=["templates/scheduler/scheduler-deployment.yaml"],
<add> )
<add>
<add> assert kind == jmespath.search("kind", docs[0])
<add>
<ide> def test_should_add_extra_containers(self):
<ide> docs = render_chart(
<ide> values={
<ide><path>chart/tests/test_worker.py
<ide> import unittest
<ide>
<ide> import jmespath
<add>from parameterized import parameterized
<ide>
<ide> from tests.helm_template_generator import render_chart
<ide>
<ide>
<ide> class WorkerTest(unittest.TestCase):
<add> @parameterized.expand(
<add> [
<add> ("CeleryExecutor", False, "Deployment"),
<add> ("CeleryExecutor", True, "StatefulSet"),
<add> ("CeleryKubernetesExecutor", False, "Deployment"),
<add> ("CeleryKubernetesExecutor", True, "StatefulSet"),
<add> ]
<add> )
<add> def test_worker_kind(self, executor, persistence, kind):
<add> """
<add> Test worker kind is StatefulSet when worker persistence is enabled.
<add> """
<add> docs = render_chart(
<add> values={
<add> "executor": executor,
<add> "workers": {"persistence": {"enabled": persistence}},
<add> },
<add> show_only=["templates/workers/worker-deployment.yaml"],
<add> )
<add>
<add> assert kind == jmespath.search("kind", docs[0])
<add>
<ide> def test_should_add_extra_containers(self):
<ide> docs = render_chart(
<ide> values={ | 3 |
Python | Python | set version to 2.3.2 | 7ea2cc76508cdcd6d854381f00f1da79309a0df3 | <ide><path>spacy/about.py
<ide> # fmt: off
<ide> __title__ = "spacy"
<del>__version__ = "2.3.1"
<add>__version__ = "2.3.2"
<ide> __release__ = True
<ide> __download_url__ = "https://github.com/explosion/spacy-models/releases/download"
<ide> __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json" | 1 |
Text | Text | fix text to follow portuguese language syntax | e2157d0eeaff9b9e9d96896a0efaa31ca7892f5e | <ide><path>curriculum/challenges/portuguese/01-responsive-web-design/css-flexbox/use-the-flex-grow-property-to-expand-items.portuguese.md
<ide> localeTitle: Use a propriedade flex-grow para expandir itens
<ide> ---
<ide>
<ide> ## Description
<del><section id="description"> O oposto de <code>flex-shrink</code> é a propriedade <code>flex-grow</code> . Lembre-se de que o <code>flex-shrink</code> controla o tamanho dos itens quando o contêiner encolhe. A propriedade <code>flex-grow</code> controla o tamanho dos itens quando o contêiner pai se expande. Usando um exemplo semelhante do último desafio, se um item tiver um valor de <code>flex-grow</code> de 1 e o outro tiver um valor de <code>flex-grow</code> de 3, aquele com o valor de 3 crescerá três vezes mais que o outro. </section>
<add><section id="description"> O oposto de <code>flex-shrink</code> é a propriedade <code>flex-grow</code> . Lembre-se de que <code>flex-shrink</code> controla o tamanho dos itens quando o contêiner encolhe. A propriedade <code>flex-grow</code> controla o tamanho dos itens quando o contêiner pai se expande.
<add>Usando um exemplo semelhante do último desafio, se um item tiver um valor de <code>flex-grow</code> igual a 1 e o outro tiver um valor de <code>flex-grow</code> igual a 3, aquele com o valor de 3 crescerá três vezes mais que o outro. </section>
<ide>
<ide> ## Instructions
<del><section id="instructions"> Adicione o <code>flex-grow</code> propriedade CSS a <code>#box-1</code> e <code>#box-2</code> . Dê <code>#box-1</code> um valor de 1 e <code>#box-2</code> um valor de 2. </section>
<add><section id="instructions"> Adicione a propriedade CSS <code>flex-grow</code> a ambos <code>#box-1</code> e <code>#box-2</code> . Dê a <code>#box-1</code> um valor de 1 e a <code>#box-2</code> um valor de 2. </section>
<ide>
<ide> ## Tests
<ide> <section id='tests'> | 1 |
Go | Go | fix event removal | 670564d07096c190011bf7adf74afe4d61ef9720 | <ide><path>server/server.go
<ide> func (srv *Server) ContainerKill(job *engine.Job) engine.Status {
<ide> return engine.StatusOK
<ide> }
<ide>
<add>func (srv *Server) EvictListener(from string) {
<add> srv.Lock()
<add> if old, ok := srv.listeners[from]; ok {
<add> delete(srv.listeners, from)
<add> close(old)
<add> }
<add> srv.Unlock()
<add>}
<add>
<ide> func (srv *Server) Events(job *engine.Job) engine.Status {
<ide> if len(job.Args) != 1 {
<ide> return job.Errorf("Usage: %s FROM", job.Name)
<ide> func (srv *Server) Events(job *engine.Job) engine.Status {
<ide> return fmt.Errorf("JSON error")
<ide> }
<ide> _, err = job.Stdout.Write(b)
<del> if err != nil {
<del> // On error, evict the listener
<del> utils.Errorf("%s", err)
<del> srv.Lock()
<del> delete(srv.listeners, from)
<del> srv.Unlock()
<del> return err
<del> }
<del> return nil
<add> return err
<ide> }
<ide>
<ide> listener := make(chan utils.JSONMessage)
<ide> func (srv *Server) Events(job *engine.Job) engine.Status {
<ide> continue
<ide> }
<ide> if err != nil {
<del> job.Error(err)
<del> return engine.StatusErr
<add> // On error, evict the listener
<add> srv.EvictListener(from)
<add> return job.Error(err)
<ide> }
<ide> }
<ide> }
<ide> func (srv *Server) Events(job *engine.Job) engine.Status {
<ide> continue
<ide> }
<ide> if err != nil {
<add> // On error, evict the listener
<add> srv.EvictListener(from)
<ide> return job.Error(err)
<ide> }
<ide> case <-timeout.C: | 1 |
Python | Python | expend ccompiler tests to cover s390x features | 9cedd787f630fadcff4d256b89aa6ea19bdcdeef | <ide><path>numpy/distutils/tests/test_ccompiler_opt.py
<ide> def assert_(expr, msg=''):
<ide> ppc64le = ("gcc", "clang"),
<ide> armhf = ("gcc", "clang"),
<ide> aarch64 = ("gcc", "clang"),
<add> s390x = ("gcc", "clang"),
<ide> noarch = ("gcc",)
<ide> )
<ide>
<ide> def test_args_options(self):
<ide> if o == "native" and self.cc_name() == "msvc":
<ide> continue
<ide> self.expect(o,
<del> trap_files=".*cpu_(sse|vsx|neon).c",
<del> x86="", ppc64="", armhf=""
<add> trap_files=".*cpu_(sse|vsx|neon|vx).c",
<add> x86="", ppc64="", armhf="", s390x=""
<ide> )
<ide> self.expect(o,
<del> trap_files=".*cpu_(sse3|vsx2|neon_vfpv4).c",
<add> trap_files=".*cpu_(sse3|vsx2|neon_vfpv4|vxe).c",
<ide> x86="sse sse2", ppc64="vsx", armhf="neon neon_fp16",
<del> aarch64="", ppc64le=""
<add> aarch64="", ppc64le="", s390x="vx"
<ide> )
<ide> self.expect(o,
<ide> trap_files=".*cpu_(popcnt|vsx3).c",
<ide> x86="sse .* sse41", ppc64="vsx vsx2",
<del> armhf="neon neon_fp16 .* asimd .*"
<add> armhf="neon neon_fp16 .* asimd .*",
<add> s390x="vx vxe vxe2"
<ide> )
<ide> self.expect(o,
<ide> x86_gcc=".* xop fma4 .* avx512f .* avx512_knl avx512_knm avx512_skx .*",
<ide> def test_args_options(self):
<ide> # in msvc, avx512_knl avx512_knm aren't supported
<ide> x86_msvc=".* xop fma4 .* avx512f .* avx512_skx .*",
<ide> armhf=".* asimd asimdhp asimddp .*",
<del> ppc64="vsx vsx2 vsx3.*"
<add> ppc64="vsx vsx2 vsx3.*",
<add> s390x="vx vxe vxe2.*"
<ide> )
<ide> # min
<ide> self.expect("min",
<ide> x86="sse sse2", x64="sse sse2 sse3",
<ide> armhf="", aarch64="neon neon_fp16 .* asimd",
<del> ppc64="", ppc64le="vsx vsx2"
<add> ppc64="", ppc64le="vsx vsx2", s390x=""
<ide> )
<ide> self.expect(
<ide> "min", trap_files=".*cpu_(sse2|vsx2).c",
<ide> def test_args_options(self):
<ide> try:
<ide> self.expect("native",
<ide> trap_flags=".*(-march=native|-xHost|/QxHost).*",
<del> x86=".*", ppc64=".*", armhf=".*"
<add> x86=".*", ppc64=".*", armhf=".*", s390x=".*"
<ide> )
<ide> if self.march() != "unknown":
<ide> raise AssertionError(
<ide> def test_args_options(self):
<ide>
<ide> def test_flags(self):
<ide> self.expect_flags(
<del> "sse sse2 vsx vsx2 neon neon_fp16",
<add> "sse sse2 vsx vsx2 neon neon_fp16 vx vxe",
<ide> x86_gcc="-msse -msse2", x86_icc="-msse -msse2",
<ide> x86_iccw="/arch:SSE2",
<ide> x86_msvc="/arch:SSE2" if self.march() == "x86" else "",
<ide> ppc64_gcc= "-mcpu=power8",
<ide> ppc64_clang="-maltivec -mvsx -mpower8-vector",
<ide> armhf_gcc="-mfpu=neon-fp16 -mfp16-format=ieee",
<del> aarch64=""
<add> aarch64="",
<add> s390="-mzvector -march=arch12"
<ide> )
<ide> # testing normalize -march
<ide> self.expect_flags(
<ide> def test_targets_exceptions(self):
<ide> try:
<ide> self.expect_targets(
<ide> targets,
<del> x86="", armhf="", ppc64=""
<add> x86="", armhf="", ppc64="", s390x=""
<ide> )
<ide> if self.march() != "unknown":
<ide> raise AssertionError(
<ide> def test_targets_exceptions(self):
<ide>
<ide> def test_targets_syntax(self):
<ide> for targets in (
<del> "/*@targets $keep_baseline sse vsx neon*/",
<del> "/*@targets,$keep_baseline,sse,vsx,neon*/",
<del> "/*@targets*$keep_baseline*sse*vsx*neon*/",
<add> "/*@targets $keep_baseline sse vsx neon vx*/",
<add> "/*@targets,$keep_baseline,sse,vsx,neon vx*/",
<add> "/*@targets*$keep_baseline*sse*vsx*neon*vx*/",
<ide> """
<ide> /*
<ide> ** @targets
<del> ** $keep_baseline, sse vsx,neon
<add> ** $keep_baseline, sse vsx,neon, vx
<ide> */
<ide> """,
<ide> """
<ide> /*
<del> ************@targets*************
<del> ** $keep_baseline, sse vsx, neon
<del> *********************************
<add> ************@targets****************
<add> ** $keep_baseline, sse vsx, neon, vx
<add> ************************************
<ide> */
<ide> """,
<ide> """
<ide> /*
<ide> /////////////@targets/////////////////
<del> //$keep_baseline//sse//vsx//neon
<add> //$keep_baseline//sse//vsx//neon//vx
<ide> /////////////////////////////////////
<ide> */
<ide> """,
<ide> """
<ide> /*
<ide> @targets
<ide> $keep_baseline
<del> SSE VSX NEON*/
<add> SSE VSX NEON VX*/
<ide> """
<ide> ) :
<ide> self.expect_targets(targets,
<del> x86="sse", ppc64="vsx", armhf="neon", unknown=""
<add> x86="sse", ppc64="vsx", armhf="neon", s390x="vx", unknown=""
<ide> )
<ide>
<ide> def test_targets(self):
<ide> def test_targets(self):
<ide> sse sse2 sse41 avx avx2 avx512f
<ide> vsx vsx2 vsx3
<ide> neon neon_fp16 asimdhp asimddp
<add> vx vxe vxe2
<ide> */
<ide> """,
<del> baseline="avx vsx2 asimd",
<del> x86="avx512f avx2", armhf="asimddp asimdhp", ppc64="vsx3"
<add> baseline="avx vsx2 asimd vx vxe",
<add> x86="avx512f avx2", armhf="asimddp asimdhp", ppc64="vsx3",
<add> s390x="vxe2"
<ide> )
<ide> # test skipping non-dispatch features
<ide> self.expect_targets(
<ide> def test_targets(self):
<ide> sse41 avx avx2 avx512f
<ide> vsx2 vsx3
<ide> asimd asimdhp asimddp
<add> vx vxe vxe2
<ide> */
<ide> """,
<del> baseline="", dispatch="sse41 avx2 vsx2 asimd asimddp",
<del> x86="avx2 sse41", armhf="asimddp asimd", ppc64="vsx2"
<add> baseline="", dispatch="sse41 avx2 vsx2 asimd asimddp vxe2",
<add> x86="avx2 sse41", armhf="asimddp asimd", ppc64="vsx2", s390x="vxe2"
<ide> )
<ide> # test skipping features that not supported
<ide> self.expect_targets(
<ide> def test_targets(self):
<ide> sse2 sse41 avx2 avx512f
<ide> vsx2 vsx3
<ide> neon asimdhp asimddp
<add> vx vxe vxe2
<ide> */
<ide> """,
<ide> baseline="",
<del> trap_files=".*(avx2|avx512f|vsx3|asimddp).c",
<del> x86="sse41 sse2", ppc64="vsx2", armhf="asimdhp neon"
<add> trap_files=".*(avx2|avx512f|vsx3|asimddp|vxe2).c",
<add> x86="sse41 sse2", ppc64="vsx2", armhf="asimdhp neon",
<add> s390x="vxe vx"
<ide> )
<ide> # test skipping features that implies each other
<ide> self.expect_targets(
<ide> def test_targets_policies(self):
<ide> sse2 sse42 avx2 avx512f
<ide> vsx2 vsx3
<ide> neon neon_vfpv4 asimd asimddp
<add> vx vxe vxe2
<ide> */
<ide> """,
<del> baseline="sse41 avx2 vsx2 asimd vsx3",
<add> baseline="sse41 avx2 vsx2 asimd vsx3 vxe",
<ide> x86="avx512f avx2 sse42 sse2",
<ide> ppc64="vsx3 vsx2",
<ide> armhf="asimddp asimd neon_vfpv4 neon",
<ide> # neon, neon_vfpv4, asimd implies each other
<del> aarch64="asimddp asimd"
<add> aarch64="asimddp asimd",
<add> s390x="vxe2 vxe vx"
<ide> )
<ide> # 'keep_sort', leave the sort as-is
<ide> self.expect_targets(
<ide> def test_targets_policies(self):
<ide> avx512f sse42 avx2 sse2
<ide> vsx2 vsx3
<ide> asimd neon neon_vfpv4 asimddp
<add> vxe vxe2
<ide> */
<ide> """,
<ide> x86="avx512f sse42 avx2 sse2",
<ide> ppc64="vsx2 vsx3",
<ide> armhf="asimd neon neon_vfpv4 asimddp",
<ide> # neon, neon_vfpv4, asimd implies each other
<del> aarch64="asimd asimddp"
<add> aarch64="asimd asimddp",
<add> s390x="vxe vxe2"
<ide> )
<ide> # 'autovec', skipping features that can't be
<ide> # vectorized by the compiler
<ide> def test_targets_multi(self):
<ide> (sse41 avx sse42) (sse3 avx2 avx512f)
<ide> (vsx vsx3 vsx2)
<ide> (asimddp neon neon_vfpv4 asimd asimdhp)
<add> (vx vxe vxe2)
<ide> */
<ide> """,
<ide> x86="avx avx512f",
<ide> ppc64="vsx3",
<ide> armhf=r"\(asimdhp asimddp\)",
<add> s390x="vxe2"
<ide> )
<ide> # test compiler variety and avoiding duplicating
<ide> self.expect_targets( | 1 |
Python | Python | fix lint error for cloud_lib | 7ba78e942757b1911e93bd298ad07492ca8fc17e | <ide><path>official/utils/logs/cloud_lib.py
<ide>
<ide>
<ide> def on_gcp():
<del> """Detect whether the current running environment is on GCP"""
<add> """Detect whether the current running environment is on GCP."""
<ide> try:
<ide> response = requests.get(GCP_METADATA_URL, headers=GCP_METADATA_HEADER)
<ide> return response.status_code == 200 | 1 |