code
stringlengths 1
5.19M
| package
stringlengths 1
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
OPENAPI_FETCH_REQUEST,
OPENAPI_FETCH_SUCCESS,
OPENAPI_FETCH_FAIL,
} from '../actions/openapi'
// Reducer for the OpenAPI (swagger) document describing the Zuul API.
// State shape: { isFetching, openapi } where openapi is the last
// successfully fetched document (or null before the first fetch).
const openapiReducer = (state = {
  isFetching: false,
  openapi: null,
}, action) => {
  switch (action.type) {
    case OPENAPI_FETCH_REQUEST:
    case OPENAPI_FETCH_FAIL:
      // Bug fix: the previous document was stored under a stray
      // `tenant` key (copy/paste from the info reducer); keep it under
      // `openapi` so the declared state shape is preserved and the old
      // document stays available while refetching.
      // NOTE(review): FAIL leaving isFetching true mirrors the
      // original grouping with REQUEST; confirm whether a failed fetch
      // should stop the spinner instead.
      return {
        isFetching: true,
        openapi: state.openapi,
      }
    case OPENAPI_FETCH_SUCCESS:
      return {
        isFetching: false,
        openapi: action.openapi,
      }
    default:
      return state
  }
}
export default openapiReducer
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/openapi.js
|
openapi.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import { TENANT_SET } from '../actions/tenant'
// undefined name means we haven't loaded anything yet; null means
// outside of tenant context.
// Holds the currently selected tenant object. TENANT_SET replaces it
// wholesale; every other action leaves it untouched. A name of
// undefined means nothing has been loaded yet; null means we are
// outside of any tenant context.
export default (state = { name: undefined }, action) => {
  if (action.type === TENANT_SET) {
    return action.tenant
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/tenant.js
|
tenant.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
JOBS_FETCH_FAIL,
JOBS_FETCH_REQUEST,
JOBS_FETCH_SUCCESS
} from '../actions/jobs'
// Per-tenant job lists, keyed by tenant name: jobs[tenant] -> [job].
export default (state = {
  isFetching: false,
  jobs: {},
}, action) => {
  const { type } = action
  if (type === JOBS_FETCH_REQUEST) {
    // Keep previously fetched lists visible while the refresh runs.
    return { isFetching: true, jobs: state.jobs }
  }
  if (type === JOBS_FETCH_SUCCESS) {
    // Store the fetched list under its tenant name.
    return {
      isFetching: false,
      jobs: { ...state.jobs, [action.tenant]: action.jobs },
    }
  }
  if (type === JOBS_FETCH_FAIL) {
    return { isFetching: false, jobs: state.jobs }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/jobs.js
|
jobs.js
|
// Copyright 2018 Red Hat, Inc
// Copyright 2022 Acme Gating, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
SEMAPHORES_FETCH_FAIL,
SEMAPHORES_FETCH_REQUEST,
SEMAPHORES_FETCH_SUCCESS
} from '../actions/semaphores'
// Per-tenant semaphore lists, keyed by tenant name.
export default (state = {
  isFetching: false,
  semaphores: {},
}, action) => {
  const { type } = action
  if (type === SEMAPHORES_FETCH_REQUEST) {
    // Keep showing stale data while the refresh is in flight.
    return { isFetching: true, semaphores: state.semaphores }
  }
  if (type === SEMAPHORES_FETCH_SUCCESS) {
    return {
      isFetching: false,
      semaphores: { ...state.semaphores, [action.tenant]: action.semaphores }
    }
  }
  if (type === SEMAPHORES_FETCH_FAIL) {
    return { isFetching: false, semaphores: state.semaphores }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/semaphores.js
|
semaphores.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
PROJECT_FETCH_FAIL,
PROJECT_FETCH_REQUEST,
PROJECT_FETCH_SUCCESS
} from '../actions/project'
// Per-tenant project details, keyed as
// projects[tenant][projectName] -> project object.
const projectReducer = (state = {
  isFetching: false,
  projects: {},
}, action) => {
  switch (action.type) {
    case PROJECT_FETCH_REQUEST:
      // Keep previously fetched projects available while loading.
      return {
        isFetching: true,
        projects: state.projects,
      }
    case PROJECT_FETCH_SUCCESS:
      // Merge the fetched project into its tenant's map. Spreading
      // state.projects[action.tenant] is a no-op when the tenant has
      // no entry yet, so the pre-seeding conditional the original
      // used (via a `state_projects` temporary) was redundant; this
      // also matches the pattern used by the job reducer.
      return {
        isFetching: false,
        projects: {
          ...state.projects,
          [action.tenant]: {
            ...state.projects[action.tenant],
            [action.projectName]: action.project,
          },
        },
      }
    case PROJECT_FETCH_FAIL:
      return {
        isFetching: false,
        projects: state.projects,
      }
    default:
      return state
  }
}
export default projectReducer
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/project.js
|
project.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
ADD_NOTIFICATION,
CLEAR_NOTIFICATION,
CLEAR_NOTIFICATIONS,
addApiError,
} from '../actions/notifications'
// Reducer for the notification drawer: a flat array of notification
// objects, each tagged with an id and creation timestamp.
export default (state = [], action) => {
  // Rewrite API failure actions into error notifications before
  // dispatching on the action type.
  // TODO: Are these still used?
  let effective = action
  if (effective.notification && effective.type.match(/.*_FETCH_FAIL$/)) {
    effective = addApiError(effective.notification)
  }
  // Intercept Admin API failures
  if (effective.notification && effective.type.match(/ADMIN_.*_FAIL$/)) {
    effective = addApiError(effective.notification)
  }
  switch (effective.type) {
    case ADD_NOTIFICATION: {
      // Drop exact duplicates (same url and status) of an existing
      // notification.
      const isDuplicate = state.some((n) => (
        n.url === effective.notification.url &&
        n.status === effective.notification.status))
      if (isDuplicate) {
        return state
      }
      return [
        ...state,
        { ...effective.notification, id: effective.id, date: Date.now() },
      ]
    }
    case CLEAR_NOTIFICATION:
      return state.filter((n) => (n.id !== effective.id))
    case CLEAR_NOTIFICATIONS:
      return []
    default:
      return state
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/notifications.js
|
notifications.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import { TENANT_SET } from '../actions/tenant'
import {
STATUS_FETCH_FAIL,
STATUS_FETCH_REQUEST,
STATUS_FETCH_SUCCESS
} from '../actions/status'
// Live pipeline status for the selected tenant. Switching tenants
// (TENANT_SET) discards the previous tenant's status entirely.
export default (state = {
  isFetching: false,
  status: null
}, action) => {
  const { type } = action
  if (type === TENANT_SET) {
    return { isFetching: false, status: null }
  }
  if (type === STATUS_FETCH_REQUEST) {
    // Keep the stale status on screen while refreshing.
    return { isFetching: true, status: state.status }
  }
  if (type === STATUS_FETCH_SUCCESS) {
    return { isFetching: false, status: action.status }
  }
  if (type === STATUS_FETCH_FAIL) {
    return { isFetching: false, status: state.status }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/status.js
|
status.js
|
// Initial redux state for the reducers that look their slice up here
// (build, component and logfile do; the other reducers in this
// directory declare their defaults inline).
export default {
  build: {
    // Build/buildset objects keyed by their id.
    builds: {},
    buildsets: {},
    // Store outputs, manifest, hosts and errorIds separately from the build.
    // This allows us to fetch everything in parallel and we don't have to wait
    // until the build is available. We also don't have to update the actual
    // build object with new information everytime.
    // To simplify the usage we can map everything into a single build object
    // in the mapStateToProps() function in the build page.
    outputs: {},
    manifests: {},
    hosts: {},
    errorIds: {},
    // Independent flags so the build, its output and its manifest can
    // be fetched concurrently.
    isFetching: false,
    isFetchingOutput: false,
    isFetchingManifest: false,
  },
  component: {
    // undefined until the first fetch completes.
    components: undefined,
    isFetching: false,
  },
  logfile: {
    // Store files by buildId->filename->content
    files: {},
    isFetching: true,
    url: null,
  },
  // NOTE(review): the auth and user reducers visible elsewhere in this
  // package declare their own defaults; confirm whether these two
  // entries are still consumed anywhere.
  auth: {},
  user: {},
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/initialState.js
|
initialState.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
NODES_FETCH_FAIL,
NODES_FETCH_REQUEST,
NODES_FETCH_SUCCESS
} from '../actions/nodes'
// Nodepool node list shown on the nodes page, with the timestamp of
// the last successful fetch.
export default (state = {
  receivedAt: 0,
  isFetching: false,
  nodes: [],
}, action) => {
  const { type } = action
  if (type === NODES_FETCH_REQUEST) {
    return { ...state, isFetching: true }
  }
  if (type === NODES_FETCH_SUCCESS) {
    const { nodes, receivedAt } = action
    return { ...state, isFetching: false, nodes, receivedAt }
  }
  if (type === NODES_FETCH_FAIL) {
    // Keep the previously fetched nodes; just stop the spinner.
    return { ...state, isFetching: false }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/nodes.js
|
nodes.js
|
// Copyright 2020 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
PREFERENCE_SET,
} from '../actions/preferences'
import { resolveDarkMode, setDarkMode } from '../Misc'
// Seed the preferences from localStorage so user choices survive page
// reloads; fall back to defaults on first visit.
const stored_prefs = localStorage.getItem('preferences')
let default_prefs
if (stored_prefs === null) {
  default_prefs = {
    autoReload: true,
    theme: 'Auto'
  }
} else {
  default_prefs = JSON.parse(stored_prefs)
}
// Preferences reducer. Besides returning the new state it applies the
// theme as a side effect (setDarkMode) and persists changes to
// localStorage. `darkMode` is derived from `theme` and is deliberately
// excluded from what gets persisted.
export default (state = {
  ...default_prefs
}, action) => {
  if (action.type === PREFERENCE_SET) {
    // Persist everything except the derived darkMode flag.
    let newstate = { ...state, [action.key]: action.value }
    delete newstate.darkMode
    localStorage.setItem('preferences', JSON.stringify(newstate))
    let darkMode = resolveDarkMode(newstate.theme)
    setDarkMode(darkMode)
    return { ...newstate, darkMode: darkMode }
  }
  // Any other action: recompute and reapply darkMode (relevant when
  // theme is 'Auto' and the OS theme changed).
  // NOTE(review): this runs setDarkMode on every dispatched action —
  // presumably cheap, but confirm that is intended.
  let darkMode = resolveDarkMode(state.theme)
  setDarkMode(darkMode)
  return { ...state, darkMode: darkMode }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/preferences.js
|
preferences.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
CHANGE_FETCH_FAIL,
CHANGE_FETCH_REQUEST,
CHANGE_FETCH_SUCCESS
} from '../actions/change'
// The change currently displayed on the change page (or null).
export default (state = {
  isFetching: false,
  change: null
}, action) => {
  const { type } = action
  if (type === CHANGE_FETCH_REQUEST) {
    // Keep the stale change visible while refreshing.
    return { isFetching: true, change: state.change }
  }
  if (type === CHANGE_FETCH_SUCCESS) {
    return { isFetching: false, change: action.change }
  }
  if (type === CHANGE_FETCH_FAIL) {
    return { isFetching: false, change: state.change }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/change.js
|
change.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
TENANTS_FETCH_FAIL,
TENANTS_FETCH_REQUEST,
TENANTS_FETCH_SUCCESS
} from '../actions/tenants'
// The list of tenants shown on the tenants page (multi-tenant mode).
export default (state = {
  isFetching: false,
  tenants: []
}, action) => {
  const { type } = action
  if (type === TENANTS_FETCH_REQUEST) {
    // Keep the stale list visible while refreshing.
    return { isFetching: true, tenants: state.tenants }
  }
  if (type === TENANTS_FETCH_SUCCESS) {
    return { isFetching: false, tenants: action.tenants }
  }
  if (type === TENANTS_FETCH_FAIL) {
    return { isFetching: false, tenants: state.tenants }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/tenants.js
|
tenants.js
|
// Copyright 2018 Red Hat, Inc
// Copyright 2022 Acme Gating, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
FREEZE_JOB_FETCH_FAIL,
FREEZE_JOB_FETCH_REQUEST,
FREEZE_JOB_FETCH_SUCCESS
} from '../actions/freezejob'
// Per-tenant frozen-job results, keyed as
// freezeJobs[tenant][freezeJobKey] -> frozen job.
const freezeJobReducer = (state = {
  isFetching: false,
  freezeJobs: {},
}, action) => {
  switch (action.type) {
    case FREEZE_JOB_FETCH_REQUEST:
      // Keep previously fetched results available while loading.
      return {
        isFetching: true,
        freezeJobs: state.freezeJobs,
      }
    case FREEZE_JOB_FETCH_SUCCESS:
      // Merge the result into its tenant's map. Spreading
      // state.freezeJobs[action.tenant] is a no-op when the tenant
      // has no entry yet, so the pre-seeding conditional the original
      // used (via a `stateFreezeJobs` temporary) was redundant; this
      // matches the pattern used by the job reducer.
      return {
        isFetching: false,
        freezeJobs: {
          ...state.freezeJobs,
          [action.tenant]: {
            ...state.freezeJobs[action.tenant],
            [action.freezeJobKey]: action.freezeJob,
          },
        },
      }
    case FREEZE_JOB_FETCH_FAIL:
      return {
        isFetching: false,
        freezeJobs: state.freezeJobs,
      }
    default:
      return state
  }
}
export default freezeJobReducer
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/freezejob.js
|
freezejob.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
JOB_FETCH_FAIL,
JOB_FETCH_REQUEST,
JOB_FETCH_SUCCESS
} from '../actions/job'
// Per-tenant job details, keyed as jobs[tenant][jobname] -> job.
export default (state = {
  isFetching: false,
  jobs: {},
}, action) => {
  const { type } = action
  if (type === JOB_FETCH_REQUEST) {
    // Keep previously fetched jobs available while loading.
    return { isFetching: true, jobs: state.jobs }
  }
  if (type === JOB_FETCH_SUCCESS) {
    // Merge the fetched job into its tenant's map.
    const tenantJobs = {
      ...state.jobs[action.tenant],
      [action.jobname]: action.job,
    }
    return {
      ...state,
      isFetching: false,
      jobs: { ...state.jobs, [action.tenant]: tenantJobs },
    }
  }
  if (type === JOB_FETCH_FAIL) {
    return { isFetching: false, jobs: state.jobs }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/job.js
|
job.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
LOGFILE_FETCH_FAIL,
LOGFILE_FETCH_REQUEST,
LOGFILE_FETCH_SUCCESS,
} from '../actions/logfile'
import initialState from './initialState'
// Log file contents, stored per build: files[buildId][fileName].
export default (state = initialState.logfile, action) => {
  const { type } = action
  if (type === LOGFILE_FETCH_REQUEST) {
    return { ...state, isFetching: true, url: action.url }
  }
  if (type === LOGFILE_FETCH_SUCCESS) {
    const { buildId, fileName, fileContent } = action
    // Merge the new file into the build's existing file map; the
    // spread is a no-op when this is the build's first file.
    const filesForBuild = { ...state.files[buildId], [fileName]: fileContent }
    return {
      ...state,
      isFetching: false,
      files: { ...state.files, [buildId]: filesForBuild },
    }
  }
  if (type === LOGFILE_FETCH_FAIL) {
    return { ...state, isFetching: false }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/logfile.js
|
logfile.js
|
// Copyright 2018 Red Hat, Inc
// Copyright 2022 Acme Gating, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
PIPELINES_FETCH_FAIL,
PIPELINES_FETCH_REQUEST,
PIPELINES_FETCH_SUCCESS
} from '../actions/pipelines'
// Per-tenant pipeline lists, keyed by tenant name.
export default (state = {
  isFetching: false,
  pipelines: {},
}, action) => {
  const { type } = action
  if (type === PIPELINES_FETCH_REQUEST) {
    // Keep previously fetched lists visible while loading.
    return { isFetching: true, pipelines: state.pipelines }
  }
  if (type === PIPELINES_FETCH_SUCCESS) {
    return {
      isFetching: false,
      pipelines: { ...state.pipelines, [action.tenant]: action.pipelines },
    }
  }
  if (type === PIPELINES_FETCH_FAIL) {
    return { isFetching: false, pipelines: state.pipelines }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/pipelines.js
|
pipelines.js
|
// Copyright 2021 BMW Group
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as types from '../actions/actionTypes'
import initialState from './initialState'
// Zuul component registry shown on the components page.
export default (state = initialState.component, action) => {
  const { type } = action
  if (type === types.COMPONENTS_FETCH_REQUEST) {
    return { ...state, isFetching: true }
  }
  if (type === types.COMPONENTS_FETCH_SUCCESS) {
    return { ...state, isFetching: false, components: action.components }
  }
  if (type === types.COMPONENTS_FETCH_FAIL) {
    // Reset to an empty map on failure.
    return { ...state, isFetching: false, components: {} }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/component.js
|
component.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import { combineReducers } from 'redux'
import auth from './auth'
import autoholds from './autoholds'
import configErrors from './configErrors'
import change from './change'
import component from './component'
import freezejob from './freezejob'
import notifications from './notifications'
import build from './build'
import info from './info'
import job from './job'
import jobgraph from './jobgraph'
import jobs from './jobs'
import labels from './labels'
import logfile from './logfile'
import nodes from './nodes'
import openapi from './openapi'
import project from './project'
import pipelines from './pipelines'
import projects from './projects'
import preferences from './preferences'
import semaphores from './semaphores'
import status from './status'
import tenant from './tenant'
import tenants from './tenants'
import timezone from './timezone'
import user from './user'
// Map of state-slice name -> reducer, combined below into the single
// root reducer for the redux store. Keys here define the top-level
// shape of the application state.
const reducers = {
  auth,
  autoholds,
  build,
  change,
  component,
  configErrors,
  freezejob,
  notifications,
  info,
  job,
  jobgraph,
  jobs,
  labels,
  logfile,
  nodes,
  openapi,
  pipelines,
  project,
  projects,
  semaphores,
  status,
  tenant,
  tenants,
  timezone,
  preferences,
  user,
}
export default combineReducers(reducers)
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/index.js
|
index.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
BUILD_FETCH_FAIL,
BUILD_FETCH_REQUEST,
BUILD_FETCH_SUCCESS,
BUILDSET_FETCH_FAIL,
BUILDSET_FETCH_REQUEST,
BUILDSET_FETCH_SUCCESS,
BUILD_OUTPUT_FAIL,
BUILD_OUTPUT_REQUEST,
BUILD_OUTPUT_SUCCESS,
BUILD_OUTPUT_NOT_AVAILABLE,
BUILD_MANIFEST_FAIL,
BUILD_MANIFEST_REQUEST,
BUILD_MANIFEST_SUCCESS,
BUILD_MANIFEST_NOT_AVAILABLE,
} from '../actions/build'
import initialState from './initialState'
// Reducer for build/buildset pages. Builds, buildsets, outputs,
// manifests, hosts and error ids live in separate id-keyed maps so
// they can be fetched in parallel (see initialState.build). A failed
// fetch stores null under the id.
export default (state = initialState.build, action) => {
  const { type, buildId, buildsetId } = action
  if (type === BUILD_FETCH_REQUEST || type === BUILDSET_FETCH_REQUEST) {
    return { ...state, isFetching: true }
  }
  if (type === BUILD_FETCH_SUCCESS || type === BUILD_FETCH_FAIL) {
    const build = type === BUILD_FETCH_SUCCESS ? action.build : null
    return {
      ...state,
      builds: { ...state.builds, [buildId]: build },
      isFetching: false,
    }
  }
  if (type === BUILDSET_FETCH_SUCCESS || type === BUILDSET_FETCH_FAIL) {
    const buildset = type === BUILDSET_FETCH_SUCCESS ? action.buildset : null
    return {
      ...state,
      buildsets: { ...state.buildsets, [buildsetId]: buildset },
      isFetching: false,
    }
  }
  if (type === BUILD_OUTPUT_REQUEST) {
    return { ...state, isFetchingOutput: true }
  }
  if (type === BUILD_OUTPUT_SUCCESS ||
      type === BUILD_OUTPUT_FAIL ||
      type === BUILD_OUTPUT_NOT_AVAILABLE) {
    // Output, error ids and hosts always arrive (or fail) together.
    const ok = type === BUILD_OUTPUT_SUCCESS
    return {
      ...state,
      outputs: { ...state.outputs, [buildId]: ok ? action.output : null },
      errorIds: { ...state.errorIds, [buildId]: ok ? action.errorIds : null },
      hosts: { ...state.hosts, [buildId]: ok ? action.hosts : null },
      isFetchingOutput: false,
    }
  }
  if (type === BUILD_MANIFEST_REQUEST) {
    return { ...state, isFetchingManifest: true }
  }
  if (type === BUILD_MANIFEST_SUCCESS ||
      type === BUILD_MANIFEST_FAIL ||
      type === BUILD_MANIFEST_NOT_AVAILABLE) {
    const manifest = type === BUILD_MANIFEST_SUCCESS ? action.manifest : null
    return {
      ...state,
      manifests: { ...state.manifests, [buildId]: manifest },
      isFetchingManifest: false,
    }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/build.js
|
build.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
LABELS_FETCH_FAIL,
LABELS_FETCH_REQUEST,
LABELS_FETCH_SUCCESS
} from '../actions/labels'
// Per-tenant label lists, keyed by tenant name.
export default (state = {
  isFetching: false,
  labels: {},
}, action) => {
  const { type } = action
  if (type === LABELS_FETCH_REQUEST) {
    // Keep previously fetched lists visible while loading.
    return { isFetching: true, labels: state.labels }
  }
  if (type === LABELS_FETCH_SUCCESS) {
    return {
      isFetching: false,
      labels: { ...state.labels, [action.tenant]: action.labels }
    }
  }
  if (type === LABELS_FETCH_FAIL) {
    return { isFetching: false, labels: state.labels }
  }
  return state
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/labels.js
|
labels.js
|
// Copyright 2020 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
AUTOHOLDS_FETCH_FAIL,
AUTOHOLDS_FETCH_REQUEST,
AUTOHOLDS_FETCH_SUCCESS,
AUTOHOLD_FETCH_FAIL,
AUTOHOLD_FETCH_REQUEST,
AUTOHOLD_FETCH_SUCCESS
} from '../actions/autoholds'
// Autohold request state: the full request list (autoholds) and the
// single request being viewed (autohold) share one isFetching flag.
export default (state = {
  receivedAt: 0,
  isFetching: false,
  autoholds: [],
  autohold: null,
}, action) => {
  switch (action.type) {
    // Both request kinds just raise the shared spinner flag.
    case AUTOHOLDS_FETCH_REQUEST:
    case AUTOHOLD_FETCH_REQUEST:
      return { ...state, isFetching: true }
    case AUTOHOLDS_FETCH_SUCCESS:
      return {
        ...state,
        isFetching: false,
        autoholds: action.autoholds,
        receivedAt: action.receivedAt,
      }
    case AUTOHOLD_FETCH_SUCCESS:
      return {
        ...state,
        isFetching: false,
        autohold: action.autohold,
        receivedAt: action.receivedAt,
      }
    // Both failure kinds keep the previous data and stop the spinner.
    case AUTOHOLDS_FETCH_FAIL:
    case AUTOHOLD_FETCH_FAIL:
      return { ...state, isFetching: false }
    default:
      return state
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/autoholds.js
|
autoholds.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
// Configuration errors reported by the API. `ready` flips to true
// once a fetch has completed (successfully or not) and back to false
// on CONFIGERRORS_CLEAR.
// Named (rather than an anonymous default export) so the reducer shows
// up usefully in stack traces and devtools.
// NOTE(review): unlike the other reducers here, the action types are
// string literals instead of constants imported from an actions
// module; the literals are kept byte-identical.
const configErrorsReducer = (state = { errors: [], ready: false }, action) => {
  switch (action.type) {
    case 'CONFIGERRORS_FETCH_SUCCESS':
      return { errors: action.errors, ready: true }
    case 'CONFIGERRORS_FETCH_FAIL':
      return { errors: [], ready: true }
    case 'CONFIGERRORS_CLEAR':
      return { errors: [], ready: false }
    default:
      return state
  }
}
export default configErrorsReducer
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/configErrors.js
|
configErrors.js
|
// Copyright 2018 Red Hat, Inc
// Copyright 2022 Acme Gating, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
JOB_GRAPH_FETCH_FAIL,
JOB_GRAPH_FETCH_REQUEST,
JOB_GRAPH_FETCH_SUCCESS
} from '../actions/jobgraph'
// Per-tenant job graphs, keyed as jobGraphs[tenant][jobGraphKey].
const jobGraphReducer = (state = {
  isFetching: false,
  jobGraphs: {},
}, action) => {
  switch (action.type) {
    case JOB_GRAPH_FETCH_REQUEST:
      // Keep previously fetched graphs available while loading.
      return {
        isFetching: true,
        jobGraphs: state.jobGraphs,
      }
    case JOB_GRAPH_FETCH_SUCCESS:
      // Merge the fetched graph into its tenant's map. Spreading
      // state.jobGraphs[action.tenant] is a no-op when the tenant has
      // no entry yet, so the pre-seeding conditional the original
      // used (via a `stateJobGraphs` temporary) was redundant; this
      // matches the pattern used by the job reducer.
      return {
        isFetching: false,
        jobGraphs: {
          ...state.jobGraphs,
          [action.tenant]: {
            ...state.jobGraphs[action.tenant],
            [action.jobGraphKey]: action.jobGraph,
          },
        },
      }
    case JOB_GRAPH_FETCH_FAIL:
      return {
        isFetching: false,
        jobGraphs: state.jobGraphs,
      }
    default:
      return state
  }
}
export default jobGraphReducer
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/jobgraph.js
|
jobgraph.js
|
// Copyright 2020 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
AUTH_CONFIG_REQUEST,
AUTH_CONFIG_SUCCESS,
AUTH_CONFIG_FAIL,
} from '../actions/auth'
// Load the defaults from local storage if it exists so that we
// construct the same AuthProvider we had before we navigated to the
// IDP redirect.
const stored_params = localStorage.getItem('zuul_auth_params')
let auth_params = {
authority: '',
client_id: '',
scope: '',
loadUserInfo: true,
}
if (stored_params !== null) {
auth_params = JSON.parse(stored_params)
}
// Auth configuration reducer. auth_params (authority, client_id,
// scope, ...) is persisted to localStorage (see the module-level code
// above the reducer in this file) so the same AuthProvider can be
// rebuilt after the IDP redirect.
export default (state = {
  isFetching: false,
  info: null,
  auth_params: auth_params,
}, action) => {
  // Serialized once up front; used below to compare the incoming
  // params against the current ones by value.
  const json_params = JSON.stringify(action.auth_params)
  switch (action.type) {
    case AUTH_CONFIG_REQUEST:
      return {
        ...state,
        isFetching: true,
        info: null,
      }
    case AUTH_CONFIG_SUCCESS:
      // Make sure we only update the auth_params object if something actually
      // changes. Otherwise, it will re-create the AuthProvider which
      // may cause errors with auth state if it happens concurrently with
      // a login.
      if (json_params === JSON.stringify(state.auth_params)) {
        return {
          ...state,
          isFetching: false,
          info: action.info,
        }
      } else {
        // Persist the new params before returning them so a redirect
        // can restore the same AuthProvider.
        localStorage.setItem('zuul_auth_params', json_params)
        return {
          ...state,
          isFetching: false,
          info: action.info,
          auth_params: action.auth_params,
        }
      }
    case AUTH_CONFIG_FAIL:
      return {
        ...state,
        isFetching: false,
        info: null,
      }
    default:
      return state
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/auth.js
|
auth.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import {
INFO_FETCH_REQUEST,
INFO_FETCH_SUCCESS,
INFO_FETCH_FAIL,
} from '../actions/info'
// Reducer for the top-level Zuul info endpoint (white-label tenant and
// API capabilities).  Named function so it is identifiable in stack
// traces and devtools; behavior is unchanged.
const infoReducer = (state = {
  isFetching: false,
  tenant: null,
  capabilities: null,
}, action) => {
  switch (action.type) {
    case INFO_FETCH_REQUEST:
    case INFO_FETCH_FAIL:
      // NOTE(review): FAIL is grouped with REQUEST and leaves
      // isFetching set, matching the original behavior -- confirm
      // whether a failed fetch should instead clear the flag.
      return {
        isFetching: true,
        tenant: null,
      }
    case INFO_FETCH_SUCCESS:
      return {
        isFetching: false,
        tenant: action.tenant,
        capabilities: action.capabilities,
        // `ready` only ever appears after a successful fetch.
        ready: true
      }
    default:
      return state
  }
}

export default infoReducer
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/info.js
|
info.js
|
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import { TIMEZONE_SET } from '../actions/timezone'
// Reducer holding the user-selected timezone (IANA zone name; defaults
// to 'UTC').  Named function so it shows up usefully in stack traces
// and devtools.
function timezoneReducer(state = 'UTC', action) {
  // Only TIMEZONE_SET changes the value; all other actions are ignored.
  return action.type === TIMEZONE_SET ? action.timezone : state
}

export default timezoneReducer
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/reducers/timezone.js
|
timezone.js
|
// Copyright 2020 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
import { USER_ACL_FAIL, USER_ACL_REQUEST, USER_ACL_SUCCESS } from './auth'
export const USER_LOGGED_IN = 'USER_LOGGED_IN'
export const USER_LOGGED_OUT = 'USER_LOGGED_OUT'

// Access tokens are not necessarily JWTs (Google OAUTH uses a custom
// format): if the access token does not parse as a JWT, fall back to
// the ID token.
export function getToken(user) {
  try {
    // A JWT carries a base64-encoded JSON payload as its second segment.
    JSON.parse(atob(user.access_token.split('.')[1]))
    return user.access_token
  } catch (e) {
    return user.id_token
  }
}

export const fetchUserACLRequest = (tenant) => ({
  type: USER_ACL_REQUEST,
  tenant: tenant,
})

// Thunk: record a successful login and install the bearer token on the
// API client so subsequent requests are authenticated.
export const userLoggedIn = (user, redirect) => (dispatch) => {
  const token = getToken(user)
  API.setAuthToken(token)
  dispatch({ type: USER_LOGGED_IN, user, token, redirect })
}

export const userLoggedOut = () => (dispatch) => {
  dispatch({ type: USER_LOGGED_OUT })
}

const fetchUserACLSuccess = (json) => ({
  type: USER_ACL_SUCCESS,
  isAdmin: json.zuul.admin,
  scope: json.zuul.scope,
})

const fetchUserACLFail = (error) => ({
  type: USER_ACL_FAIL,
  error
})

// Thunk: query the API for the authorizations of the logged-in user,
// optionally scoped to a tenant.
export const fetchUserACL = (tenant) => (dispatch) => {
  dispatch(fetchUserACLRequest(tenant))
  const apiPrefix = tenant ? `tenant/${tenant}/` : ''
  return API.fetchUserAuthorizations(apiPrefix)
    .then((response) => dispatch(fetchUserACLSuccess(response.data)))
    .catch((error) => {
      dispatch(fetchUserACLFail(error))
    })
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/user.js
|
user.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const PROJECTS_FETCH_REQUEST = 'PROJECTS_FETCH_REQUEST'
export const PROJECTS_FETCH_SUCCESS = 'PROJECTS_FETCH_SUCCESS'
export const PROJECTS_FETCH_FAIL = 'PROJECTS_FETCH_FAIL'

// Signal that a project-list fetch has started.
export function requestProjects() {
  return { type: PROJECTS_FETCH_REQUEST }
}

export function receiveProjects(tenant, json) {
  return {
    type: PROJECTS_FETCH_SUCCESS,
    tenant: tenant,
    projects: json,
    receivedAt: Date.now(),
  }
}

function failedProjects(error) {
  return { type: PROJECTS_FETCH_FAIL, error }
}

// Thunk: fetch the project list for a tenant.
function fetchProjects(tenant) {
  return (dispatch) => {
    dispatch(requestProjects())
    return API.fetchProjects(tenant.apiPrefix)
      .then((response) => dispatch(receiveProjects(tenant.name, response.data)))
      .catch((error) => dispatch(failedProjects(error)))
  }
}

// The project list is cached per tenant; fetch only when it is missing
// or empty.  NOTE(review): the original additionally probed
// projects.isFetching, which cannot be truthy on a non-empty array, so
// only the emptiness check is observable -- confirm the cached value
// really is a plain array.
function shouldFetchProjects(tenant, state) {
  const projects = state.projects.projects[tenant.name]
  return !projects || projects.length === 0
}

export function fetchProjectsIfNeeded(tenant, force) {
  return (dispatch, getState) => {
    if (force || shouldFetchProjects(tenant, getState())) {
      return dispatch(fetchProjects(tenant))
    }
    return Promise.resolve()
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/projects.js
|
projects.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
import yaml from 'js-yaml'
export const OPENAPI_FETCH_REQUEST = 'OPENAPI_FETCH_REQUEST'
export const OPENAPI_FETCH_SUCCESS = 'OPENAPI_FETCH_SUCCESS'
export const OPENAPI_FETCH_FAIL = 'OPENAPI_FETCH_FAIL'

export const fetchOpenApiRequest = () => ({
  type: OPENAPI_FETCH_REQUEST
})

// Build the success action from the raw OpenAPI YAML document.  In
// white-label mode the tenant-scoped paths are rewritten so the
// rendered spec matches the tenant-less URLs actually served.
export const fetchOpenApiSuccess = (yaml_data, whiteLabel) => {
  const data = yaml.safeLoad(yaml_data)
  if (whiteLabel) {
    const paths = {}
    for (let path in data.paths) {
      // Remove tenant list api
      if (path === '/api/tenants') {
        continue
      }
      // Remove tenant in path parameter
      data.paths[path].get.parameters.splice(0, 1)
      paths[path.replace('/api/tenant/{tenant}/', '/api/')] = data.paths[path]
    }
    data.paths = paths
  }
  data.servers = [{
    // Trim the trailing '/api/'
    url: API.apiUrl.substr(0, API.apiUrl.length - 5),
    description: 'Production server',
  }]
  return {
    type: OPENAPI_FETCH_SUCCESS,
    openapi: data,
  }
}

const fetchOpenApiFail = error => ({
  type: OPENAPI_FETCH_FAIL,
  error
})

// Thunk: fetch the OpenAPI document, retrying every 5 seconds on
// failure.
const fetchOpenApi = (whiteLabel) => dispatch => {
  dispatch(fetchOpenApiRequest())
  return API.fetchOpenApi()
    .then(response => dispatch(fetchOpenApiSuccess(response.data, whiteLabel)))
    .catch(error => {
      dispatch(fetchOpenApiFail(error))
      // Bug fix: the retry previously called fetchOpenApi() without
      // arguments, silently dropping the whiteLabel flag on every
      // retried fetch.
      setTimeout(() => {dispatch(fetchOpenApi(whiteLabel))}, 5000)
    })
}

const shouldFetchOpenApi = openapi => {
  if (!openapi.openapi) {
    return true
  }
  if (openapi.isFetching) {
    return false
  }
  return true
}

export const fetchOpenApiIfNeeded = (force) => (dispatch, getState) => {
  const state = getState()
  if (force || shouldFetchOpenApi(state.openapi)) {
    return dispatch(fetchOpenApi(state.tenant.whiteLabel))
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/openapi.js
|
openapi.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
export const TENANT_SET = 'TENANT_SET'

// Build the TENANT_SET action for the given tenant.
//
// In white-label deployments the app serves exactly one tenant at the
// site root, so every prefix collapses to the empty string and the
// default route is the status page.  In multi-tenant mode the API,
// link, and route prefixes all carry the tenant name and the default
// route is the tenant list.
export function setTenantAction (name, whiteLabel) {
  const tenant = whiteLabel
    ? {
      name: name,
      whiteLabel: whiteLabel,
      defaultRoute: '/status',
      linkPrefix: '',
      apiPrefix: '',
      routePrefix: '',
    }
    : {
      name: name,
      whiteLabel: whiteLabel,
      defaultRoute: '/tenants',
      linkPrefix: `/t/${name}`,
      apiPrefix: `tenant/${name}/`,
      routePrefix: '/t/:tenant',
    }
  return { type: TENANT_SET, tenant }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/tenant.js
|
tenant.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const JOBS_FETCH_REQUEST = 'JOBS_FETCH_REQUEST'
export const JOBS_FETCH_SUCCESS = 'JOBS_FETCH_SUCCESS'
export const JOBS_FETCH_FAIL = 'JOBS_FETCH_FAIL'

// Signal that a job-list fetch has started.
export function requestJobs() {
  return { type: JOBS_FETCH_REQUEST }
}

export function receiveJobs(tenant, json) {
  return {
    type: JOBS_FETCH_SUCCESS,
    tenant: tenant,
    jobs: json,
    receivedAt: Date.now(),
  }
}

function failedJobs(error) {
  return { type: JOBS_FETCH_FAIL, error }
}

// Thunk: fetch the job list for a tenant.
function fetchJobs(tenant) {
  return (dispatch) => {
    dispatch(requestJobs())
    return API.fetchJobs(tenant.apiPrefix)
      .then((response) => dispatch(receiveJobs(tenant.name, response.data)))
      .catch((error) => dispatch(failedJobs(error)))
  }
}

// The job list is cached per tenant; fetch only when missing/empty.
// NOTE(review): the original also probed jobs.isFetching, which can
// never be truthy on a non-empty array, so only the emptiness check is
// observable.
function shouldFetchJobs(tenant, state) {
  const jobs = state.jobs.jobs[tenant.name]
  return !jobs || jobs.length === 0
}

export function fetchJobsIfNeeded(tenant, force) {
  return (dispatch, getState) => {
    if (force || shouldFetchJobs(tenant, getState())) {
      return dispatch(fetchJobs(tenant))
    }
    return Promise.resolve()
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/jobs.js
|
jobs.js
|
// Copyright 2018 Red Hat, Inc
// Copyright 2022 Acme Gating, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const SEMAPHORES_FETCH_REQUEST = 'SEMAPHORES_FETCH_REQUEST'
export const SEMAPHORES_FETCH_SUCCESS = 'SEMAPHORES_FETCH_SUCCESS'
export const SEMAPHORES_FETCH_FAIL = 'SEMAPHORES_FETCH_FAIL'

// Signal that a semaphore-list fetch has started.
export function requestSemaphores() {
  return { type: SEMAPHORES_FETCH_REQUEST }
}

export function receiveSemaphores(tenant, json) {
  return {
    type: SEMAPHORES_FETCH_SUCCESS,
    tenant: tenant,
    semaphores: json,
    receivedAt: Date.now(),
  }
}

function failedSemaphores(error) {
  return { type: SEMAPHORES_FETCH_FAIL, error }
}

// Thunk: fetch the semaphore list for a tenant.
function fetchSemaphores(tenant) {
  return (dispatch) => {
    dispatch(requestSemaphores())
    return API.fetchSemaphores(tenant.apiPrefix)
      .then((response) => dispatch(receiveSemaphores(tenant.name, response.data)))
      .catch((error) => dispatch(failedSemaphores(error)))
  }
}

// Semaphores are cached per tenant; fetch only when missing/empty.
// NOTE(review): as in the sibling modules, the original isFetching
// probe is unobservable on a non-empty array.
function shouldFetchSemaphores(tenant, state) {
  const semaphores = state.semaphores.semaphores[tenant.name]
  return !semaphores || semaphores.length === 0
}

export function fetchSemaphoresIfNeeded(tenant, force) {
  return (dispatch, getState) => {
    if (force || shouldFetchSemaphores(tenant, getState())) {
      return dispatch(fetchSemaphores(tenant))
    }
    return Promise.resolve()
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/semaphores.js
|
semaphores.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const PROJECT_FETCH_REQUEST = 'PROJECT_FETCH_REQUEST'
export const PROJECT_FETCH_SUCCESS = 'PROJECT_FETCH_SUCCESS'
export const PROJECT_FETCH_FAIL = 'PROJECT_FETCH_FAIL'

// Signal that a single-project fetch has started.
export function requestProject() {
  return { type: PROJECT_FETCH_REQUEST }
}
// Build the PROJECT_FETCH_SUCCESS action for a single project.
//
// Mutates `project` in place: pipeline/job entries from template
// configs (is_template === true) are folded into the preceding config
// entry, and the template entries are then removed.
// NOTE(review): the `configs[idx - 1]` access assumes a template entry
// always immediately follows the project config it applies to --
// inferred from the indexing; confirm against the API response shape.
export const receiveProject = (tenant, projectName, project) => {
  // TODO: fix api to return template name or merge them
  // in the mean-time, merge the jobs in project configs
  const templateIdx = []
  // NOTE: this outer `idx` is shadowed by the forEach parameter below;
  // it is only used by the cleanup loop at the end.
  let idx
  project.configs.forEach((config, idx) => {
    if (config.is_template === true) {
      // This must be a template
      templateIdx.push(idx)
      config.pipelines.forEach(templatePipeline => {
        // Look for a pipeline of the same name in the preceding config.
        let pipeline = project.configs[idx - 1].pipelines.filter(
          item => item.name === templatePipeline.name)
        if (pipeline.length === 0) {
          // Pipeline doesn't exist in project config
          project.configs[idx - 1].pipelines.push(templatePipeline)
        } else {
          // Merge: take the template's queue name if unset, then append
          // the template's jobs to the existing pipeline.
          if (pipeline[0].queue_name === null) {
            pipeline[0].queue_name = templatePipeline.queue_name
          }
          templatePipeline.jobs.forEach(job => {
            pipeline[0].jobs.push(job)
          })
        }
      })
    }
  })
  // Remove the merged template entries; iterate backwards so earlier
  // indices stay valid while splicing.
  for (idx = templateIdx.length - 1; idx >= 0; idx -= 1) {
    project.configs.splice(templateIdx[idx], 1)
  }
  return {
    type: PROJECT_FETCH_SUCCESS,
    tenant: tenant,
    projectName: projectName,
    project: project,
    receivedAt: Date.now(),
  }
}
function failedProject(error) {
  return { type: PROJECT_FETCH_FAIL, error }
}

// Thunk: fetch a single project's configuration from the API.
function fetchProject(tenant, project) {
  return (dispatch) => {
    dispatch(requestProject())
    return API.fetchProject(tenant.apiPrefix, project)
      .then((response) => dispatch(receiveProject(
        tenant.name, project, response.data)))
      .catch((error) => dispatch(failedProject(error)))
  }
}

// A project is fetched at most once per tenant unless forced.
// NOTE(review): the original also probed project.isFetching but
// returned false either way, so only presence in the cache is
// observable.
function shouldFetchProject(tenant, projectName, state) {
  const tenantProjects = state.project.projects[tenant.name]
  if (!tenantProjects) {
    return true
  }
  return !tenantProjects[projectName]
}

export function fetchProjectIfNeeded(tenant, project, force) {
  return (dispatch, getState) => {
    if (force || shouldFetchProject(tenant, project, getState())) {
      return dispatch(fetchProject(tenant, project))
    }
    return Promise.resolve()
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/project.js
|
project.js
|
// Copyright 2019 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as buildAction from './build'
// Jest unit test for the build action helpers (didTaskFail,
// hasInterestingKeys, renderTree) imported from './build'.
it('processes job-output properly', () => {
  // didTaskFail: a task result object with failed=true is a failure.
  expect(buildAction.didTaskFail({failed: true})).toEqual(true)
  // hasInterestingKeys: true only when at least one of the listed keys
  // is present on the result object.
  expect(buildAction.hasInterestingKeys({rc: 42}, ['rc'])).toEqual(true)
  expect(buildAction.hasInterestingKeys({noop: 42}, ['rc'])).toEqual(false)
  // Check trailing / are removed
  let obj = {children: [], mimetype: 'test', name: 'test'}
  // renderTree builds a display node from a manifest entry; the
  // log_url's trailing slash must be stripped in the rendered text.
  let tree = buildAction.renderTree(
    {linkPrefix: 'test/'},
    {log_url: 'http://test/', uuid: 'test'},
    '/', obj, (a) => (a), (a) => (a))
  expect(tree).toEqual(
    {'icon': 'fa fa-file-o', 'nodes': [], 'text': 'http://test'})
})
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/build.test.js
|
build.test.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
export const ADD_NOTIFICATION = 'ADD_NOTIFICATION'
export const CLEAR_NOTIFICATION = 'CLEAR_NOTIFICATION'
export const CLEAR_NOTIFICATIONS = 'CLEAR_NOTIFICATIONS'

// Monotonically increasing id so each notification can be cleared
// individually.
let notificationId = 0

export function addNotification(notification) {
  return {
    type: ADD_NOTIFICATION,
    id: notificationId++,
    notification,
  }
}

// Convert an API (axios-style) error into an error notification.
export function addApiError(error) {
  const notification = {
    url: (error && error.request && error.request.responseURL) || error.url,
    type: 'error',
  }
  if (error.response) {
    // The server answered with a non-2xx status.
    notification.text = error.response.statusText
    notification.status = error.response.status
  } else {
    // No response at all (network problem, blocked request, ...).
    notification.status = 'Unable to fetch URL, check your network connectivity,'
      + ' browser plugins, ad-blockers, or try to refresh this page'
    notification.text = error.message
  }
  return addNotification(notification)
}

export function clearNotification(id) {
  return { type: CLEAR_NOTIFICATION, id }
}

export function clearNotifications() {
  return { type: CLEAR_NOTIFICATIONS }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/notifications.js
|
notifications.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const STATUS_FETCH_REQUEST = 'STATUS_FETCH_REQUEST'
export const STATUS_FETCH_SUCCESS = 'STATUS_FETCH_SUCCESS'
export const STATUS_FETCH_FAIL = 'STATUS_FETCH_FAIL'

// Signal that a status fetch has started.
export function requestStatus() {
  return { type: STATUS_FETCH_REQUEST }
}

export function receiveStatus(json) {
  return {
    type: STATUS_FETCH_SUCCESS,
    status: json,
    receivedAt: Date.now(),
  }
}

function failedStatus(error) {
  return { type: STATUS_FETCH_FAIL, error }
}

// Thunk: fetch the live pipeline status for a tenant.
function fetchStatus(tenant) {
  return (dispatch) => {
    dispatch(requestStatus())
    return API.fetchStatus(tenant.apiPrefix)
      .then((response) => dispatch(receiveStatus(response.data)))
      .catch((error) => dispatch(failedStatus(error)))
  }
}

// Refetch unless a request is already in flight.
function shouldFetchStatus(state) {
  const status = state.status
  return !status || !status.isFetching
}

export function fetchStatusIfNeeded(tenant) {
  return (dispatch, getState) => {
    if (shouldFetchStatus(getState())) {
      return dispatch(fetchStatus(tenant))
    }
    return Promise.resolve()
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/status.js
|
status.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const NODES_FETCH_REQUEST = 'NODES_FETCH_REQUEST'
export const NODES_FETCH_SUCCESS = 'NODES_FETCH_SUCCESS'
export const NODES_FETCH_FAIL = 'NODES_FETCH_FAIL'

export const requestNodes = () => ({
  type: NODES_FETCH_REQUEST
})

// NOTE(review): `tenant` is accepted for signature parity with the
// sibling fetch actions but is not included in the action -- the node
// list stored here is not keyed by tenant.
export const receiveNodes = (tenant, json) => ({
  type: NODES_FETCH_SUCCESS,
  nodes: json,
  receivedAt: Date.now()
})

const failedNodes = error => ({
  type: NODES_FETCH_FAIL,
  error
})

// Thunk: fetch the node list.
const fetchNodes = (tenant) => dispatch => {
  dispatch(requestNodes())
  return API.fetchNodes(tenant.apiPrefix)
    .then(response => dispatch(receiveNodes(tenant.name, response.data)))
    .catch(error => dispatch(failedNodes(error)))
}

// Fetch when there is no data, and refresh stale data after a minute.
const shouldFetchNodes = (tenant, state) => {
  const nodes = state.nodes
  if (!nodes || nodes.nodes.length === 0) {
    return true
  }
  if (nodes.isFetching) {
    return false
  }
  if (Date.now() - nodes.receivedAt > 60000) {
    // Refetch after 1 minutes
    return true
  }
  return false
}

export const fetchNodesIfNeeded = (tenant, force) => (
  dispatch, getState) => {
  if (force || shouldFetchNodes(tenant, getState())) {
    return dispatch(fetchNodes(tenant))
  }
  // Consistency fix: the other *IfNeeded thunks return a resolved
  // promise when nothing is fetched; do the same so callers can always
  // chain on the result.
  return Promise.resolve()
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/nodes.js
|
nodes.js
|
// Copyright 2020 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
export const PREFERENCE_SET = 'PREFERENCE_SET'

// Action creator: record a single user preference as a key/value pair.
export const setPreference = (key, value) => ({
  type: PREFERENCE_SET,
  key,
  value,
})
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/preferences.js
|
preferences.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const CHANGE_FETCH_REQUEST = 'CHANGE_FETCH_REQUEST'
export const CHANGE_FETCH_SUCCESS = 'CHANGE_FETCH_SUCCESS'
export const CHANGE_FETCH_FAIL = 'CHANGE_FETCH_FAIL'

// Signal that a change-status fetch has started.
export function requestChange() {
  return { type: CHANGE_FETCH_REQUEST }
}

export function receiveChange(json) {
  return {
    type: CHANGE_FETCH_SUCCESS,
    change: json,
    receivedAt: Date.now(),
  }
}

function failedChange(error) {
  return { type: CHANGE_FETCH_FAIL, error }
}

// Thunk: fetch the status entries for a single change.
function fetchChange(tenant, changeId) {
  return (dispatch) => {
    dispatch(requestChange())
    return API.fetchChangeStatus(tenant.apiPrefix, changeId)
      .then((response) => dispatch(receiveChange(response.data)))
      .catch((error) => dispatch(failedChange(error)))
  }
}

// Refetch unless a request is already in flight.
function shouldFetchChange(state) {
  const change = state.change
  return !change || !change.isFetching
}

export function fetchChangeIfNeeded(tenant, change, force) {
  return (dispatch, getState) => {
    if (force || shouldFetchChange(getState())) {
      return dispatch(fetchChange(tenant, change))
    }
    return Promise.resolve()
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/change.js
|
change.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const TENANTS_FETCH_REQUEST = 'TENANTS_FETCH_REQUEST'
export const TENANTS_FETCH_SUCCESS = 'TENANTS_FETCH_SUCCESS'
export const TENANTS_FETCH_FAIL = 'TENANTS_FETCH_FAIL'

export const requestTenants = () => ({
  type: TENANTS_FETCH_REQUEST
})

export const receiveTenants = json => ({
  type: TENANTS_FETCH_SUCCESS,
  tenants: json,
  receivedAt: Date.now()
})

const failedTenants = error => ({
  type: TENANTS_FETCH_FAIL,
  error
})

// Thunk: fetch the list of tenants.
const fetchTenants = () => dispatch => {
  dispatch(requestTenants())
  return API.fetchTenants()
    .then(response => dispatch(receiveTenants(response.data)))
    .catch(error => dispatch(failedTenants(error)))
}

// Fetch only when the list is empty and no request is in flight.
const shouldFetchTenants = state => {
  const tenants = state.tenants
  if (tenants.tenants.length > 0) {
    return false
  }
  if (tenants.isFetching) {
    return false
  }
  return true
}

export const fetchTenantsIfNeeded = (force) => (dispatch, getState) => {
  if (force || shouldFetchTenants(getState())) {
    return dispatch(fetchTenants())
  }
  // Consistency fix: the other *IfNeeded thunks return a resolved
  // promise when nothing is fetched; do the same so callers can always
  // chain on the result.
  return Promise.resolve()
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/tenants.js
|
tenants.js
|
// Copyright 2018 Red Hat, Inc
// Copyright 2022 Acme Gating, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const FREEZE_JOB_FETCH_REQUEST = 'FREEZE_JOB_FETCH_REQUEST'
export const FREEZE_JOB_FETCH_SUCCESS = 'FREEZE_JOB_FETCH_SUCCESS'
export const FREEZE_JOB_FETCH_FAIL = 'FREEZE_JOB_FETCH_FAIL'

export function requestFreezeJob() {
  return { type: FREEZE_JOB_FETCH_REQUEST }
}

// Cache key for one frozen job: the pipeline/project/branch/job tuple.
export function makeFreezeJobKey(pipeline, project, branch, job) {
  return JSON.stringify({
    pipeline, project, branch, job
  })
}

export function receiveFreezeJob(tenant, freezeJobKey, freezeJob) {
  return {
    type: FREEZE_JOB_FETCH_SUCCESS,
    tenant: tenant,
    freezeJobKey: freezeJobKey,
    freezeJob: freezeJob,
    receivedAt: Date.now(),
  }
}

function failedFreezeJob(error) {
  return { type: FREEZE_JOB_FETCH_FAIL, error }
}

// Thunk: freeze one job and cache it under its tuple key.
function fetchFreezeJob(tenant, pipeline, project, branch, job) {
  return (dispatch) => {
    dispatch(requestFreezeJob())
    const freezeJobKey = makeFreezeJobKey(pipeline, project, branch, job)
    return API.fetchFreezeJob(tenant.apiPrefix,
                              pipeline,
                              project,
                              branch,
                              job)
      .then((response) => dispatch(receiveFreezeJob(
        tenant.name, freezeJobKey, response.data)))
      .catch((error) => dispatch(failedFreezeJob(error)))
  }
}

// Frozen jobs are cached per tenant and fetched at most once unless
// forced.  NOTE(review): the original isFetching probe returned false
// either way, so only presence in the cache is observable.
function shouldFetchFreezeJob(tenant, pipeline, project, branch, job, state) {
  const tenantFreezeJobs = state.freezejob.freezeJobs[tenant.name]
  if (!tenantFreezeJobs) {
    return true
  }
  const key = makeFreezeJobKey(pipeline, project, branch, job)
  return !tenantFreezeJobs[key]
}

export function fetchFreezeJobIfNeeded(tenant, pipeline, project, branch, job,
                                       force) {
  return (dispatch, getState) => {
    if (force || shouldFetchFreezeJob(tenant, pipeline, project, branch, job,
                                      getState())) {
      return dispatch(fetchFreezeJob(tenant, pipeline, project, branch, job))
    }
    return Promise.resolve()
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/freezejob.js
|
freezejob.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const JOB_FETCH_REQUEST = 'JOB_FETCH_REQUEST'
export const JOB_FETCH_SUCCESS = 'JOB_FETCH_SUCCESS'
export const JOB_FETCH_FAIL = 'JOB_FETCH_FAIL'

export function requestJob() {
  return { type: JOB_FETCH_REQUEST }
}

export function receiveJob(tenant, jobname, json) {
  return {
    type: JOB_FETCH_SUCCESS,
    tenant: tenant,
    jobname: jobname,
    job: json,
    receivedAt: Date.now(),
  }
}

function failedJob(error) {
  return { type: JOB_FETCH_FAIL, error }
}

// Thunk: fetch a single job's definition for a tenant.
function fetchJob(tenant, jobname) {
  return (dispatch) => {
    dispatch(requestJob())
    return API.fetchJob(tenant.apiPrefix, jobname)
      .then((response) => dispatch(
        receiveJob(tenant.name, jobname, response.data)))
      .catch((error) => dispatch(failedJob(error)))
  }
}

// Jobs are cached per tenant and fetched at most once unless forced.
// NOTE(review): the original isFetching probe returned false either
// way, so only presence in the cache is observable.
function shouldFetchJob(tenant, jobname, state) {
  const tenantJobs = state.job.jobs[tenant.name]
  return !tenantJobs || !tenantJobs[jobname]
}

export function fetchJobIfNeeded(tenant, jobname, force) {
  return (dispatch, getState) => {
    if (force || shouldFetchJob(tenant, jobname, getState())) {
      return dispatch(fetchJob(tenant, jobname))
    }
    return Promise.resolve()
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/job.js
|
job.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import Axios from 'axios'
export const LOGFILE_FETCH_REQUEST = 'LOGFILE_FETCH_REQUEST'
export const LOGFILE_FETCH_SUCCESS = 'LOGFILE_FETCH_SUCCESS'
export const LOGFILE_FETCH_FAIL = 'LOGFILE_FETCH_FAIL'

export const requestLogfile = (url) => ({
  type: LOGFILE_FETCH_REQUEST,
  url: url,
})

// Timestamp and severity patterns for syslog/systemd-style and
// OpenStack (oslo) style log lines.
const SYSLOGDATE = '\\w+\\s+\\d+\\s+\\d{2}:\\d{2}:\\d{2}((\\.|\\,)\\d{3,6})?'
const DATEFMT = '\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}((\\.|\\,)\\d{3,6})?'
const STATUSFMT = '(DEBUG|INFO|WARNING|ERROR|TRACE|AUDIT|CRITICAL)'

const severityMap = {
  DEBUG: 1,
  INFO: 2,
  WARNING: 3,
  ERROR: 4,
  TRACE: 5,
  AUDIT: 6,
  CRITICAL: 7,
}

// In both regexes the severity word is capture group 7.
const OSLO_LOGMATCH = new RegExp(`^(${DATEFMT})(( \\d+)? (${STATUSFMT}).*)`)
const SYSTEMD_LOGMATCH = new RegExp(`^(${SYSLOGDATE})( (\\S+) \\S+\\[\\d+\\]\\: (${STATUSFMT}).*)`)

// Split the raw log text into lines annotated with a 1-based index and
// (when a known log format matches) a numeric severity.
const receiveLogfile = (buildId, file, data) => {
  const out = data.split(/\r?\n/).map((line, idx) => {
    let m = null
    let sev = null

    m = SYSTEMD_LOGMATCH.exec(line)
    if (m) {
      sev = severityMap[m[7]]
    } else {
      // Bug fix: the exec() result was previously discarded (the
      // assignment to `m` was missing), so oslo-formatted lines never
      // received a severity.
      m = OSLO_LOGMATCH.exec(line)
      if (m) {
        sev = severityMap[m[7]]
      }
    }
    return {
      text: line,
      index: idx+1,
      severity: sev
    }
  })
  return {
    type: LOGFILE_FETCH_SUCCESS,
    buildId,
    fileName: file,
    fileContent: out,
    receivedAt: Date.now()
  }
}
// Record a failed logfile fetch, tagging the error object with the URL
// that was requested so the UI can display it.
const failedLogfile = (error, url) => {
  error.url = url
  return { type: LOGFILE_FETCH_FAIL, error }
}
// Thunk: fetch a logfile for a build, unless it is already cached in
// the local state.  Relies on fetchBuild() and fetchBuildManifest()
// having completed so the build and manifest exist in the state.
export function fetchLogfile(buildId, file, state) {
  return async function (dispatch) {
    // Don't do anything if the logfile is already part of our local state
    if (
      buildId in state.logfile.files &&
      file in state.logfile.files[buildId]
    ) {
      return Promise.resolve()
    }

    // Since this method is only called after fetchBuild() and fetchManifest(),
    // we can assume both are there.
    const build = state.build.builds[buildId]
    const manifest = state.build.manifests[buildId]

    const item = manifest.index['/' + file]
    if (!item) {
      return dispatch(
        failedLogfile(Error(`No manifest entry found for logfile "${file}"`))
      )
    }
    // Only plain-text files can be rendered by the log viewer.
    if (item.mimetype !== 'text/plain') {
      return dispatch(
        failedLogfile(Error(`Logfile "${file}" has invalid mimetype`))
      )
    }

    const url = build.log_url + file
    // Bug fix: requestLogfile() takes the URL being fetched; it was
    // previously dispatched without it, so the request action never
    // recorded which logfile was loading.
    dispatch(requestLogfile(url))
    try {
      // Fetch raw text; disable Axios' default JSON transform.
      const response = await Axios.get(url, { transformResponse: [] })
      dispatch(receiveLogfile(buildId, file, response.data))
    } catch(error) {
      dispatch(failedLogfile(error, url))
    }
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/logfile.js
|
logfile.js
|
// Copyright 2018 Red Hat, Inc
// Copyright 2022 Acme Gating, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const PIPELINES_FETCH_REQUEST = 'PIPELINES_FETCH_REQUEST'
export const PIPELINES_FETCH_SUCCESS = 'PIPELINES_FETCH_SUCCESS'
export const PIPELINES_FETCH_FAIL = 'PIPELINES_FETCH_FAIL'
/** Signal that the pipeline list is being fetched. */
export const requestPipelines = () => {
  return { type: PIPELINES_FETCH_REQUEST }
}

/** Store the fetched pipeline list for a tenant. */
export const receivePipelines = (tenant, json) => {
  return {
    type: PIPELINES_FETCH_SUCCESS,
    tenant: tenant,
    pipelines: json,
    receivedAt: Date.now(),
  }
}

/** Record a failed pipeline fetch. */
const failedPipelines = (error) => {
  return { type: PIPELINES_FETCH_FAIL, error }
}

/** Thunk: load the pipeline list from the API. */
const fetchPipelines = (tenant) => (dispatch) => {
  dispatch(requestPipelines())
  return API.fetchPipelines(tenant.apiPrefix)
    .then((response) => dispatch(receivePipelines(tenant.name, response.data)))
    .catch((error) => dispatch(failedPipelines(error)))
}
// Pipelines are fetched only when the tenant has no cached, non-empty
// list; an existing list is never refetched here.
const shouldFetchPipelines = (tenant, state) => {
  const cached = state.pipelines.pipelines[tenant.name]
  return !cached || cached.length === 0
}
// Thunk: fetch pipelines unless already cached (or forced); always
// returns a promise so callers can chain on completion.
export const fetchPipelinesIfNeeded = (tenant, force) => (dispatch, getState) => {
  const needed = force || shouldFetchPipelines(tenant, getState())
  return needed ? dispatch(fetchPipelines(tenant)) : Promise.resolve()
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/pipelines.js
|
pipelines.js
|
// Copyright 2021 BMW Group
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as types from './actionTypes'
import * as API from '../api'
/** Signal that the component registry is being fetched. */
function requestComponents() {
  return { type: types.COMPONENTS_FETCH_REQUEST }
}

/** Store the fetched component registry. */
function receiveComponents(components) {
  return { type: types.COMPONENTS_FETCH_SUCCESS, components }
}

/** Record a failed component fetch. */
function failedComponents(error) {
  return { type: types.COMPONENTS_FETCH_FAIL, error }
}

/** Thunk: load the component registry from the API. */
export function fetchComponents() {
  return async (dispatch) => {
    dispatch(requestComponents())
    try {
      const response = await API.fetchComponents()
      dispatch(receiveComponents(response.data))
    } catch (error) {
      dispatch(failedComponents(error))
    }
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/component.js
|
component.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import Axios from 'axios'
import * as API from '../api'
import { fetchLogfile } from './logfile'
export const BUILD_FETCH_REQUEST = 'BUILD_FETCH_REQUEST'
export const BUILD_FETCH_SUCCESS = 'BUILD_FETCH_SUCCESS'
export const BUILD_FETCH_FAIL = 'BUILD_FETCH_FAIL'
export const BUILDSET_FETCH_REQUEST = 'BUILDSET_FETCH_REQUEST'
export const BUILDSET_FETCH_SUCCESS = 'BUILDSET_FETCH_SUCCESS'
export const BUILDSET_FETCH_FAIL = 'BUILDSET_FETCH_FAIL'
export const BUILD_OUTPUT_REQUEST = 'BUILD_OUTPUT_FETCH_REQUEST'
export const BUILD_OUTPUT_SUCCESS = 'BUILD_OUTPUT_FETCH_SUCCESS'
export const BUILD_OUTPUT_FAIL = 'BUILD_OUTPUT_FETCH_FAIL'
export const BUILD_OUTPUT_NOT_AVAILABLE = 'BUILD_OUTPUT_NOT_AVAILABLE'
export const BUILD_MANIFEST_REQUEST = 'BUILD_MANIFEST_FETCH_REQUEST'
export const BUILD_MANIFEST_SUCCESS = 'BUILD_MANIFEST_FETCH_SUCCESS'
export const BUILD_MANIFEST_FAIL = 'BUILD_MANIFEST_FETCH_FAIL'
export const BUILD_MANIFEST_NOT_AVAILABLE = 'BUILD_MANIFEST_NOT_AVAILABLE'
/** Signal that a build fetch is in flight. */
export const requestBuild = () => {
  return { type: BUILD_FETCH_REQUEST }
}

/** Store a fetched build under its id. */
export const receiveBuild = (buildId, build) => {
  return {
    type: BUILD_FETCH_SUCCESS,
    buildId: buildId,
    build: build,
    receivedAt: Date.now(),
  }
}

// Record a failed build fetch, tagging the error with the URL used.
const failedBuild = (buildId, error, url) => {
  error.url = url
  return { type: BUILD_FETCH_FAIL, buildId, error }
}

/** Signal that the job-output fetch has started. */
export const requestBuildOutput = () => {
  return { type: BUILD_OUTPUT_REQUEST }
}
// job-output processing functions
// Render one manifest entry (and, recursively, its children) into a
// node object for the log tree view.  text/plain entries are rendered
// with textRenderer, everything else with defaultRenderer.
export function renderTree(tenant, build, path, obj, textRenderer, defaultRenderer) {
  const node = {}
  let name = obj.name

  if ('children' in obj && obj.children) {
    // Recurse into directories, extending the path prefix.
    node.nodes = obj.children.map(
      n => renderTree(tenant, build, path+obj.name+'/', n,
        textRenderer, defaultRenderer))
  }
  if (obj.mimetype === 'application/directory') {
    name = obj.name + '/'
  } else {
    node.icon = 'fa fa-file-o'
  }

  // Normalize the log URL so concatenation below doesn't produce a
  // double slash.
  let log_url = build.log_url
  if (log_url.endsWith('/')) {
    log_url = log_url.slice(0, -1)
  }

  if (obj.mimetype === 'text/plain') {
    node.text = textRenderer(tenant, build, path, name, log_url, obj)
  } else {
    node.text = defaultRenderer(log_url, path, name, obj)
  }
  return node
}
// A task failed if it is flagged failed itself or if any of its nested
// (looped) results failed.
export function didTaskFail(task) {
  if (task.failed) {
    return true
  }
  const nested = task.results || []
  return nested.some((result) => didTaskFail(result))
}
// True when obj contains at least one of the given keys with a
// non-empty value.
export function hasInterestingKeys (obj, keys) {
  for (const [key, value] of Object.entries(obj)) {
    if (keys.includes(key) && value !== '') {
      return true
    }
  }
  return false
}
// Return the Ansible loop label for an item, or '' when it is absent
// or not a string.
export function findLoopLabel(item) {
  const label = item._ansible_item_label
  if (typeof label === 'string') {
    return label
  }
  return ''
}
// Decide whether a task-result key should be displayed.  Keys starting
// with '_' can be suppressed, and when a whitelist is given only
// whitelisted keys with non-empty values are shown.
export function shouldIncludeKey(key, value, ignore_underscore, included) {
  if (ignore_underscore && key[0] === '_') {
    return false
  }
  if (included && (!included.includes(key) || value === '')) {
    return false
  }
  return true
}
// Serialize a task path (list of ids) into a single string key.
export function makeTaskPath (path) {
  return path.join('/')
}

// True when `test` lies at or below `ref` in the task tree: it must be
// at least as long and share every leading segment.
export function taskPathMatches (ref, test) {
  if (test.length < ref.length) {
    return false
  }
  return ref.every((segment, i) => segment === test[i])
}
// Process raw job-output JSON into the action payload: per-host stats
// (with the failed task results collected per host) plus the set of
// ids of every playbook/play/task that contains a failure.
export const receiveBuildOutput = (buildId, output) => {
  const hosts = {}
  // A task result counts as failed when it has a non-zero rc that was
  // not excused by failed_when, or when it is explicitly flagged failed.
  const taskFailed = (taskResult) => {
    if (taskResult.rc && taskResult.failed_when_result !== false)
      return true
    else if (taskResult.failed)
      return true
    else
      return false
  }

  // Compute stats
  output.forEach(phase => {
    Object.entries(phase.stats).forEach(([host, stats]) => {
      if (!hosts[host]) {
        // First phase seen for this host: adopt its stats object.
        hosts[host] = stats
        hosts[host].failed = []
      } else {
        // Later phases: accumulate the counters.
        hosts[host].changed += stats.changed
        hosts[host].failures += stats.failures
        hosts[host].ok += stats.ok
      }
      if (stats.failures > 0) {
        // Look for failed tasks
        phase.plays.forEach(play => {
          play.tasks.forEach(task => {
            if (task.hosts[host]) {
              if (task.hosts[host].results &&
                  task.hosts[host].results.length > 0) {
                // Looped task: inspect each per-item result.
                task.hosts[host].results.forEach(result => {
                  if (taskFailed(result)) {
                    result.name = task.task.name
                    hosts[host].failed.push(result)
                  }
                })
              } else if (taskFailed(task.hosts[host])) {
                let result = task.hosts[host]
                result.name = task.task.name
                hosts[host].failed.push(result)
              }
            }
          })
        })
      }
    })
  })

  // Identify all of the hosttasks (and therefore tasks, plays, and
  // playbooks) which have failed.  The errorIds are either task or
  // play uuids, or the phase+index for the playbook.  Since they are
  // different formats, we can store them in the same set without
  // collisions.
  const errorIds = new Set()
  output.forEach(playbook => {
    playbook.plays.forEach(play => {
      play.tasks.forEach(task => {
        Object.entries(task.hosts).forEach(([, host]) => {
          if (didTaskFail(host)) {
            errorIds.add(task.task.id)
            errorIds.add(play.play.id)
            errorIds.add(playbook.phase + playbook.index)
          }
        })
      })
    })
  })

  return {
    type: BUILD_OUTPUT_SUCCESS,
    buildId: buildId,
    hosts: hosts,
    output: output,
    errorIds: errorIds,
    receivedAt: Date.now()
  }
}
// Record a failed job-output fetch, tagging the error with its URL.
const failedBuildOutput = (buildId, error, url) => {
  error.url = url
  return { type: BUILD_OUTPUT_FAIL, buildId, error }
}

/** Signal that the manifest fetch has started. */
export const requestBuildManifest = () => {
  return { type: BUILD_MANIFEST_REQUEST }
}
// Flatten the manifest tree into a path->entry index alongside the
// original tree, so logfiles can be looked up by path later.
export const receiveBuildManifest = (buildId, manifest) => {
  const index = {}

  const walk = (root, entry) => {
    const path = root + '/' + entry.name
    if ('children' in entry && entry.children) {
      entry.children.forEach((child) => walk(path, child))
    } else {
      // Leaf: record the file under its absolute path.
      index[path] = entry
    }
  }
  manifest.tree.forEach((entry) => walk('', entry))

  return {
    type: BUILD_MANIFEST_SUCCESS,
    buildId: buildId,
    manifest: {tree: manifest.tree, index: index,
               index_links: manifest.index_links},
    receivedAt: Date.now()
  }
}
// Record a failed manifest fetch, tagging the error with its URL.
const failedBuildManifest = (buildId, error, url) => {
  error.url = url
  return { type: BUILD_MANIFEST_FAIL, buildId, error }
}

// The build has no job-output to offer (e.g. no log URL).
function buildOutputNotAvailable(buildId) {
  return { type: BUILD_OUTPUT_NOT_AVAILABLE, buildId: buildId }
}

// The build has no manifest artifact to offer.
function buildManifestNotAvailable(buildId) {
  return { type: BUILD_MANIFEST_NOT_AVAILABLE, buildId: buildId }
}
// Thunk: fetch a single build from the API unless it is already cached
// in the store.  Rethrows on failure so composed fetches can stop.
export function fetchBuild(tenant, buildId, state) {
  return async function (dispatch) {
    // Although it feels a little weird to not do anything in an action creator
    // based on the redux state, we do this in here because the function is
    // called from multiple places and it's easier to check for the build in
    // here than in all the other places before calling this function.
    if (state.build.builds[buildId]) {
      return Promise.resolve()
    }

    dispatch(requestBuild())
    try {
      const response = await API.fetchBuild(tenant.apiPrefix, buildId)
      dispatch(receiveBuild(buildId, response.data))
    } catch (error) {
      dispatch(failedBuild(buildId, error, tenant.apiPrefix))
      // Raise the error again, so fetchBuildAllInfo() doesn't call the
      // remaining fetch methods.
      throw error
    }
  }
}
// Thunk: fetch the Ansible job-output for a build.  Tries the
// compressed job-output.json.gz first and falls back to the plain
// job-output.json when the request reached the server but failed.
function fetchBuildOutput(buildId, state) {
  return async function (dispatch) {
    // In case the value is already set in our local state, directly resolve the
    // promise. A null value means that the output could not be found for this
    // build id.
    if (state.build.outputs[buildId] !== undefined) {
      return Promise.resolve()
    }
    // As this function is only called after fetchBuild() we can assume that
    // the build is in the state. Otherwise an error would have been thrown and
    // this function wouldn't be called.
    const build = state.build.builds[buildId]
    if (!build.log_url) {
      // Don't treat a missing log URL as failure as we don't want to show a
      // toast for that. The UI already informs about the missing log URL in
      // multiple places.
      return dispatch(buildOutputNotAvailable(buildId))
    }
    // Strip the final path component, keeping the trailing slash.
    const url = build.log_url.substr(0, build.log_url.lastIndexOf('/') + 1)
    dispatch(requestBuildOutput())
    try {
      const response = await Axios.get(url + 'job-output.json.gz')
      dispatch(receiveBuildOutput(buildId, response.data))
    } catch (error) {
      // error.request absent means the request never went out; don't
      // retry in that case.
      if (!error.request) {
        dispatch(failedBuildOutput(buildId, error, url))
        // Raise the error again, so fetchBuildAllInfo() doesn't call the
        // remaining fetch methods.
        throw error
      }
      try {
        // Try without compression
        const response = await Axios.get(url + 'job-output.json')
        dispatch(receiveBuildOutput(buildId, response.data))
      } catch (error) {
        dispatch(failedBuildOutput(buildId, error, url))
        // Raise the error again, so fetchBuildAllInfo() doesn't call the
        // remaining fetch methods.
        throw error
      }
    }
  }
}
// Thunk: locate the build's zuul_manifest artifact and fetch it.
export function fetchBuildManifest(buildId, state) {
  return async function(dispatch) {
    // In case the value is already set in our local state, directly resolve the
    // promise. A null value means that the manifest could not be found for this
    // build id.
    if (state.build.manifests[buildId] !== undefined) {
      return Promise.resolve()
    }
    // As this function is only called after fetchBuild() we can assume that
    // the build is in the state. Otherwise an error would have been thrown and
    // this function wouldn't be called.
    const build = state.build.builds[buildId]
    dispatch(requestBuildManifest())
    // Scan the artifact list for an entry tagged as a zuul manifest.
    for (let artifact of build.artifacts) {
      if (
        'metadata' in artifact &&
        'type' in artifact.metadata &&
        artifact.metadata.type === 'zuul_manifest'
      ) {
        try {
          const response = await Axios.get(artifact.url)
          return dispatch(receiveBuildManifest(buildId, response.data))
        } catch(error) {
          // Show the error since we expected a manifest but did not
          // receive it.
          dispatch(failedBuildManifest(buildId, error, artifact.url))
        }
      }
    }
    // Don't treat a missing manifest file as failure as we don't want to show a
    // toast for that.
    dispatch(buildManifestNotAvailable(buildId))
  }
}
export function fetchBuildAllInfo(tenant, buildId, logfileName) {
  // This wraps the calls to fetch the build, output and manifest together as
  // this is the common use case we have when loading the build info.
  return async function (dispatch, getState) {
    try {
      // Wait for the build to be available as fetchBuildOutput and
      // fetchBuildManifest require information from the build object.
      await dispatch(fetchBuild(tenant, buildId, getState()))
      // The output fetch is independent of the manifest, so it is
      // deliberately not awaited here.
      dispatch(fetchBuildOutput(buildId, getState()))
      // Wait for the manifest info to be available as this is needed in case
      // we also download a logfile.
      await dispatch(fetchBuildManifest(buildId, getState()))
      if (logfileName) {
        dispatch(fetchLogfile(buildId, logfileName, getState()))
      }
    } catch (error) {
      dispatch(failedBuild(buildId, error, tenant.apiPrefix))
    }
  }
}
/** Signal that a buildset fetch is in flight. */
export const requestBuildset = () => {
  return { type: BUILDSET_FETCH_REQUEST }
}

/** Store a fetched buildset under its id. */
export const receiveBuildset = (buildsetId, buildset) => {
  return {
    type: BUILDSET_FETCH_SUCCESS,
    buildsetId: buildsetId,
    buildset: buildset,
    receivedAt: Date.now(),
  }
}

/** Record a failed buildset fetch. */
const failedBuildset = (buildsetId, error) => {
  return { type: BUILDSET_FETCH_FAIL, buildsetId, error }
}

/** Thunk: fetch a buildset from the API. */
export function fetchBuildset(tenant, buildsetId) {
  return async (dispatch) => {
    dispatch(requestBuildset())
    try {
      const response = await API.fetchBuildset(tenant.apiPrefix, buildsetId)
      dispatch(receiveBuildset(buildsetId, response.data))
    } catch (error) {
      dispatch(failedBuildset(buildsetId, error))
    }
  }
}
// A buildset is fetched only when it is not yet present in the cache;
// a cached entry (fetching or done) is never refetched here.
const shouldFetchBuildset = (buildsetId, state) => {
  const cached = state.build.buildsets[buildsetId]
  return !cached
}
// Thunk: fetch the buildset unless cached (or forced).  Returns a
// promise in every case so callers can chain on completion,
// consistent with the other *IfNeeded helpers (e.g. fetchJobIfNeeded),
// which previously this one was not.
export const fetchBuildsetIfNeeded = (tenant, buildsetId, force) => (
  dispatch, getState) => {
  if (force || shouldFetchBuildset(buildsetId, getState())) {
    return dispatch(fetchBuildset(tenant, buildsetId))
  }
  return Promise.resolve()
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/build.js
|
build.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const LABELS_FETCH_REQUEST = 'LABELS_FETCH_REQUEST'
export const LABELS_FETCH_SUCCESS = 'LABELS_FETCH_SUCCESS'
export const LABELS_FETCH_FAIL = 'LABELS_FETCH_FAIL'
/** Signal that the label list is being fetched. */
export const requestLabels = () => {
  return { type: LABELS_FETCH_REQUEST }
}

/** Store the fetched label list for a tenant. */
export const receiveLabels = (tenant, json) => {
  return {
    type: LABELS_FETCH_SUCCESS,
    tenant: tenant,
    labels: json,
    receivedAt: Date.now(),
  }
}

/** Record a failed label fetch. */
const failedLabels = (error) => {
  return { type: LABELS_FETCH_FAIL, error }
}

/** Thunk: load the label list from the API. */
const fetchLabels = (tenant) => (dispatch) => {
  dispatch(requestLabels())
  return API.fetchLabels(tenant.apiPrefix)
    .then((response) => dispatch(receiveLabels(tenant.name, response.data)))
    .catch((error) => dispatch(failedLabels(error)))
}
// Labels are fetched only when the tenant has no cached, non-empty
// list; an existing list is never refetched here.
const shouldFetchLabels = (tenant, state) => {
  const cached = state.labels.labels[tenant.name]
  return !cached || cached.length === 0
}
// Thunk: fetch labels unless cached (or forced).  Returns a promise in
// every case for consistency with the other *IfNeeded helpers, which
// this one previously lacked.
export const fetchLabelsIfNeeded = (tenant, force) => (
  dispatch, getState) => {
  if (force || shouldFetchLabels(tenant, getState())) {
    return dispatch(fetchLabels(tenant))
  }
  return Promise.resolve()
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/labels.js
|
labels.js
|
// Copyright 2020 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const AUTOHOLDS_FETCH_REQUEST = 'AUTOHOLDS_FETCH_REQUEST'
export const AUTOHOLDS_FETCH_SUCCESS = 'AUTOHOLDS_FETCH_SUCCESS'
export const AUTOHOLDS_FETCH_FAIL = 'AUTOHOLDS_FETCH_FAIL'
export const AUTOHOLD_FETCH_REQUEST = 'AUTOHOLD_FETCH_REQUEST'
export const AUTOHOLD_FETCH_SUCCESS = 'AUTOHOLD_FETCH_SUCCESS'
export const AUTOHOLD_FETCH_FAIL = 'AUTOHOLD_FETCH_FAIL'
/** Signal that the autohold list is being fetched. */
export const requestAutoholds = () => {
  return { type: AUTOHOLDS_FETCH_REQUEST }
}

// NOTE(review): the tenant argument is accepted for symmetry with the
// other receive* creators but is currently unused.
export const receiveAutoholds = (tenant, json) => {
  return {
    type: AUTOHOLDS_FETCH_SUCCESS,
    autoholds: json,
    receivedAt: Date.now(),
  }
}

/** Record a failed autohold-list fetch. */
const failedAutoholds = (error) => {
  return { type: AUTOHOLDS_FETCH_FAIL, error }
}

/** Thunk: load the autohold list from the API. */
export const fetchAutoholds = (tenant) => (dispatch) => {
  dispatch(requestAutoholds())
  return API.fetchAutoholds(tenant.apiPrefix)
    .then((response) => dispatch(receiveAutoholds(tenant.name, response.data)))
    .catch((error) => dispatch(failedAutoholds(error)))
}
// Refetch the autohold list when it is missing or empty, and refresh a
// stale (older than one minute) list; an in-flight fetch is never
// duplicated.
const shouldFetchAutoholds = (tenant, state) => {
  const autoholds = state.autoholds
  if (!autoholds || autoholds.autoholds.length === 0) {
    return true
  }
  if (autoholds.isFetching) {
    return false
  }
  // Refetch after 1 minute.
  return Date.now() - autoholds.receivedAt > 60000
}
// Thunk: refresh the autohold list when needed (or forced).
export const fetchAutoholdsIfNeeded = (tenant, force) => (dispatch, getState) => {
  if (force || shouldFetchAutoholds(tenant, getState())) {
    return dispatch(fetchAutoholds(tenant))
  }
}

/** Signal that a single autohold request is being fetched. */
export const requestAutohold = () => {
  return { type: AUTOHOLD_FETCH_REQUEST }
}

// NOTE(review): tenant is unused here as well; kept for symmetry.
export const receiveAutohold = (tenant, json) => {
  return {
    type: AUTOHOLD_FETCH_SUCCESS,
    autohold: json,
    receivedAt: Date.now(),
  }
}

/** Record a failed single-autohold fetch. */
const failedAutohold = (error) => {
  return { type: AUTOHOLD_FETCH_FAIL, error }
}

/** Thunk: load one autohold request by id. */
export const fetchAutohold = (tenant, requestId) => (dispatch) => {
  dispatch(requestAutohold())
  return API.fetchAutohold(tenant.apiPrefix, requestId)
    .then((response) => dispatch(receiveAutohold(tenant.name, response.data)))
    .catch((error) => dispatch(failedAutohold(error)))
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/autoholds.js
|
autoholds.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import { fetchConfigErrors } from '../api'
// Thunk: load the tenant's configuration errors and store them.
export function fetchConfigErrorsAction (tenant) {
  return (dispatch) => {
    return fetchConfigErrors(tenant.apiPrefix)
      .then((response) => {
        dispatch({type: 'CONFIGERRORS_FETCH_SUCCESS', errors: response.data})
      })
      .catch((error) => {
        dispatch({type: 'CONFIGERRORS_FETCH_FAIL', error})
      })
  }
}

// Thunk: drop all stored configuration errors.
export function clearConfigErrorsAction () {
  return (dispatch) => {
    dispatch({type: 'CONFIGERRORS_CLEAR'})
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/configErrors.js
|
configErrors.js
|
// Copyright 2018 Red Hat, Inc
// Copyright 2022 Acme Gating, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const JOB_GRAPH_FETCH_REQUEST = 'JOB_GRAPH_FETCH_REQUEST'
export const JOB_GRAPH_FETCH_SUCCESS = 'JOB_GRAPH_FETCH_SUCCESS'
export const JOB_GRAPH_FETCH_FAIL = 'JOB_GRAPH_FETCH_FAIL'
/** Signal that a job-graph fetch has started. */
export const requestJobGraph = () => {
  return { type: JOB_GRAPH_FETCH_REQUEST }
}

// Cache key for a job graph: a canonical JSON encoding of the
// (project, pipeline, branch) triple.
export function makeJobGraphKey(project, pipeline, branch) {
  return JSON.stringify({
    project: project, pipeline: pipeline, branch: branch
  })
}

/** Store a fetched job graph under its tenant and cache key. */
export const receiveJobGraph = (tenant, jobGraphKey, jobGraph) => {
  return {
    type: JOB_GRAPH_FETCH_SUCCESS,
    tenant: tenant,
    jobGraphKey: jobGraphKey,
    jobGraph: jobGraph,
    receivedAt: Date.now(),
  }
}

/** Record a failed job-graph fetch. */
const failedJobGraph = (error) => {
  return { type: JOB_GRAPH_FETCH_FAIL, error }
}

/** Thunk: fetch the job graph for a project/pipeline/branch. */
const fetchJobGraph = (tenant, project, pipeline, branch) => (dispatch) => {
  dispatch(requestJobGraph())
  const jobGraphKey = makeJobGraphKey(project, pipeline, branch)
  return API.fetchJobGraph(tenant.apiPrefix, project, pipeline, branch)
    .then((response) => dispatch(receiveJobGraph(
      tenant.name, jobGraphKey, response.data)))
    .catch((error) => dispatch(failedJobGraph(error)))
}
// Fetch only when no cached graph exists for this key; cached entries
// (fetching or finished) are never refetched here.
const shouldFetchJobGraph = (tenant, project, pipeline, branch, state) => {
  const cache = state.jobgraph.jobGraphs[tenant.name]
  if (!cache) {
    return true
  }
  return !cache[makeJobGraphKey(project, pipeline, branch)]
}

// Thunk: fetch the job graph unless cached (or forced); always returns
// a promise.
export const fetchJobGraphIfNeeded = (tenant, project, pipeline, branch,
                                      force) => (dispatch, getState) => {
  if (force || shouldFetchJobGraph(tenant, project, pipeline, branch,
                                   getState())) {
    return dispatch(fetchJobGraph(tenant, project, pipeline, branch))
  }
  return Promise.resolve()
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/jobgraph.js
|
jobgraph.js
|
// Copyright 2020 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const AUTH_CONFIG_REQUEST = 'AUTH_CONFIG_REQUEST'
export const AUTH_CONFIG_SUCCESS = 'AUTH_CONFIG_SUCCESS'
export const AUTH_CONFIG_FAIL = 'AUTH_CONFIG_FAIL'
export const USER_ACL_REQUEST = 'USER_ACL_REQUEST'
export const USER_ACL_SUCCESS = 'USER_ACL_SUCCESS'
export const USER_ACL_FAIL = 'USER_ACL_FAIL'
export const AUTH_START = 'AUTH_START'
/** Signal that the auth configuration is being loaded. */
const authConfigRequest = () => {
  return { type: AUTH_CONFIG_REQUEST }
}

// Translate the auth capabilities advertised by the API into the
// parameter object expected by the OIDC client.  Falls back to empty
// parameters when no usable OpenIDConnect realm is configured.
function createAuthParamsFromJson(json) {
  const defaults = {
    authority: '',
    client_id: '',
    scope: '',
    loadUserInfo: true,
  }
  const auth_info = json.info.capabilities.auth
  if (!auth_info) {
    console.log('No auth config')
    return defaults
  }
  const client_config = auth_info.realms[auth_info.default_realm]
  if (!client_config || client_config.driver !== 'OpenIDConnect') {
    console.log('No OpenIDConnect provider found')
    return defaults
  }
  return {
    authority: client_config.authority,
    client_id: client_config.client_id,
    scope: client_config.scope,
    loadUserInfo: client_config.load_user_info,
  }
}
/** Store the loaded auth capabilities and derived OIDC parameters. */
const authConfigSuccess = (json, auth_params) => {
  return {
    type: AUTH_CONFIG_SUCCESS,
    info: json.info.capabilities.auth,
    auth_params: auth_params,
  }
}

/** Record a failure to load the auth configuration. */
const authConfigFail = (error) => {
  return { type: AUTH_CONFIG_FAIL, error }
}
// Thunk: load auth configuration from the per-tenant info endpoint.
export const configureAuthFromTenant = (tenantName) => (dispatch) => {
  dispatch(authConfigRequest())
  return API.fetchTenantInfo('tenant/' + tenantName + '/')
    .then((response) => {
      const params = createAuthParamsFromJson(response.data)
      dispatch(authConfigSuccess(response.data, params))
    })
    .catch((error) => {
      dispatch(authConfigFail(error))
    })
}

// Configure auth from an already-fetched info object (no extra request).
export const configureAuthFromInfo = (info) => (dispatch) => {
  try {
    const json = {info: info}
    dispatch(authConfigSuccess(json, createAuthParamsFromJson(json)))
  } catch (error) {
    dispatch(authConfigFail(error))
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/auth.js
|
auth.js
|
// Copyright 2018 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
import * as API from '../api'
export const INFO_FETCH_REQUEST = 'INFO_FETCH_REQUEST'
export const INFO_FETCH_SUCCESS = 'INFO_FETCH_SUCCESS'
export const INFO_FETCH_FAIL = 'INFO_FETCH_FAIL'
/** Signal that the top-level info endpoint is being fetched. */
export const fetchInfoRequest = () => {
  return { type: INFO_FETCH_REQUEST }
}

/** Store tenant and capability info from the API response. */
export const fetchInfoSuccess = (json) => {
  return {
    type: INFO_FETCH_SUCCESS,
    tenant: json.info.tenant,
    capabilities: json.info.capabilities,
  }
}

/** Record a failed info fetch. */
const fetchInfoFail = (error) => {
  return { type: INFO_FETCH_FAIL, error }
}
// Thunk: fetch the top-level info endpoint.  On failure, schedule a
// retry so the UI can eventually start once the API is reachable.
const fetchInfo = () => dispatch => {
  dispatch(fetchInfoRequest())
  return API.fetchInfo()
    .then(response => {
      dispatch(fetchInfoSuccess(response.data))
    })
    .catch(error => {
      dispatch(fetchInfoFail(error))
      // NOTE(review): retries forever with a fixed 5s period and no
      // backoff or cap — confirm this is intended.
      setTimeout(() => {dispatch(fetchInfo())}, 5000)
    })
}
// Info is fetched whenever it is absent or not currently in flight.
const shouldFetchInfo = (state) => {
  const info = state.info
  if (!info) {
    return true
  }
  return !info.isFetching
}

// Thunk: fetch the info endpoint unless a request is already running.
export const fetchInfoIfNeeded = () => (dispatch, getState) => {
  if (shouldFetchInfo(getState())) {
    return dispatch(fetchInfo())
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/info.js
|
info.js
|
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
export const TIMEZONE_SET = 'TIMEZONE_SET'

// Action creator: select the timezone used to render timestamps.
export function setTimezoneAction (name) {
  return { type: TIMEZONE_SET, timezone: name }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/timezone.js
|
timezone.js
|
// Copyright 2021 BMW Group
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
export const COMPONENTS_FETCH_REQUEST = 'COMPONENTS_FETCH_REQUEST'
export const COMPONENTS_FETCH_SUCCESS = 'COMPONENTS_FETCH_SUCCESS'
export const COMPONENTS_FETCH_FAIL = 'COMPONENTS_FETCH_FAIL'
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/actionTypes.js
|
actionTypes.js
|
// Copyright 2020 Red Hat, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
export const ADMIN_DEQUEUE_FAIL = 'ADMIN_DEQUEUE_FAIL'
export const ADMIN_ENQUEUE_FAIL = 'ADMIN_ENQUEUE_FAIL'
export const ADMIN_AUTOHOLD_FAIL = 'ADMIN_AUTOHOLD_FAIL'
export const ADMIN_PROMOTE_FAIL = 'ADMIN_PROMOTE_FAIL'
// Extract a human-readable message from an admin-API error.  Zuul
// returns HTML error pages whose first <p> holds the description; when
// that structure is absent (previously a crash on undefined[0]) or
// there is no response at all, fall back to the raw error object.
function parseAPIerror(error) {
  if (error.response) {
    const parser = new DOMParser()
    const htmlError = parser.parseFromString(error.response.data, 'text/html')
    const paragraph = htmlError.getElementsByTagName('p')[0]
    if (paragraph) {
      return paragraph.innerText
    }
  }
  return error
}
export const addDequeueError = error => ({
type: ADMIN_DEQUEUE_FAIL,
notification: parseAPIerror(error)
})
export const addEnqueueError = error => ({
type: ADMIN_ENQUEUE_FAIL,
notification: parseAPIerror(error)
})
// Build an autohold-failure notification action from an API error.
export function addAutoholdError(error) {
  return {
    type: ADMIN_AUTOHOLD_FAIL,
    notification: parseAPIerror(error),
  }
}
// Build a promote-failure notification action from an API error.
export function addPromoteError(error) {
  return {
    type: ADMIN_PROMOTE_FAIL,
    notification: parseAPIerror(error),
  }
}
|
zuul
|
/zuul-9.1.0.tar.gz/zuul-9.1.0/web/src/actions/adminActions.js
|
adminActions.js
|
========
zuul_get
========
The ``zuul_get`` script retrieves status updates from OpenStack's Zuul
deployment and returns the status of a particular CI job. The script now
supports version 2 and 3 of Zuul.
Installation
------------
The easiest method is to use pip:
.. code-block:: console
pip install zuul_get
Running the script
------------------
Provide a six-digit gerrit review number as an argument to retrieve the CI job
URLs from Zuul's JSON status file. Here's an example:
.. code-block:: console
$ zuul_get 510588
+---------------------------------------------------+---------+----------------------+
| Zuulv2 Jobs for 510588 | | |
+---------------------------------------------------+---------+----------------------+
| gate-ansible-hardening-docs-ubuntu-xenial | Queued | |
| gate-ansible-hardening-linters-ubuntu-xenial | Queued | |
| gate-ansible-hardening-ansible-func-centos-7 | Success | https://is.gd/ifQc2I |
| gate-ansible-hardening-ansible-func-ubuntu-xenial | Queued | |
| gate-ansible-hardening-ansible-func-opensuse-423 | Success | https://is.gd/RiiZFW |
| gate-ansible-hardening-ansible-func-debian-jessie | Success | https://is.gd/gQ0izk |
| gate-ansible-hardening-ansible-func-fedora-26 | Success | https://is.gd/w9zTCa |
+---------------------------------------------------+---------+----------------------+
+-----------------------------------------------------+--------+--+
| Zuulv3 Jobs for 510588 | | |
+-----------------------------------------------------+--------+--+
| build-openstack-sphinx-docs | Queued | |
| openstack-tox-linters | Queued | |
| legacy-ansible-func-centos-7 | Queued | |
| legacy-ansible-func | Queued | |
| legacy-ansible-func-opensuse-423 | Queued | |
| legacy-ansible-hardening-ansible-func-debian-jessie | Queued | |
| legacy-ansible-hardening-ansible-func-fedora-26 | Queued | |
+-----------------------------------------------------+--------+--+
Currently running jobs will have a link displayed which allows you to view
the progress of a particular job. Zuulv2 uses ``telnet://`` links while
Zuulv3 has a continuously updating page in your browser.
Completed jobs will have a link to the job results.
Contributing
------------
Pull requests and GitHub issues are always welcome!
|
zuul_get
|
/zuul_get-1.2.tar.gz/zuul_get-1.2/README.rst
|
README.rst
|
#!/usr/bin/python
#
# Copyright 2016 Major Hayden
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
# Runtime dependencies: HTTP access to Zuul's status JSON plus table output.
required_packages = [
    "requests",
    "terminaltables",
]

setup(
    name='zuul_get',
    version='1.2',
    author='Major Hayden',
    author_email='[email protected]',
    description="Retrieves CI job URLs from OpenStack Zuul",
    install_requires=required_packages,
    packages=['zuul_get'],
    url='https://github.com/major/zuul_get',
    # Installs a ``zuul_get`` console script mapped to zuul_get.zuul_get:run.
    entry_points='''
[console_scripts]
zuul_get = zuul_get.zuul_get:run '''
)
|
zuul_get
|
/zuul_get-1.2.tar.gz/zuul_get-1.2/setup.py
|
setup.py
|
# zuulfmt
A Zuul/Ansible yaml formatter/prettifier.
## Changes
### 0.2.0
- The fmt function output is new-line terminated
### 0.1.0
- Initial release
|
zuulfmt
|
/zuulfmt-0.2.0.tar.gz/zuulfmt-0.2.0/README.md
|
README.md
|
# Copyright (c) 2020 Red Hat
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import zuulfmt
samples = [("""
- copy:
src: file
dst: new-file
name: copy a file
when: true
- command: test
name: run a command
""", """
- name: copy a file
when: true
copy:
src: file
dst: new-file
- name: run a command
command: test
"""), ("""
- job:
branches:
- master
- f33
- f32
- epel8
description: Check the project has a tests/tests.yml
name: check-for-tests
nodeset:
nodes: []
run: playbooks/rpm/check-for-tests.yaml
- job:
abstract: true
description: Base job for RPM build on Fedora Koji
name: common-koji-rpm-build
nodeset: fedora-33-container
protected: true
provides:
- repo
roles:
- zuul: zuul-distro-jobs
run: playbooks/koji/build-ng.yaml
secrets:
- name: krb_keytab
secret: krb_keytab
timeout: 21600
""", """
- job:
name: check-for-tests
description: Check the project has a tests/tests.yml
run: playbooks/rpm/check-for-tests.yaml
branches:
- master
- f33
- f32
- epel8
nodeset:
nodes: []
- job:
name: common-koji-rpm-build
description: Base job for RPM build on Fedora Koji
run: playbooks/koji/build-ng.yaml
abstract: true
nodeset: fedora-33-container
protected: true
provides:
- repo
roles:
- zuul: zuul-distro-jobs
secrets:
- name: krb_keytab
secret: krb_keytab
timeout: 21600
""")]
def fmt(inp, expected):
    """Run ``inp`` through zuulfmt and compare with ``expected``.

    Returns True on a match; otherwise prints both strings for
    debugging and returns False.
    """
    result = zuulfmt.fmt(inp)
    if result == expected:
        return True
    print("Got: [" + result + "], wanted: [" + expected + "]")
    return False
def test_samples():
    """Every (input, expected) pair in ``samples`` must format correctly."""
    results = [fmt(inp, expected) for inp, expected in samples]
    assert all(results)
if __name__ == '__main__':
unittest.main()
|
zuulfmt
|
/zuulfmt-0.2.0.tar.gz/zuulfmt-0.2.0/test.py
|
test.py
|
# Copyright (c) 2020 Red Hat
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from setuptools import setup
# The package version is derived from the git tag via setuptools_scm
# rather than being hard-coded here.
setup(
    name="zuulfmt",
    setup_requires=["setuptools_scm"],
    use_scm_version=True,
    packages=["zuulfmt"],
    description="A Zuul/Ansible yaml formatter/prettifier.",
    license="ASL v2.0",
    long_description=open("README.md").read(),
    long_description_content_type="text/markdown",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Information Technology",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3.6",
    ],
    # Console entry point: the ``zuulfmt`` command runs zuulfmt:main.
    entry_points={"console_scripts": ["zuulfmt=zuulfmt:main"]},
)
|
zuulfmt
|
/zuulfmt-0.2.0.tar.gz/zuulfmt-0.2.0/setup.py
|
setup.py
|
====
zuup
====
.. image:: https://travis-ci.org/sileht/zuup.png?branch=master
:target: https://travis-ci.org/sileht/zuup
.. image:: https://img.shields.io/pypi/v/zuup.svg
:target: https://pypi.python.org/pypi/zuup/
:alt: Latest Version
.. image:: https://img.shields.io/pypi/dm/zuup.svg
:target: https://pypi.python.org/pypi/zuup/
:alt: Downloads
Command line to consult Openstack zuul status
* Free software: Apache license
* Documentation: http://zuup.readthedocs.org
* Source: http://github.com/sileht/zuup
* Bugs: http://github.com/sileht/zuup/issues
Installation
------------
At the command line::
$ pip install zuup
Or, if you have virtualenvwrapper installed::
$ mkvirtualenv zuup
$ pip install zuup
Usage
-----
To use zuup::
$ zuup --help
usage: zuup [-h] [-D] [-d] [-w DELAY] [-e EXPIRATION] [-u USERNAME]
[-p PROJECTS] [-c CHANGES] [-l] [-r] [-s] [-j JOB]
optional arguments:
-h, --help show this help message and exit
-D Daemonize and exit if no more reviews
-d Daemonize
-w DELAY refresh delay
   -e EXPIRATION review expiration in daemon mode
-u USERNAME Username
-p PROJECTS Projects
-c CHANGES changes
-l local changes
-r current repo changes
-s short output
-j JOB show log of a job of a change
Example
-------
Print jobs of projects::
$ zuup -p openstack/ceilometer -p openstack/gnocchi
[openstack/gnocchi] check[0]: https://review.openstack.org/235161
TEST 01:22:14/00:00:00
- SUCCESS --:--:-- gate-gnocchi-pep8 http://logs.openstack.org/61/235161/4/check/gate-gnocchi-pep8/ac6632a
- SUCCESS --:--:-- gate-gnocchi-docs http://logs.openstack.org/61/235161/4/check/gate-gnocchi-docs/ff085e7
- SUCCESS --:--:-- gate-gnocchi-python27 http://logs.openstack.org/61/235161/4/check/gate-gnocchi-python27/9e3fd5e
- SUCCESS --:--:-- gate-gnocchi-python34 http://logs.openstack.org/61/235161/4/check/gate-gnocchi-python34/afcef87
- SUCCESS --:--:-- gate-gnocchi-bashate http://logs.openstack.org/61/235161/4/check/gate-gnocchi-bashate/f7b10d4
- SUCCESS --:--:-- gate-gnocchi-dsvm-functional-file-mysql http://logs.openstack.org/61/235161/4/check/gate-gnocchi-dsvm-functional-file-mysql/d016760
- ======= 00:00:00 gate-gnocchi-dsvm-functional-swift-postgresql https://jenkins06.openstack.org/job/gate-gnocchi-dsvm-functional-swift-postgresql/263/
- SUCCESS --:--:-- gate-gnocchi-dsvm-functional-ceph-mysql http://logs.openstack.org/61/235161/4/check/gate-gnocchi-dsvm-functional-ceph-mysql/2b54187
- SUCCESS --:--:-- gate-ceilometer-dsvm-integration http://logs.openstack.org/61/235161/4/check/gate-ceilometer-dsvm-integration/a937fd5
[openstack/ceilometer] check[0]: https://review.openstack.org/235202
Merge tag '5.0.0' 01:02:46/00:09:20
- SUCCESS --:--:-- gate-ceilometer-pep8 http://logs.openstack.org/02/235202/1/check/gate-ceilometer-pep8/bac67ce
- SUCCESS --:--:-- gate-ceilometer-docs http://logs.openstack.org/02/235202/1/check/gate-ceilometer-docs/1d1eb96
- FAILURE --:--:-- gate-ceilometer-python27 http://logs.openstack.org/02/235202/1/check/gate-ceilometer-python27/d993423
- FAILURE --:--:-- gate-ceilometer-python34 http://logs.openstack.org/02/235202/1/check/gate-ceilometer-python34/5ee29b5
- SUCCESS --:--:-- gate-tempest-dsvm-ceilometer-mongodb-full http://logs.openstack.org/02/235202/1/check/gate-tempest-dsvm-ceilometer-mongodb-full/a55e9e6
- ======. 00:09:20 gate-tempest-dsvm-ceilometer-mysql-neutron-full https://jenkins06.openstack.org/job/gate-tempest-dsvm-ceilometer-mysql-neutron-full/114/
- ======= 00:00:00 gate-tempest-dsvm-ceilometer-mysql-full https://jenkins03.openstack.org/job/gate-tempest-dsvm-ceilometer-mysql-full/36/
- SUCCESS --:--:-- gate-tempest-dsvm-ceilometer-postgresql-full http://logs.openstack.org/02/235202/1/check/gate-tempest-dsvm-ceilometer-postgresql-full/a1eee16
- ======= 00:00:00 gate-ceilometer-dsvm-functional-mongodb https://jenkins03.openstack.org/job/gate-ceilometer-dsvm-functional-mongodb/275/
- ======= 00:00:00 gate-ceilometer-dsvm-functional-postgresql https://jenkins05.openstack.org/job/gate-ceilometer-dsvm-functional-postgresql/146/
- SUCCESS --:--:-- gate-grenade-dsvm-ceilometer http://logs.openstack.org/02/235202/1/check/gate-grenade-dsvm-ceilometer/383ecfb
- SUCCESS --:--:-- gate-ceilometer-dsvm-integration http://logs.openstack.org/02/235202/1/check/gate-ceilometer-dsvm-integration/6758820
...
Print jobs of an user::
$ zuup -u sileht
$ zuup -u sileht -d # Run it in loop
Print jobs of a change-id::
$ zuup -c 235161
or
$ zuup -c https://review.openstack.org/235207
Print jobs of change-ids on your local git branch::
$ zuup -l
Print jobs resume ::
$ zuup -c https://review.openstack.org/235207 -s
[openstack/ceilometer] check[0]: https://review.openstack.org/235207 Switch to post-versioning 00:59:40/00:04:08 SSFSSSSPPSS
- FAILURE --:--:-- gate-ceilometer-python27 http://logs.openstack.org/07/235207/1/check/gate-ceilometer-python27/546a067
Print running and failed jobs only ::
$ zuup -c https://review.openstack.org/235207 -R
[openstack/ceilometer] check[0]: https://review.openstack.org/235207
Switch to post-versioning 01:00:18/00:03:30
- FAILURE --:--:-- gate-ceilometer-python27 http://logs.openstack.org/07/235207/1/check/gate-ceilometer-python27/546a067
- ======= 00:00:00 gate-ceilometer-dsvm-functional-mongodb https://jenkins03.openstack.org/job/gate-ceilometer-dsvm-functional-mongodb/276/
- ======. 00:03:30 gate-ceilometer-dsvm-functional-postgresql https://jenkins04.openstack.org/job/gate-ceilometer-dsvm-functional-postgresql/140/
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/README.rst
|
README.rst
|
If you would like to contribute to the development, just send github pull requests.
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/CONTRIBUTING.rst
|
CONTRIBUTING.rst
|
#!/bin/bash
# Release helper for zuup: tags, builds and uploads one release.
# Usage: ./releases.sh [version]
# If no version is given, it defaults to the pbr-derived version with
# any ".devN" suffix stripped.
set -e
set -x
version=$1
[ ! "$version" ] && version=$(python setup.py --version | sed 's/\.dev.*//')
# Refuse to release from a dirty working tree.
status=$(git status -sz)
[ -z "$status" ] || false
git checkout master
tox -epy35,py27,pep8
git push
git tag -s $version -m "Release version ${version}"
git checkout $version
git clean -fd
# Sanity check: the freshly created tag must yield the same pbr version.
pbr_version=$(python setup.py --version)
if [ "$version" != "$pbr_version" ]; then
    echo "something goes wrong pbr version is different from the provided one. ($pbr_version != $version)"
    exit 1
fi
python setup.py sdist bdist_wheel
set +x
echo
echo "release: zuup ${version}"
echo
echo "SHA1sum: "
sha1sum dist/*
echo "MD5sum: "
md5sum dist/*
echo
echo "uploading..."
echo
set -x
# Pause for operator confirmation before pushing the tag and uploading.
read
git push --tags
twine upload -r pypi -s dist/zuup-${version}.tar.gz dist/zuup-${version}-py2.py3-none-any.whl
git checkout master
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/releases.sh
|
releases.sh
|
zuup Style Commandments
===============================================
Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/HACKING.rst
|
HACKING.rst
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
    import multiprocessing  # noqa
except ImportError:
    pass
# All real package metadata lives in setup.cfg; pbr reads it at build time.
setuptools.setup(
    setup_requires=['pbr'],
    pbr=True)
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/setup.py
|
setup.py
|
============
Contributing
============
.. include:: ../../CONTRIBUTING.rst
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/doc/source/contributing.rst
|
contributing.rst
|
========
Usage
========
To use zuup::
zuul --help
usage: zuul [-h] [-D] [-d] [-w DELAY] [-e EXPIRATION] [-u USERNAME]
[-p PROJECTS] [-c CHANGES] [-l] [-r] [-s] [-j JOB]
optional arguments:
-h, --help show this help message and exit
-D Daemonize and exit if no more reviews
-d Daemonize
-w DELAY refresh delay
   -e EXPIRATION review expiration in daemon mode
-u USERNAME Username
-p PROJECTS Projects
-c CHANGES changes
-l local changes
-r current repo changes
-s short output
-j JOB show log of a job of a change
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/doc/source/usage.rst
|
usage.rst
|
.. zuup documentation master file, created by
sphinx-quickstart on Tue Jul 9 22:26:36 2013.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to zuup's documentation!
========================================================
Contents:
.. toctree::
:maxdepth: 2
readme
contributing
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/doc/source/index.rst
|
index.rst
|
.. include:: ../../README.rst
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/doc/source/readme.rst
|
readme.rst
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
#'sphinx.ext.intersphinx',
# 'oslosphinx'
]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'zuup'
copyright = u'2013, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'%s.tex' % project,
u'%s Documentation' % project,
u'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
|
zuup
|
/zuup-1.0.7.tar.gz/zuup-1.0.7/doc/source/conf.py
|
conf.py
|
from http.server import \
SimpleHTTPRequestHandler, \
HTTPServer
from socketserver import ThreadingMixIn
import sys
from os import path
import os
class ThreadingHTTPServer(ThreadingMixIn, HTTPServer):
    # Handle each request in its own daemon thread so a slow client cannot
    # block the server and worker threads die with the main process.
    daemon_threads = True
def main():
    """Serve the directory containing this file over threaded HTTP/1.1.

    The first CLI argument, when present, selects the port (default 3000).
    Blocks forever in ``serve_forever``.
    """
    os.chdir(path.dirname(__file__))
    port = sys.argv[1] if len(sys.argv) > 1 else 3000
    SimpleHTTPRequestHandler.protocol_version = "HTTP/1.1"
    server = ThreadingHTTPServer(('localhost', int(port)), SimpleHTTPRequestHandler)
    print(f"Serving HTTP on localhost port {port} ...")
    server.serve_forever()
if __name__ == '__main__': main()
|
zuxian-liaoyu
|
/zuxian_liaoyu-2022.9.14.0-py3-none-any.whl/ZuxianLiaoyu/__main__.py
|
__main__.py
|
# 祖先疗愈
## 下载
### Docker
```
docker pull apachecn0/zuxian-liaoyu
docker run -tid -p <port>:80 apachecn0/zuxian-liaoyu
# 访问 http://localhost:{port} 查看文档
```
### PYPI
```
pip install zuxian-liaoyu
zuxian-liaoyu <port>
# 访问 http://localhost:{port} 查看文档
```
### NPM
```
npm install -g zuxian-liaoyu
zuxian-liaoyu <port>
# 访问 http://localhost:{port} 查看文档
```
|
zuxian-liaoyu
|
/zuxian_liaoyu-2022.9.14.0-py3-none-any.whl/ZuxianLiaoyu/README.md
|
README.md
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""zuxian-liaoyu
https://github.com/apachecn/zuxian-liaoyu"""
__author__ = "ApacheCN"
__email__ = "[email protected]"
__license__ = "CC BY-NC-SA 4.0"
__version__ = "2022.9.14.0"
|
zuxian-liaoyu
|
/zuxian_liaoyu-2022.9.14.0-py3-none-any.whl/ZuxianLiaoyu/__init__.py
|
__init__.py
|
import requests
import json
class Deadline:
    """One deadline record as returned by the Zuydbot API."""

    def __init__(self, date, course, description, opportunity, meta):
        # Raw API fields are stored verbatim; ``meta`` carries fetch info.
        self.meta = meta
        self.date = date
        self.course = course
        self.opportunity = opportunity
        self.description = description
class Lesson:
    """One timetable entry (a single lesson) from the Zuydbot API."""

    def __init__(self, start, end, course, location, teacher, meta):
        # Raw API fields are stored verbatim; ``meta`` carries fetch info.
        self.meta = meta
        self.start = start
        self.end = end
        self.course = course
        self.location = location
        self.teacher = teacher
class Meta:
    """Metadata attached to every API payload: freshness and owner."""

    def __init__(self, last_update, user):
        self.user = user
        self.last_update = last_update
class APIConnection:
    """Thin client for the Zuydbot REST API (v2).

    Connectivity is verified once at construction time; fetched records
    are cached on ``self.deadlines`` / ``self.lessons``.
    """

    def __init__(self, key):
        self.base_url = 'https://app.zuydbot.cc/api/v2'
        self.key = key
        self.deadlines = None
        self.lessons = None
        self.test_connection()

    def test_connection(self):
        """Probe the API root; raise TimeoutError/ConnectionError on failure."""
        try:
            r = requests.get(self.base_url, timeout=15)
        except requests.exceptions.ReadTimeout:
            raise TimeoutError('Connected timed out.')
        # Fixed: ``is not 200`` compared object identity (a SyntaxWarning on
        # modern Pythons and incorrect in principle); use value inequality.
        if r.status_code != 200:
            raise ConnectionError('Cannot reach API (HTTP {}).'.format(r.status_code))

    def send_request(self, module):
        """GET ``<base>/<module>`` and return ``(records, meta)``.

        ``module`` is e.g. ``'deadlines'`` or ``'lessons'``; the JSON payload
        is expected to contain a key of the same name plus a ``meta`` object.
        Raises TimeoutError or ConnectionError on transport failures.
        """
        try:
            r = requests.get('{}/{}'.format(self.base_url, module), headers={'key': self.key}, timeout=15)
        except requests.exceptions.ReadTimeout:
            raise TimeoutError('Connected timed out.')
        if r.status_code != 200:
            raise ConnectionError('Cannot reach API (HTTP {}).'.format(r.status_code))
        response = json.loads(r.content.decode('utf-8'))
        # Bug fix: the records live under the requested module's key; the old
        # code hard-coded 'deadlines', which broke get_lessons().
        return response[module], response['meta']

    def get_deadlines(self):
        """Fetch deadlines and cache them as Deadline objects on self."""
        deadlines, meta = self.send_request('deadlines')
        deadline_list = []
        metadata = Meta(last_update=meta['last-update'], user=meta['user'])
        for deadline in deadlines:
            deadline_list.append(Deadline(date=deadline['date'], course=deadline['course'], meta=metadata,
                                          description=deadline['description'], opportunity=deadline['opportunity']))
        self.deadlines = deadline_list

    def get_lessons(self):
        """Fetch lessons and cache them as Lesson objects on self."""
        lessons, meta = self.send_request('lessons')
        lesson_list = []
        metadata = Meta(last_update=meta['last-update'], user=meta['user'])
        for lesson in lessons:
            lesson_list.append(Lesson(start=lesson['start-time'], end=lesson['end-time'], course=lesson['course'],
                                      location=lesson['location'], teacher=lesson['teacher'], meta=metadata))
        self.lessons = lesson_list
|
zuydbot-api
|
/zuydbot_api-0.1-py3-none-any.whl/zuydbot_api/APIConnection.py
|
APIConnection.py
|
from .APIConnection import APIConnection
|
zuydbot-api
|
/zuydbot_api-0.1-py3-none-any.whl/zuydbot_api/__init__.py
|
__init__.py
|
# zuz
Two zuzim
|
zuz
|
/zuz-0.0.1.tar.gz/zuz-0.0.1/README.md
|
README.md
|
from setuptools import setup, find_packages
with open("README.md", "r") as readme_file:
readme = readme_file.read()
requirements = ["numpy", "pandas"]
setup(
name="zuz",
version="0.0.1",
author="Eyal Gal",
author_email="[email protected]",
description="Dezabin aba bitrei zuzei",
long_description=readme,
long_description_content_type="text/markdown",
keywords=[],
url="https://github.com/gialdetti/zuz",
packages=find_packages(),
install_requires=requirements,
include_package_data=True,
# package_data={"datasets": ["zuz/resources/*"]},
classifiers=[
"Programming Language :: Python :: 3.10",
],
)
|
zuz
|
/zuz-0.0.1.tar.gz/zuz-0.0.1/setup.py
|
setup.py
|
# ZuzuVibhu
The module provides the zuzu package for Vibhu Agarwal.\
Zuzu is a unique language defined by Vibhu Agarwal himself.\
The language is in no way related to other standard languages understood in public which is not specifically defined by Vibhu Agarwal.
Happy Go Zuzus!
## Installing the package
```
pip install zuzuvibhu
```
## Using the package
```
>>> import zuzuvibhu
>>> zuzuvibhu.get_zuzus()
```
Go to http://localhost:5000/ to get the response in HTML\
or you may visit http://localhost:5000/api to get the text in JSON format.
|
zuzuvibhu
|
/zuzuvibhu-1.0.4.tar.gz/zuzuvibhu-1.0.4/README.md
|
README.md
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
with open('requirements.txt') as f:
requirements = f.readlines()
setuptools.setup(
name="zuzuvibhu",
version="1.0.4",
author="Vibhu Agarwal",
author_email="[email protected]",
description="Zuzu Package of Vibhu Agarwal",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Vibhu-Agarwal/zuzuvibhu",
packages=setuptools.find_packages(),
license='MIT',
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
keywords='zuzu vibhu agarwal vibhuagarwal',
install_requires=requirements
)
|
zuzuvibhu
|
/zuzuvibhu-1.0.4.tar.gz/zuzuvibhu-1.0.4/setup.py
|
setup.py
|
#!/usr/bin/python
# import module
from __future__ import print_function
from math import *
import argparse
import mechanize
import cookielib
import sys
import bs4
import requests
import os
import glob
import random
import time
reload(sys)
sys.setdefaultencoding('utf-8')
__VERSION__ = '0.1.3 (in development)'
__BOTNAME__ = 'zvBot' # default botname
__LICENSE__ = '''
MIT License
Copyright (c) 2018 Noval Wahyu Ramadhan <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
# lambda
sprt = lambda: logger('-'*arg.long_separator, sprt=True)
# super user
ADMIN = []
# blacklist user
BLACKLIST = []
# Command options
SINGLE_COMMAND = ['@quit', '@help']
NOT_SINGLE_COMMAND = [ '@calc', '@igstalk', '@img',
'@tr', '@igd', '@wiki',
'@sgb_quote', '@tanpakertas_quote', '@rasa_quote',
'@img_quote', '@kbbi',
'@lyrics' ]
COMMANDS = NOT_SINGLE_COMMAND + SINGLE_COMMAND
BLACKLIST_COMMAND = []
# helper
HELP_TEXT = [ 'commands:\n',
' - @help : show this help message.',
' - @kbbi <word> : search entries for a word/phrase in KBBI Online.',
' - @lyrics <song title> : look for the lyrics of a song',
' - @img <query> : look for images that are relevant to the query.',
' - @calc <value> : do mathematical calculations.',
' - @igd <url> : download Instagram photos from url.',
' - @sgb_quote <quote> : SGB quote maker.',
' - @rasa_quote <quote> : rasa untukmu quote maker.',
' - @tanpakertas_quote <quote> : tanpa kertas quote maker.',
' - @img_quote <quote> : IMG quote maker.',
' - @wiki <word> : search for word definitions in wikipedia.',
' - @tr <text> : translate any language into English.',
' - @igstalk <username> : View user profiles on Instagram.',
'<br>Example:\n',
' - @kbbi makan',
' - @img random',
' - @lyrics eminem venom',
' - @calc 1+2+3+4+5',
' - @sgb_quote write your quote here!',
' - @igd https://instagram.com/p/<code>',
' - @tr halo dunia',
' - @wiki wibu',
' - @wiki kpop' ]
def command(mess, name = ''):
    # Handle one chat command.  ``mess`` is the tokenised message
    # (mess[0] is the command word, the rest are its arguments) and
    # ``name`` is the lower-cased sender name, used by @help for
    # admin-specific output.  Returns the reply text.
    me = mess[0]
    if me == '@lyrics':
        # Try azlyrics search first; fall back to lyricsmode if no hit.
        query = '{}'.format(' '.join(mess[1:]))
        hdr = {'User-Agent': 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1',
               'Accept-Language': 'en-US,en;q=0.8',
               'Connection': 'keep-alive'}
        r = requests.get('https://search.azlyrics.com/search.php',
                         params = {'q': query,
                                   'w': 'songs'},
                         headers = hdr)
        url = bs4.BeautifulSoup(r.text, 'html.parser').find('td', {'class': 'text-left visitedlyr'})
        if not url:
            # No azlyrics result -- search lyricsmode instead.
            r = requests.get('https://www.lyricsmode.com/search.php',
                             params = {'search': query},
                             headers = hdr)
            soup = bs4.BeautifulSoup(r.text, 'html.parser')
            url = soup.find('a', {'class': 'lm-link lm-link--primary lm-link--highlight'})
            if not url:
                return 'lyrics can\'t be found'
            r = requests.get('https://www.lyricsmode.com{}'.format(url.attrs['href']))
            soup = bs4.BeautifulSoup(r.text, 'html.parser')
            # "Artist - Title" from the breadcrumb, then the lyric body.
            return '{0}\n\n{1}'.format(
                ' - '.join([i.text[1:] for i in soup.find('ul', {'class': 'breadcrumb'}).findAll('li')[-2:]])[:-7],
                soup.find('p', {'class': 'ui-annotatable js-lyric-text-container'}).text[29:])
        r = requests.get(url.a.attrs['href'])
        soup = bs4.BeautifulSoup(r.text, 'html.parser')
        # NOTE(review): the [21] div index and [:-22] title slice are tied
        # to azlyrics' page layout -- fragile; verify against the live site.
        return '{0}\n{1}'.format(
            soup.title.text[:-22],
            soup.findAll('div')[21].text)
    elif me == '@kbbi':
        # Look the phrase up in KBBI Online (Indonesian dictionary).
        url = 'https://kbbi.kemdikbud.go.id/entri/{}'.format(' '.join(mess[1:]))
        raw = requests.get(url).text
        if "Entri tidak ditemukan." in raw:
            return 'entry not found: {}'.format(' '.join(mess[1:]))
        arti = []
        arti_contoh = []
        # Keep only the markup between the entry heading (<h2>) and the
        # next section (<h4>).
        isolasi = raw[raw.find('<h2>'):raw.find('<h4>')]
        soup = bs4.BeautifulSoup(isolasi, 'html.parser')
        entri = soup.find_all('ol') + soup.find_all('ul')
        for tiap_entri in entri:
            for tiap_arti in tiap_entri.find_all('li'):
                # The word-class tag is rendered in red; the remainder of
                # the list item is the definition (sense : example).
                kelas = tiap_arti.find(color="red").get_text().strip()
                arti_lengkap = tiap_arti.get_text().strip()[len(kelas):]
                if ':' in arti_lengkap:
                    arti_saja = arti_lengkap[:arti_lengkap.find(':')]
                else:
                    arti_saja = arti_lengkap
                if kelas:
                    hasil = '({0}) {1}'
                else:
                    hasil = '{1}'
                arti_contoh.append(hasil.format(kelas, arti_lengkap))
                arti.append(hasil.format(kelas, arti_saja))
        return '\n'.join(arti).replace('(n)', '( n )')
    elif me == '@tr':
        # Translate any language into English via Google Translate's
        # mobile endpoint.
        params = {
            'hl':'id',
            'sl':'auto',
            'tl':'en',
            'ie':'UTF-8',
            'prev':'_m',
            'q':' '.join(mess[1:])
        }
        url = 'https://translate.google.com/m'
        r = requests.get(url, params=params)
        soup = bs4.BeautifulSoup(r.text, 'html.parser')
        return soup.find(class_='t0').text
    elif me == '@wiki':
        # Fetch the Indonesian mobile Wikipedia page for the phrase.
        m = False
        url = 'https://id.m.wikipedia.org/wiki/' + '_'.join(mess[1:])
        r = requests.get(url)
        soup = bs4.BeautifulSoup(r.text, 'html.parser')
        res = '$'
        temp = ''
        if soup.find('p'):
            if 'dapat mengacu kepada beberapa hal berikut:' in soup.find('p').text or 'bisa merujuk kepada' in soup.find('p').text:
                # Disambiguation page: collect the alternatives listed
                # between the "baca dalam" and "privasi" list items.
                temp += soup.find('p').text + '\n'
                for i in soup.find_all('li'):
                    if 'privasi' in i.text.lower():
                        m = False
                    if m:
                        temp += '- ' + i.text + '\n'
                    if 'baca dalam' in i.text.lower():
                        m = True
            else:
                # Regular article: take the first few paragraphs, capped at 6.
                paragraph = 6 if arg.paragraph >= 6 else arg.paragraph
                for i in soup.find_all('p')[:paragraph]:
                    if 'akurasi' in i.text.lower():
                        pass
                    else:
                        temp += i.text + '\n\n'
            res += temp
        res += '<br>read more: ' + r.url
        if '$<br>' in res:
            # Nothing was extracted -- the page had no usable paragraphs.
            res = ' sorry, I can\'t find the definition of "%s"' % ' '.join(mess[1:])
        return res[1:]
    elif me == '@help':
        res = 'Hello %s, ' % (' '.join([i.capitalize() for i in name.split()]))
        res += 'you are admin now\n\n' if name in ADMIN else 'have a nice day\n\n'
        for x in HELP_TEXT:
            c = x.split()
            if len(c) > 2:
                # Command lines: admins see everything; regular users only
                # see commands that are not blacklisted.
                if x.split()[1] in COMMANDS:
                    if name in ADMIN:
                        res += x + '\n'
                    elif x.split()[1] not in BLACKLIST_COMMAND:
                        res += x + '\n'
            else:
                res += x + '\n'
        return res
# --------------- unknow ----------------- #
def updt(progress, total):
    """Redraw a one-line download progress bar on stdout.

    ``progress`` and ``total`` are byte counts; the bar is 25 cells wide
    and colorised when ``arg.color`` is set.
    """
    bar_width = 25
    marker = '\x1b[32m#\x1b[0m' if arg.color else '#'
    status = '%s/%s' % (convertSize(progress), convertSize(total))
    fraction = float(progress) / float(total)
    filled = int(round(bar_width * fraction))
    line = "\r{:<9}[{}] {} [{:.0f}%]".format(
        'PROGRESS',
        marker * filled + "-" * (bar_width - filled),
        status,
        round(fraction * 100, 0)
    )
    sys.stdout.write(line)
    sys.stdout.flush()
def convertSize(n, format='%(value).1f %(symbol)s', symbols='customary'):
    """Format a byte count as a human-readable string.

    Args:
        n: byte count (coerced with int()); must be >= 0.
        format: %-style template with ``value`` and ``symbol`` keys.
        symbols: which unit table from SYMBOLS to use.

    Returns:
        str: e.g. ``'1.0 K'`` for 1024 with the default table.

    Raises:
        ValueError: if ``n`` is negative.
    """
    SYMBOLS = {
        'customary': ('B', 'K', 'Mb', 'G', 'T', 'P', 'E', 'Z', 'Y'),
        'customary_ext': ('byte', 'kilo', 'mega', 'giga', 'tera', 'peta', 'exa',
                          'zetta', 'iotta'),
        'iec': ('Bi', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'),
        'iec_ext': ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi',
                    'zebi', 'yobi'),
    }
    n = int(n)
    if n < 0:
        raise ValueError("n < 0")
    units = SYMBOLS[symbols]
    # Walk the units from largest to smallest; the first whose factor
    # fits into n determines the scaled value.
    for exp in range(len(units) - 1, 0, -1):
        factor = 1 << exp * 10
        if n >= factor:
            return format % {'symbol': units[exp], 'value': float(n) / factor}
    # Below 1024: report in base units, unscaled.
    return format % {'symbol': units[0], 'value': n}
def parse_url(url):
    """Return the host portion of an http(s) URL (no scheme, no path)."""
    scheme_len = 8 if url.startswith('https://') else 7
    return url[scheme_len:].split('/', 1)[0]
def get_file(url, name = 'zvBot.jpeg'):
    """Download ``url`` to the local file ``name`` with a progress bar.

    Returns:
        bool: always True on completion.
    """
    logger('downloading file from %s' % parse_url(url))
    r = requests.get(url, stream=True)
    # NOTE(review): touching r.content reads the whole body up front, so
    # the transfer is not really streamed; it is kept only for the total.
    file_size = len(r.content)
    downloaded = 0
    with open(name, 'wb') as f:
        for chunk in r.iter_content(1024):
            # bug fix: this used to be guarded by `if buffer:` -- in
            # Python 2 `buffer` is the always-truthy builtin type (and a
            # NameError on Python 3), so the guard never did anything.
            updt(downloaded, file_size)
            f.write(chunk)
            f.flush()
            downloaded += len(chunk)
    print ('') # new line after the progress bar
    return True
# -------------- starting bot ---------------- #
class start_bot:
    """Interactive Facebook Messenger bot.

    Drives mbasic.facebook.com through a ``mechanize`` browser: logs in
    (or loads a cookie jar), polls the inbox, parses the latest message
    of the selected chat with BeautifulSoup and dispatches the ``@...``
    commands.  Per-user state (blacklist flag, usage counter, bot name,
    last command) lives in ``self.config``.

    NOTE(review): relies on Python 2 (``raw_input``, ``cookielib``,
    ``str.decode('base64')``) and on the Indonesian mbasic UI strings
    ('Cari pesan', 'Keluar', 'Hapus', ...) -- confirm before porting.
    """
    def __init__(self, username, password):
        # Base URL of the mobile-basic Facebook frontend.
        self.url = 'https://mbasic.facebook.com'
        self.username = username
        self.password = password
        # Next sequence number for cached image files on disk.
        self.image_numbers = self.get_last_images()
        # URL of the currently selected conversation (set by run_a_bot).
        self.burl = None
        # user config
        self.config = {
            'blacklist':{},
            'last':{},
            'limit':{},
            'botname':{}
        }
        self.br = self.setup()
        self.login()
    def run_a_bot(self):
        """Pick the newest chat from the inbox and answer its commands."""
        self.br.open(self.url + '/messages/read')
        name = False
        for i in self.br.links():
            if name:
                self.name = i.text.lower().split(' (')[0]
                # added new user
                if self.name not in self.config['last'].keys():
                    self.config['blacklist'][self.name] = False
                    self.config['limit'][self.name] = 0
                    self.config['botname'][self.name] = __BOTNAME__
                    self.config['last'][self.name] = 'unknow'
                if not self.config['blacklist'][self.name]:
                    logger('choose chat from %s' % i.text)
                if self.name not in BLACKLIST or self.config['blacklist'][self.name]:
                    self.burl = self.url + i.url
                    break
                else:
                    logger('blacklist user detected, skipped\n%s' % '-'*arg.long_separator, 'WARNING')
                    self.config['blacklist'][self.name] = True
                    break
            # Links that appear after the search box ('Cari pesan') are
            # the conversation entries.
            if 'Cari pesan' == i.text:
                name = True
        if self.burl:
            for _ in range(arg.refresh):
                shell = True
                allow = True
                not_igd_and_set = True
                text = self.br.open(self.burl).read()
                for x in self.br.links():
                    if arg.group_chat:
                        # 'orang' suffix marks a member count, i.e. a group chat.
                        if x.text.lower()[-5:] == 'orang':
                            logger('group chat detected, skipped', 'WARNING')
                            sprt()
                            self.config['blacklist'][self.name] = True
                            break
                        else:
                            break
                soup = bs4.BeautifulSoup(text, 'html.parser')
                m = soup.find_all('span')
                com = ['']
                for num, i in enumerate(m):
                    if 'dilihat' in i.text.lower():
                        # NOTE(review): text[:3] yields 3 chars and can never
                        # equal the 4-char '@igd', so this branch looks dead
                        # (off-by-one?) -- confirm the intended prefix length.
                        if m[num-3].text[:3].lower() == '@igd' and 'instagram' in m[num-3].text and len(m[num-3].text.split('/')) > 4:
                            logger('receive command: @igd')
                            not_igd_and_set = False
                            self.config['last'][self.name] = '@igd'
                            logger('make a requests')
                            ig_code = m[num-3].text.split('/')[4][1:]
                            logger('code: %s', ig_code)
                            self.get_file_from_instagram(ig_code)
                            break
                        not_com = m[num-3].text.lower()
                        com = not_com.split()
                        break
                if self.config['last'][self.name] and _ == 0 and not self.config['blacklist'][self.name]:
                    logger('last command: %s' % self.config['last'][self.name])
                if len(com) == 1 and com[0] in NOT_SINGLE_COMMAND:
                    shell = False
                try:
                    if self.config['limit'][self.name] == arg.limit and self.name not in ADMIN and not self.config['blacklist'][self.name] and com[0] in COMMANDS:
                        logger('user has exceeded the limit')
                        self.send('You have reached the usage limit')
                        self.config['blacklist'][self.name] = True
                    if com[0] in COMMANDS and com[0] != '@help':
                        self.config['limit'][self.name] += 1
                    if com[0] in BLACKLIST_COMMAND:
                        allow = False
                        if not self.config['blacklist'][self.name] and self.name not in ADMIN:
                            logger('receive command: %s' % com[0])
                            self.send('sorry, this command has been disabled by admin')
                        if self.name in ADMIN:
                            allow = True
                    # execute
                    if com[0] in COMMANDS and shell and allow:
                        if com[0] != '@igd' and not self.config['blacklist'][self.name]:
                            self.bcom = com[0]
                            self.config['last'][self.name] = com[0]
                            c_m = com[0]
                            logger('receive command: %s' % c_m)
                            if not_igd_and_set and com[0] != '@quit':
                                if com[0] in NOT_SINGLE_COMMAND or '_quote' in com[0]:
                                    logger('value:%s' % not_com.replace(com[0],''))
                                logger('make a requests')
                                if com[0] == '@img':
                                    self.send_image(get_file('https://source.unsplash.com/640x640/?' + not_com.replace(com[0],'')[1:]))
                                    sprt()
                                elif com[0] == '@calc':
                                    try:
                                        i = ''
                                        for x in not_com:
                                            if x.isdigit() or x in ['/', '*', '+', '-', '%']:
                                                i += x
                                        # NOTE(review): eval on filtered user input;
                                        # the char whitelist limits it, but this is
                                        # still eval of untrusted data.
                                        res = eval(i)
                                        self.send('%s\n\n= %s' % (not_com[6:],res))
                                    except (NameError,SyntaxError):
                                        self.send('invalid value: %s' % not_com[6:])
                                elif '_quote' in com[0]:
                                    self.send_image(self.quote(' '.join(com[1:])))
                                    sprt()
                                elif com[0] == '@igstalk':
                                    self.ig_stalk(com[1])
                                else:
                                    self.send(command(com, self.name))
                    elif com[0] == '@quit':
                        if self.name in ADMIN:
                            self.send('bot stopped, thank you for chatting with me ^^')
                            exit('stopped bot\n' + '-'*arg.long_separator)
                        else:
                            self.send('You are not an admin, access is denied')
                except IndexError:
                    pass
    # ------------- other tool ------------ #
    def ig_stalk(self,username):
        """Scrape an Instagram profile via insta-stalker.com and send the
        avatar plus bio/counters to the current chat."""
        text = requests.get('https://insta-stalker.com/profile/%s' % username).text
        soup = bs4.BeautifulSoup(text, 'html.parser')
        try:
            data = {'profile_url':soup.find(class_='profile-img').attrs['src'],
                'bio':'',
                'data':{'following':0, 'followers':0, 'posts':0}}
            for num,i in enumerate(soup.find_all('p')[:-2]):
                if 'http' not in i.text and num == 1:
                    break
                data['bio'] += i.text + '\n\n'
            if 'private' not in data['bio']:
                for num,i in enumerate(soup.find_all('script')[8:][:9]):
                    if 'var' in i.text:
                        break
                    # NOTE(review): dict.keys()[num] only works on Python 2
                    # (list); breaks on Python 3 (view object).
                    data['data'][data['data'].keys()[num]] = i.text[:-3].split('(')[-1]
                self.send_image(get_file(data['profile_url']))
                self.send('%s\nFollowing: %s\nFollowers: %s\nPosts: %s' % (data['bio'][:-1], data['data']['following'], data['data']['followers'], data['data']['posts']))
        except AttributeError:
            self.send('invalid username: %s' % username)
    def quote(self, quote = 'hello world!'):
        """Render ``quote`` as an image through an external quote-maker
        site (which one depends on self.bcom) and save it as zvBot.jpeg."""
        link = 'http://shiroyasha.tech/?tools='
        if self.bcom == '@sgb_quote':
            link = 'https://wirayudaaditya.site/quotes/?module='
            xs = 'sgbquote'
        elif self.bcom == '@tanpakertas_quote':
            xs = 'tanpakertas_'
        elif self.bcom == '@rasa_quote':
            xs = 'rasauntukmu'
        elif self.bcom == '@img_quote':
            link = 'https://wirayudaaditya.site/quotes/?module='
            xs = 'autoquotemaker'
        self.br.open(link + xs)
        self.br.select_form(nr=0)
        self.br.form['quote'] = quote
        if self.bcom in ('@sgb_quote','@tanpakertas_quote','@img_quote'):
            self.br.form['copyright'] = self.name
        res = self.br.submit().read()
        soup = bs4.BeautifulSoup(res, 'html.parser')
        # The result page embeds the image as a base64 data URI;
        # .decode('base64') is Python 2 only.
        if self.bcom in ('@img_quote', '@sgb_quote'):
            open('zvBot.jpeg', 'wb').write(soup.find_all('a')[-1].img['src'].split(',')[1].decode('base64'))
        else:
            open('zvBot.jpeg', 'wb').write(soup.find_all('center')[1].img['src'].split(',')[1].decode('base64'))
        return True
    # --------------- other functions ------- #
    def upload_file(self,name):
        """Upload ``name`` to datafilehost.com and return the share URL."""
        logger('uploading file')
        r = requests.post('https://www.datafilehost.com/upload.php',
            files={'upfile':open(name,'rb')} )
        return str(bs4.BeautifulSoup(r.text,'html.parser').find('tr').input['value'])
    def get_last_commands(self):
        """Seed per-user config entries from the current inbox listing."""
        _ = False
        self.br.open(self.url + '/messages/read')
        for i in self.br.links():
            if 'Lihat Pesan Sebelumnya' == i.text:
                break
            if _:
                name = i.text.lower().split(' (')[0]
                self.config['limit'][name] = 0
                self.config['blacklist'][name] = False
                self.config['botname'][name] = __BOTNAME__
                self.config['last'][name] = 'unknow'
                if arg.admin:
                    ADMIN.append(name)
            if 'Cari pesan' == i.text:
                _ = True
    def get_last_images(self):
        """Return the next free index for cached image_<N>.jpeg files."""
        x = 1
        for i in glob.glob(arg.dir_cache+'/image_*.jpeg'):
            num = int(i.split('/')[-1].split('_')[1][:-5]) + 1
            if num >= x:
                x = num
        return x
    def get_file_from_instagram(self, code):
        """Download the image(s) of Instagram post ``code`` and send them."""
        try:
            r = requests.get('https://www.instagram.com/p/'+code, params={'__a': 1}).json()
            media = r['graphql']['shortcode_media']
            if media['is_video']:
                self.send('sorry, i can\'t download other than images')
            else:
                if media.get('edge_sidecar_to_children', None):
                    self.send('downloading multiple images of this post')
                    for child_node in media['edge_sidecar_to_children']['edges']:
                        self.send_image(get_file(child_node['node']['display_url']), 'zvBot.jpeg')
                else:
                    self.send('downloading single image')
                    self.send_image(get_file(media['display_url']), 'zvBot.jpeg')
                sprt()
        except (KeyError, ValueError):
            self.send('invalid code: %s' % code)
    # ----------- send command ------------- #
    def send(self, temp):
        """Send ``temp`` to the current chat, one message per '<br>' part,
        optionally appending the '- via <botname> | limit x/y' footer."""
        n = True
        # NOTE(review): `and` binds tighter than `or`, so this reads as
        # (arg.botname and X) or Y -- confirm the grouping is intended.
        if arg.botname and temp.split()[0] != 'download' or 'wait a minute' not in temp:
            temp += ('\n\n- via {0} | limit {1}/{2}'.format(
                self.config['botname'][self.name],
                self.config['limit'][self.name], arg.limit))
        for message in temp.split('<br>'):
            logger('sending message: %s' % message)
            self.br.select_form(nr=1)
            self.br.form['body'] = message.capitalize()
            self.br.submit()
            logger('result: success')
            if 'download' in message.lower() or 'wait a minute' in message.lower():
                n = False
            if 'example' in message.lower():
                n = True
            if n:
                sprt()
    def send_image(self, image, x = 'zvBot.jpeg'):
        """Upload the local image file ``x`` into the current chat and
        manage the on-disk image cache afterwards."""
        if '_quote' in self.bcom:
            self.br.open(self.burl)
        logger('send pictures to the recipient')
        if arg.cache:
            logger('picture name: image_%s.jpeg' % self.image_numbers)
        self.br.select_form(nr=1)
        self.br.open(self.br.click(type = 'submit', nr = 2))
        self.br.select_form(nr=0)
        self.br.form.add_file(open(x), 'text/plain', x, nr=0)
        self.br.submit()
        logger('result: success')
        if image:
            if arg.cache:
                os.rename(str(x), 'image_%s.jpeg' % self.image_numbers)
                if arg.up_file:
                    self.send('hd image: ' + self.upload_file('image_%s.jpeg' % self.image_numbers))
                os.system('mv image_%s.jpeg %s' % (self.image_numbers, arg.dir_cache))
            else:
                os.remove(x)
            self.image_numbers +=1
    # ----------- Useless function --------- #
    def search_(self):
        """List recent conversations on stdout; return {num: {url, name}}."""
        data = {}
        logger('search for the latest chat history\n', 'DEBUG')
        self.br.open(self.url + '/messages/read')
        xD = False
        num = 1
        for i in self.br.links():
            if 'Lihat Pesan Sebelumnya' == i.text:
                break
            if xD:
                print ('%s) %s' % (num, i.text.lower().split(' (')[0]))
                data[num] = {'url': i.url, 'name': i.text.lower().split(' (')[0]}
                num += 1
            if 'Cari pesan' == i.text:
                xD = True
        return data
    def select_(self):
        """Interactively pick conversations by number (Python 2 raw_input)."""
        data = self.search_()
        final_ = []
        n = []
        user_ = raw_input('\nenter numbers [1-%s] : ' % len(data))
        for x in user_.split(','):
            if int(x) in range(len(data) + 1) and x not in n:
                final_.append(data[int(x)])
                n.append(x)
        sprt()
        logger('total selected : %s' % len(final_), 'DEBUG')
        sprt()
        return final_
    def delete_(self):
        """Delete the chat history of each selected conversation."""
        res = self.select_()
        for i in res:
            logger('delete messages from %s' % i['name'])
            self.br.open(self.url + i['url'])
            self.br.select_form(nr=2)
            self.br.open(self.br.click(type = 'submit', nr = 1))
            self.br.open(self.url + self.br.find_link('Hapus').url)
        logger('finished all')
    # ----------- Browser Options ---------- #
    def setup(self):
        """Build and configure the mechanize browser (cookies, headers)."""
        self.__ = False
        xd = os.uname()
        logger('build a virtual server (%s %s)' % (xd[0], xd[-1]), 'DEBUG')
        br = mechanize.Browser()
        self.cookie = cookielib.LWPCookieJar()
        if arg.cookie and not arg.own_bot:
            logger('use external cookies', 'DEBUG')
            self.cookie.load(arg.cookie)
            self.__ = True
        br.set_handle_robots(False)
        br.set_handle_equiv(True)
        br.set_handle_referer(True)
        br.set_handle_redirect(True)
        br.set_cookiejar(self.cookie)
        br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time = 5)
        br.addheaders = [('user-agent', arg.ua)]
        br.open(self.url)
        return br
    def login(self):
        """Log in (unless a cookie jar was loaded) and enter the main loop."""
        logger('make server configuration', 'DEBUG')
        if not self.__ and arg.own_bot:
            self.br.select_form(nr=0)
            self.br.form['email'] = self.username
            self.br.form['pass'] = self.password
            self.br.submit()
            self.br.select_form(nr = 0)
            self.br.submit()
        if 'login' not in self.br.geturl():
            for i in self.br.links():
                # 'Keluar (<name>)' is the logout link; grab the account name.
                if 'Keluar' in i.text:
                    name = i.text.replace('Keluar (', '')[:-1]
                    logger('server is running (%s)' % name, 'DEBUG')
                    logger('Press Ctrl-C to quit.', 'DEBUG')
                    sprt()
                    if arg.info:
                        logger('settings\n')
                        for num,i in enumerate(arg.__dict__):
                            print ('arg.{0:<20} {1}'.format(i+':', arg.__dict__[i]))
                        print ('') # new line
                        sprt()
                    if arg.save_cookie and not arg.cookie:
                        res = name.replace(' ', '_') + '.cj'
                        self.cookie.save(res)
                        logger('save the cookie in %s' % res, 'DEBUG')
                        sprt()
                    if not os.path.isdir(arg.dir_cache):
                        logger('create new cache directory', 'DEBUG')
                        os.mkdir(arg.dir_cache)
                        sprt()
                    self.get_last_commands()
                    if not arg.delete_chat:
                        while True:
                            self.run_a_bot()
                            logger('refresh', 'DEBUG')
                            sprt()
                    else:
                        self.delete_()
                        sprt()
                        exit()
        else:
            logger('failed to build server', 'ERROR')
            sprt()
def logger(mess, level='INFO', ex=False, sprt=False):
    """Print a formatted, optionally colored log line; mirror it to
    ``arg.log`` when configured.

    Args:
        mess (str): message text (lower-cased, utf8-encoded before use).
        level (str): INFO / DEBUG / ERROR / WARNING / CRITICAL.
        ex (bool): when True, exit the process after logging.
        sprt (bool): separator mode -- print dashes instead of the level.
    """
    mess = mess.lower().encode('utf8')
    # ANSI color codes per level (applied only when arg.color is set).
    code = {'INFO' : '38;5;2',
            'DEBUG': '38;5;11',
            'ERROR': 31,
            'WARNING': 33,
            'CRITICAL':41}
    if arg.underline:
        # Underline one configured word (green underline when colored).
        mess = mess.replace(arg.underline, '\x1b[%sm%s\x1b[0m' % ('4;32' if arg.color else '4' , arg.underline))
    message = '{0:<9}{1}'.format(level + ' ' if not sprt else ('-'*9), mess[:-9] if sprt else mess)
    print ('\r{1}{0}'.format(message.replace(level, '\x1b[%sm%s\x1b[0m' % (code[level], level)) if arg.color else message, time.strftime('%H:%M:%S ') if arg.time and not sprt else ''))
    if arg.log:
        if not os.path.isfile(arg.log):
            # First write: put a small header at the top of the log file.
            open(arg.log, 'a').write('# create a daily report | %s v%s\n# %s\n' % (__BOTNAME__, __VERSION__, time.strftime('%c')))
        with open(arg.log, 'a') as f:
            if arg.underline:
                # Strip the ANSI underline markup before writing to disk.
                message = message.replace('\x1b[{0}m{1}\x1b[0m'.format(
                    '4;32' if arg.color else '4' , arg.underline
                ),
                arg.underline
                )
            f.write('\n{0}{1}{2}'.format(
                time.strftime('%H:%M:%S ') if not sprt else '',
                message.replace('-'*arg.long_separator, '-'*30),
                '' if not ex else '\n')
                )
    if ex:
        exit()
def main():
    """Parse the command line, apply global configuration, and start the bot.

    Populates the module globals (``arg``, ``__BOTNAME__``, ``user``,
    ``pwd``) that the rest of the program reads, then either prints the
    requested information (version/license/commands) or runs ``start_bot``.
    """
    global __BOTNAME__, __LICENSE__, cookie, arg, user, pwd
    parse = argparse.ArgumentParser(usage='python2 zvbot [--run] (--cookie PATH | --account USER:PASS) [options]', description='description:\n create a virtual server for Bot Messenger Facebook with a personal account', formatter_class=argparse.RawTextHelpFormatter, epilog='author:\n zevtyardt <[email protected]>\n ')
    parse.add_argument('-r', '--run', dest='run', action='store_true', help='run the server')
    value = parse.add_argument_group('value arguments')
    value.add_argument('--account', metavar='USER:PASS', dest='own_bot', help='create your own bot account')
    value.add_argument('--botname', metavar='NAME', dest='default_botname', help='rename your own bot, default %s' % __BOTNAME__)
    value.add_argument('--blacklist', metavar='NAME', dest='add_blacklist_user', action='append', help='add a new blacklist user by name')
    value.add_argument('--cookie', metavar='PATH', dest='cookie', help='use our own cookie')
    # bug fix: this option used action='append' with a *string* default,
    # but every consumer (os.mkdir(arg.dir_cache), glob.glob(arg.dir_cache+...))
    # expects a single path string; appending produced a list and broke them.
    value.add_argument('--dirname', metavar='DIRNAME', dest='dir_cache', help='name of directory is used to store images', default='cache_image')
    # bug fix: arg.ignore_command is iterated below; without action='append'
    # it was a plain string, so the loop walked single characters.
    value.add_argument('--ignore-cmd', metavar='COMMAND', dest='ignore_command', action='append', help='adding a prohibited command', choices=COMMANDS)
    value.add_argument('--limit', metavar='INT', dest='limit', help='limit of request from the user, default 4', type=int, default=4)
    value.add_argument('--logfile', metavar='PATH', dest='log', help='save all logs into the file')
    value.add_argument('--long-sprt',metavar='INT',dest='long_separator', help='long separating each session, min 20 max 30', type=int, default=30, choices=range(20,31))
    value.add_argument('--new-admin', metavar='NAME', dest='add_admin', action='append', help='add new admin by name')
    value.add_argument('--paragraph', metavar='INT', dest='paragraph', help='paragraph number on wikipedia, max 6', type=int, default=2)
    value.add_argument('--refresh', metavar='INT', dest='refresh', help='how many times the program refreshes the page', type=int, default=8)
    value.add_argument('--underline', metavar='WORD', dest='underline', help='underline the specific word in all logs')
    value.add_argument('--user-agent', metavar='UA', dest='ua', help='specify a custom user agent', default='Mozilla/5.0 (Linux; Android 7.0; 5060 Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.109 Mobile Safari/537.36')
    choice = parse.add_argument_group('choice arguments')
    choice.add_argument('--all-admin', dest='admin', action='store_true', help='everyone can use admin command')
    choice.add_argument('--clear-screen', dest='clear_screen', action='store_true', help='clean the screen before running the bot')
    choice.add_argument('--color', dest='color', action='store_true', help='show colors in all logs')
    choice.add_argument('--delete-chat', dest='delete_chat', action='store_true', help='delete the latest chat history')
    choice.add_argument('--delete-logfile', dest='delete_logfile', action='store_true', help='delete old logs and create new logs')
    choice.add_argument('--ignore-botname', dest='botname', action='store_false', help='don\'t add the bot name to the final result')
    choice.add_argument('--ignore-cache', dest='cache', action='store_false', help='does not store all images from the sender\'s request')
    choice.add_argument('--ignore-group', dest='group_chat', action='store_true', help='ignore existing chat groups')
    choice.add_argument('-i', '--info', dest='info', action='store_true', help='showing any information')
    choice.add_argument('-l', '--license', dest='license', action='store_true', help='print license and exit.')
    choice.add_argument('-m', '--more', dest='commands', action='store_true', help='print all the available commands and exit')
    choice.add_argument('--save-cookie', action='store_true', dest='save_cookie', help='save session cookies into the file')
    choice.add_argument('--show-time', dest='time', action='store_true', help='show time in all logs')
    choice.add_argument('-v', '--version', dest='version', action='store_true', help='print version information and exit')
    choice.add_argument('-u', '--upload', dest='up_file', action='store_true', help='enable file upload. program will send hd image links')
    arg = parse.parse_args()
    # Informational flags short-circuit before any setup work.
    if arg.version:
        exit ('v%s' % __VERSION__)
    if arg.license:
        exit (__LICENSE__)
    if arg.commands:
        exit ('\n' + ('\n'.join(HELP_TEXT)).replace('<br>', '\n') + '\n')
    if arg.default_botname:
        __BOTNAME__ = arg.default_botname
    if arg.ignore_command:
        # Each entry is one command name (see action='append' above).
        for i in arg.ignore_command:
            if i.lower() != 'help':
                cmd = '@'+ i.lower() if i[0] != '@' else i.lower()
                BLACKLIST_COMMAND.append(cmd)
    if arg.add_admin:
        for i in arg.add_admin:
            ADMIN.append(i.lower())
    if arg.add_blacklist_user:
        for i in arg.add_blacklist_user:
            BLACKLIST.append(i.lower())
    # Require --run plus exactly one of --cookie / --account.
    if arg.run and arg.cookie and not arg.own_bot or arg.run and not arg.cookie and arg.own_bot:
        if arg.delete_logfile and arg.log:
            if os.path.isfile(arg.log):
                os.remove(arg.log)
        if arg.clear_screen:
            print('\x1bc')
        try:
            logger('Facebook Messenger bot | created by zevtyardt', 'DEBUG')
            user, pwd = arg.own_bot.split(':') if arg.own_bot else ('', '')
            start_bot(user, pwd)
        except KeyboardInterrupt:
            logger('user interrupt: stopped bot\n'+'-'*arg.long_separator, 'ERROR', ex=True)
        except Exception as e:
            logger('%s\n%s' % (e, '-'*arg.long_separator), 'CRITICAL', ex=True)
    else:
        print ('\n' + ( __BOTNAME__ + '\x1b[0m v' + __VERSION__ + '\n').center(77) )
        parse.print_help()
if __name__ == '__main__':
    main()
|
zvbot
|
/zvbot-0.1.3.tar.gz/zvbot-0.1.3/zvbot.py
|
zvbot.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Distribution metadata: zvbot ships a single module (zvbot.py) plus a
# console-script entry point (`zvbot` -> zvbot:main).  Python 2.7 only.
setup(
    name='zvbot',
    version='0.1.3',
    description='Create a virtual server for Bot Messenger Facebook with a personal account',
    long_description='see more https://github.com/zevtyardt/zvbot/blob/master/README.md',
    author='noval wahyu ramadhan',
    author_email='[email protected]',
    url='https://github.com/zevtyardt/zvbot',
    py_modules = ['zvbot'],
    include_package_data=True,
    # Third-party runtime dependencies (scraping + HTTP).
    install_requires=[
        'mechanize',
        'requests',
        'bs4',
    ],
    license="MIT",
    zip_safe=False,
    keywords=[
        'zvbot',
        'bot',
        'messenger'
    ],
    classifiers=[
        'Intended Audience :: Developers',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
    ],
    entry_points={'console_scripts': ['zvbot = zvbot:main']},
)
|
zvbot
|
/zvbot-0.1.3.tar.gz/zvbot-0.1.3/setup.py
|
setup.py
|
[](https://pypi.org/project/zvdata/)
[](https://pypi.org/project/zvdata/)
[](https://pypi.org/project/zvdata/)
[](https://travis-ci.org/zvtvz/zvdata)
[](https://codecov.io/github/zvtvz/zvdata)
[](http://hits.dwyl.io/zvtvz/zvdata)
**其他语言: [english](README-en.md).**
zvdata是一个可扩展的记录数据和分析数据的库.
# 如何使用
这是[zvt](https://github.com/zvtvz/zvt)抽象出来的通用库,可以用一种方便的方式来记录,计算和可视化数据.
# 联系方式
微信 foolcage
|
zvdata
|
/zvdata-1.2.3.tar.gz/zvdata-1.2.3/README.md
|
README.md
|
#!/usr/bin/env python
# To use a consistent encoding
from codecs import open
from os import path
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# Directory containing this setup.py; used to locate README.md.
here = path.abspath(path.dirname(__file__))
# The README doubles as the PyPI long description (rendered as markdown,
# see long_description_content_type below).
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
# Distribution metadata for zvdata.  Field semantics:
# https://packaging.python.org/specifications/core-metadata/
setup(
    name='zvdata',  # Required
    version='1.2.3',  # Required
    description='an extendable library for recording and analyzing data',  # Required
    long_description=long_description,  # Optional
    url='https://github.com/zvtvz/zvdata',  # Optional
    author='foolcage',  # Optional
    author_email='[email protected]',  # Optional
    classifiers=[  # Optional
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Intended Audience :: Customer Service',
        'Intended Audience :: Education',
        'Intended Audience :: Financial and Insurance Industry',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Office/Business :: Financial :: Investment',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.6',
    ],
    # Space-separated keyword string shown on the PyPI project page.
    keywords='spider pandas data-science data-cleaning sqlalchemy',
    # Optional
    packages=find_packages(exclude=['contrib', 'docs', 'tests', 've']),  # Required
    install_requires=['SQLAlchemy>=1.2.14', 'pandas>=0.24.2', 'arrow>=0.11.0', 'tzlocal>=1.5.1', 'xlrd>=1.1.0'],
    # Optional
    # Extra links rendered in the PyPI sidebar.
    project_urls={  # Optional
        'Bug Reports': 'https://github.com/zvtvz/zvdata/issues',
        'Funding': 'https://www.foolcage.com',
        'Say Thanks!': 'https://saythanks.io/to/foolcage',
        'Source': 'https://github.com/zvtvz/zvdata',
    },
    include_package_data=True,
    long_description_content_type="text/markdown",
)
|
zvdata
|
/zvdata-1.2.3.tar.gz/zvdata-1.2.3/setup.py
|
setup.py
|
# Zorroa Python SDK
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/README.md
|
README.md
|
#!/usr/bin/env python3
from setuptools import setup
# See https://packaging.python.org/tutorials/packaging-projects/
# for details about packaging python projects
# Generating distribution archives (run from same directory as this file)
# python3 -m pip install --user --upgrade setuptools wheel
# python3 setup.py sdist bdist_wheel
# Third-party packages required at runtime.  NOTE(review): pytest is
# listed here too, although it is normally a dev-only dependency.
requirements = [
    "requests",
    "pyOpenSSL",
    "PyJWT>=2.0",
    "backoff",
    "pytest"
]
# Distribution metadata for the zvi-client package; the code lives under
# pylib/ and is importable as the `zmlp` package.
setup(
    name='zvi-client',
    version="1.1.3",
    description='Zorroa Visual Intelligence Python Client',
    url='http://www.zorroa.com',
    license='Apache2',
    package_dir={'': 'pylib'},
    packages=['zmlp', 'zmlp.app', 'zmlp.entity'],
    scripts=[],
    author="Matthew Chambers",
    author_email="[email protected]",
    keywords="machine learning artificial intelligence",
    python_requires='>=3.4',
    classifiers=[
        "Programming Language :: Python :: 3",
        "Operating System :: OS Independent"
    ],
    include_package_data=True,
    install_requires=requirements
)
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/setup.py
|
setup.py
|
import functools
import re
import uuid
import decimal
import zipfile
import os
def is_valid_uuid(val):
    """
    Return true if the given value is a valid UUID.

    Args:
        val (str): a string which might be a UUID.

    Returns:
        bool: True if UUID
    """
    try:
        uuid.UUID(str(val))
    except ValueError:
        return False
    return True
def as_collection(value):
    """If the given value is not a collection of some type, return
    the value wrapped in a list.

    Args:
        value (:obj:`mixed`):

    Returns:
        :obj:`list` of :obj:`mixed`: The value wrapped in a list,
        or None when the input is None.
    """
    if value is None:
        return None
    return value if isinstance(value, (set, list, tuple)) else [value]
class ObjectView:
    """
    Wraps a dictionary and provides an object based view.

    Keys are normalized from camelCase to snake_case, so ``d['fooBar']``
    becomes the attribute ``obj.foo_bar``.
    """
    # Matches the position just before each interior capital letter.
    snake = re.compile(r'(?<!^)(?=[A-Z])')

    def __init__(self, d):
        normalized = {self.snake.sub('_', key).lower(): value
                      for key, value in d.items()}
        self.__dict__ = normalized
def as_id(value):
    """
    If 'value' is an object, return the 'id' property, otherwise return
    the value. This is useful for when you need an entity's unique Id
    but the user passed in an instance of the entity.

    Args:
        value (mixed): A string or an object with an 'id' property.

    Returns:
        str: The id property, or the value itself.
    """
    if hasattr(value, 'id'):
        return value.id
    return value
def as_id_collection(value):
    """If the given value is not a collection of some type, return
    the value wrapped in a list.  Entity instances are resolved into
    their unique id.

    Args:
        value (:obj:`mixed`):

    Returns:
        list: A list of entity unique ids, or None for None input.
    """
    if value is None:
        return None
    if not isinstance(value, (set, list, tuple, dict)):
        value = [value]
    # Iterating a dict yields its keys, matching the original contract.
    return [getattr(entry, "id", entry) for entry in value]
def memoize(func):
    """
    Cache the result of the given function.

    Args:
        func (function): A function to wrap.

    Returns:
        function: a wrapped function
    """
    # Exposed as func.cache so callers can inspect/clear it.
    cache = func.cache = {}

    @functools.wraps(func)
    def memoized_func(*args, **kwargs):
        key = str(args) + str(kwargs)
        try:
            return cache[key]
        except KeyError:
            result = cache[key] = func(*args, **kwargs)
            return result
    return memoized_func
def truncate(number, places):
    """
    Truncate a float to the given number of places.

    Args:
        number (float): The number to truncate.
        places (int): The number of places to preserve.

    Returns:
        Decimal: The truncated decimal value.

    Raises:
        ValueError: if places is not an int or is less than 1.
    """
    if not isinstance(places, int):
        raise ValueError('Decimal places must be an integer.')
    if places < 1:
        raise ValueError('Decimal places must be at least 1.')
    quantum = decimal.Decimal(str(10 ** - places))
    with decimal.localcontext() as ctx:
        # ROUND_DOWN == truncation toward zero.
        ctx.rounding = decimal.ROUND_DOWN
        return decimal.Decimal(str(number)).quantize(quantum)
def round_all(items, precision=3):
    """
    Round every element of the list.

    Args:
        items (list): A list of floats.
        precision (int): number of decimal places.

    Returns:
        list: A new list with each value rounded.
    """
    return [round(value, precision) for value in items]
def zip_directory(src_dir, dst_file, zip_root_name=""):
    """
    Zip a directory tree of files.

    Args:
        src_dir (str): The source directory.
        dst_file (str): The destination zip file path.
        zip_root_name (str): Optional root directory for entries inside
            the archive.

    Returns:
        str: The dst file.
    """
    def _add_tree(base, archive, root_name):
        # Recursively add every file, skipping macOS junk files.
        for current, _dirs, filenames in os.walk(base):
            for filename in filenames:
                if filename == ".DS_Store":
                    continue
                entry = os.path.join(root_name, current.replace(base, ""), filename)
                archive.write(os.path.join(current, filename), entry)

    src_dir = os.path.abspath(src_dir)
    with zipfile.ZipFile(dst_file, 'w', zipfile.ZIP_DEFLATED) as archive:
        # Trailing '/' so the base prefix strips cleanly from each root.
        _add_tree(src_dir + '/', archive, zip_root_name)
    return dst_file
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/util.py
|
util.py
|
"""Classes to support training with Models"""
import os
import logging
import json
logger = logging.getLogger(__name__)
__all__ = [
'TrainingSetDownloader'
]
class TrainingSetDownloader:
    """
    The TrainingSetDownloader class handles writing out the images labeled
    for model training to local disk.  The Assets are automatically sorted
    into train and validation sets.

    Multiple directory layouts are supported based on the Model type.

    Examples:

        # Label Detection Layout
        base_dir/flowers/set_validate/daisy
        base_dir/flowers/set_validate/rose
        base_dir/flowers/set_validate/daisy
        base_dir/flowers/set_validate/rose

        # Object Detection Layout is a COCO compatible layout
        base_dir/set_train/images/*
        base_dir/set_train/annotations.json
        base_dir/set_test/images/*
        base_dir/set_test/annotations.json
    """

    SET_TRAIN = "train"
    """Directory name for training images"""

    SET_VALIDATION = "validate"
    """Directory name for test images"""

    def __init__(self, app, model, style, dst_dir, validation_split=0.2):
        """
        Create a new TrainingImageDownloader.

        Args:
            app: (ZmlpApp): A ZmlpApp instance.
            model: (Model): A Model or unique Model ID.
            style: (str): The output style: labels-standard, objects_keras, objects_coco
            dst_dir (str): A destination directory to write the files into.
            validation_split (float): The number of images in the training
                set for every image in the validation set.
        """
        self.app = app
        self.model = app.models.get_model(model)
        self.style = style
        self.dst_dir = dst_dir
        self.validation_split = validation_split
        # label -> asset count, populated by _setup_labels_std_base_dir.
        self.labels = {}
        # label -> number of times seen; drives the train/validation split.
        self.label_distrib = {}
        # Finds assets labeled for this model with scope TRAIN.
        self.query = {
            'size': 64,
            '_source': ['labels', 'files'],
            'query': {
                'nested': {
                    'path': 'labels',
                    'query': {
                        'bool': {
                            'must': [
                                {'term': {'labels.modelId': self.model.id}},
                                {'term': {'labels.scope': 'TRAIN'}}
                            ]
                        }
                    }
                }
            }
        }
        os.makedirs(self.dst_dir, exist_ok=True)

    def build(self, pool=None):
        """
        Downloads the files labeled for training a Model to local disk.

        Args:
            pool (multiprocessing.Pool): An optional Pool instance which can be used
                to download files in parallel.

        Raises:
            ValueError: If self.style is not a supported output style.
        """
        if self.style == 'labels-standard':
            self._build_labels_std_format(pool)
        elif self.style == 'objects_coco':
            self._build_objects_coco_format(pool)
        elif self.style == 'objects_keras':
            self._build_objects_keras_format(pool)
        else:
            # BUG FIX: previously interpolated the builtin 'format' function
            # into the message instead of the unsupported style value.
            raise ValueError('{} not supported by the TrainingSetDownloader'.format(self.style))

    def _build_labels_std_format(self, pool):
        """Write one image per label into train/<label> or validate/<label>."""
        self._setup_labels_std_base_dir()
        for num, asset in enumerate(self.app.assets.scroll_search(self.query, timeout='5m')):
            prx = asset.get_thumbnail(0)
            if not prx:
                logger.warning('{} did not have a suitable thumbnail'.format(asset))
                continue
            ds_labels = self._get_labels(asset)
            if not ds_labels:
                logger.warning('{} did not have any labels'.format(asset))
                continue
            # Only the first label is used for the standard layout.
            label = ds_labels[0].get('label')
            if not label:
                logger.warning('{} was not labeled.'.format(asset))
                continue
            dir_name = self._get_image_set_type(label)
            dst_path = os.path.join(self.dst_dir, dir_name, label, prx.cache_id)
            os.makedirs(os.path.dirname(dst_path), exist_ok=True)
            logger.info('Downloading to {}'.format(dst_path))
            if pool:
                pool.apply_async(self.app.assets.download_file, args=(prx, dst_path))
            else:
                self.app.assets.download_file(prx, dst_path)

    def _build_objects_coco_format(self, pool=None):
        """
        Write labeled assets in a COCO object detection training structure.

        Args:
            pool (multiprocessing.Pool): A multi-processing pool for downloading really fast.

        Returns:
            str: A path to an annotation file.
        """
        self._setup_objects_coco_base_dir()
        coco = CocoAnnotationFileBuilder()
        for image_id, asset in enumerate(self.app.assets.scroll_search(self.query, timeout='5m')):
            prx = asset.get_thumbnail(1)
            if not prx:
                logger.warning('{} did not have a suitable thumbnail'.format(asset))
                continue
            ds_labels = self._get_labels(asset)
            if not ds_labels:
                logger.warning('{} did not have any labels'.format(asset))
                continue
            for label in ds_labels:
                set_type = self._get_image_set_type(label['label'])
                dst_path = os.path.join(self.dst_dir, set_type, 'images', prx.cache_id)
                if not os.path.exists(dst_path):
                    self._download_file(prx, dst_path, pool)
                image = {
                    'file_name': dst_path,
                    'height': prx.attrs['height'],
                    'width': prx.attrs['width']
                }
                category = {
                    'supercategory': 'none',
                    'name': label['label']
                }
                bbox, area = self._zvi_to_cocos_bbox(prx, label['bbox'])
                annotation = {
                    'bbox': bbox,
                    'segmentation': [],
                    'ignore': 0,
                    'area': area,
                    'iscrowd': 0
                }
                if set_type == self.SET_TRAIN:
                    coco.add_to_training_set(image, category, annotation)
                else:
                    coco.add_to_validation_set(image, category, annotation)
        # Write out the annotations files.
        with open(os.path.join(self.dst_dir, self.SET_TRAIN, "annotations.json"), "w") as fp:
            logger.debug("Writing training set annotations to {}".format(fp.name))
            json.dump(coco.get_training_annotations(), fp)
        with open(os.path.join(self.dst_dir, self.SET_VALIDATION, "annotations.json"), "w") as fp:
            logger.debug("Writing test set annotations to {}".format(fp.name))
            json.dump(coco.get_validation_annotations(), fp)

    def _build_objects_keras_format(self, pool=None):
        """Write labeled assets in a keras-retinanet style CSV layout."""
        self._setup_objects_keras_base_dir()
        fp_train = open(os.path.join(self.dst_dir, self.SET_TRAIN, "annotations.csv"), "w")
        fp_test = open(os.path.join(self.dst_dir, self.SET_VALIDATION, "annotations.csv"), "w")
        unique_labels = set()
        try:
            search = self.app.assets.scroll_search(self.query, timeout='5m')
            for image_id, asset in enumerate(search):
                prx = asset.get_thumbnail(1)
                if not prx:
                    logger.warning('{} did not have a suitable thumbnail'.format(asset))
                    continue
                ds_labels = self._get_labels(asset)
                if not ds_labels:
                    logger.warning('{} did not have any labels'.format(asset))
                    continue
                for label in ds_labels:
                    unique_labels.add(label['label'])
                    set_type = self._get_image_set_type(label['label'])
                    dst_path = os.path.join(self.dst_dir, set_type, 'images', prx.cache_id)
                    if not os.path.exists(dst_path):
                        self._download_file(prx, dst_path, pool)
                    # CSV row: path, x1, y1, x2, y2, label
                    line = [
                        dst_path
                    ]
                    line.extend([str(point) for point in
                                 self._zvi_to_keras_bbox(prx, label['bbox'])])
                    line.append(label['label'])
                    str_line = "{}\n".format(",".join(line))
                    if set_type == self.SET_TRAIN:
                        fp_train.write(str_line)
                    else:
                        fp_test.write(str_line)
        finally:
            fp_train.close()
            fp_test.close()
        # Class index file maps each label to a stable sorted index.
        with open(os.path.join(self.dst_dir, "classes.csv"), "w") as fp_classes:
            for idx, cls in enumerate(sorted(unique_labels)):
                fp_classes.write("{},{}\n".format(cls, idx))

    def _zvi_to_keras_bbox(self, prx, bbox):
        """Scale a relative ZVI bbox to absolute pixel coords [x1, y1, x2, y2]."""
        total_width = prx.attrs['width']
        total_height = prx.attrs['height']
        return [int(total_width * bbox[0]),
                int(total_height * bbox[1]),
                int(total_width * bbox[2]),
                int(total_height * bbox[3])]

    def _zvi_to_cocos_bbox(self, prx, bbox):
        """
        Converts a ZVI bbox to a COCO bbox.  The format is x, y, width, height.

        Args:
            prx (StoredFile): A StoredFile containing a proxy image.
            bbox (list): A ZVI bbox (relative [x1, y1, x2, y2]).

        Returns:
            tuple: (COCO style bbox, bbox area in pixels)
        """
        total_width = prx.attrs['width']
        total_height = prx.attrs['height']
        pt = total_width * bbox[0], total_height * bbox[1]
        new_bbox = [
            int(pt[0]),
            int(pt[1]),
            int(abs(pt[0] - (total_width * bbox[2]))),
            # BUG FIX: the height must be measured from the Y origin pt[1];
            # the old code subtracted from pt[0] (the X origin).
            int(abs(pt[1] - (total_height * bbox[3])))
        ]
        # COCO bboxes are [x, y, width, height], so the area is width * height.
        area = new_bbox[2] * new_bbox[3]
        return new_bbox, area

    def _download_file(self, prx, dst_path, pool=None):
        """Download a proxy file, optionally via an async pool."""
        if pool:
            pool.apply_async(self.app.assets.download_file, args=(prx, dst_path))
        else:
            self.app.assets.download_file(prx, dst_path)

    def _setup_labels_std_base_dir(self):
        """
        Sets up a directory structure for storing files used to train a model.

        The structure is basically:
            train/<label>/<img file>
            validate/<label>/<img file>
        """
        self.labels = self.app.models.get_label_counts(self.model)
        # This is layout #1, we need to add darknet layout for object detection.
        dirs = (self.SET_TRAIN, self.SET_VALIDATION)
        for set_name in dirs:
            os.makedirs('{}/{}'.format(self.dst_dir, set_name), exist_ok=True)
            for label in self.labels.keys():
                os.makedirs(os.path.join(self.dst_dir, set_name, label), exist_ok=True)
        logger.info('TrainingSetDownloader setup, using {} labels'.format(len(self.labels)))

    def _setup_objects_coco_base_dir(self):
        """Create train/images and validate/images directories."""
        dirs = (self.SET_TRAIN, self.SET_VALIDATION)
        for set_name in dirs:
            os.makedirs(os.path.join(self.dst_dir, set_name, 'images'), exist_ok=True)

    def _setup_objects_keras_base_dir(self):
        """Create train/images and validate/images directories."""
        dirs = (self.SET_TRAIN, self.SET_VALIDATION)
        for set_name in dirs:
            os.makedirs(os.path.join(self.dst_dir, set_name, 'images'), exist_ok=True)

    def _get_image_set_type(self, label):
        """
        Using the validation_split property, determine if the current label
        would be in the training set or validation set.

        Args:
            label (str): The label name.

        Returns:
            str: Either 'validate' or 'train', depending on the validation_split property.
        """
        # Everything is in the training set.
        if self.validation_split <= 0.0:
            return self.SET_TRAIN
        # Every Nth occurrence of a label goes to the validation set.
        ratio = int(1.0 / self.validation_split)
        value = self.label_distrib.get(label, 0) + 1
        self.label_distrib[label] = value
        if value % ratio == 0:
            return self.SET_VALIDATION
        else:
            return self.SET_TRAIN

    def _get_labels(self, asset):
        """
        Get the current model labels for the given asset.

        Args:
            asset (Asset): The asset to check.

        Returns:
            list[dict]: The labels for training a model.
        """
        ds_labels = asset.get_attr('labels')
        if not ds_labels:
            return []
        result = []
        for ds_label in ds_labels:
            # Only labels attached to this downloader's model count.
            if ds_label.get('modelId') == self.model.id:
                result.append(ds_label)
        return result
class CocoAnnotationFileBuilder:
    """
    CocoAnnotationFileBuilder manages building a COCO annotations file for both
    a training set and a validation set.
    """

    def __init__(self):
        self.train_set = self._make_dataset()
        self.validation_set = self._make_dataset()

    @staticmethod
    def _make_dataset():
        """Return an empty COCO output structure plus id-lookup maps."""
        return {
            "output": {
                "type": "instances",
                "images": [],
                "annotations": [],
                "categories": []
            },
            # file_name -> image id
            "img_set": {},
            # category name -> category id
            "cat_set": {}
        }

    def add_to_training_set(self, img, cat, annotation):
        """
        Add the image, category and annotation to the training set.

        Args:
            img (dict): A COCO image dict.
            cat (dict): A COCO category dict.
            annotation: (dict): A COCO annotation dict.
        """
        self._add_to_set(self.train_set, img, cat, annotation)

    def add_to_validation_set(self, img, cat, annotation):
        """
        Add the image, category and annotation to the validation set.

        Args:
            img (dict): A COCO image dict.
            cat (dict): A COCO category dict.
            annotation: (dict): A COCO annotation dict.
        """
        self._add_to_set(self.validation_set, img, cat, annotation)

    def _add_to_set(self, dataset, img, cat, annotation):
        """
        Add the image, category and annotation to the given set.

        Args:
            dataset (dict): The set we're building.
            img (dict): A COCO image dict.
            cat (dict): A COCO category dict.
            annotation: (dict): A COCO annotation dict.
        """
        img_idmap = dataset['img_set']
        cat_idmap = dataset['cat_set']
        output = dataset['output']
        annots = output['annotations']
        # Reuse ids for images/categories seen before; new ones get the next id.
        img['id'] = img_idmap.get(img['file_name'], len(img_idmap))
        cat['id'] = cat_idmap.get(cat['name'], len(cat_idmap))
        annotation['id'] = len(annots)
        annotation['category_id'] = cat['id']
        annotation['image_id'] = img['id']
        # Only append images/categories the first time they are seen.
        if img['file_name'] not in img_idmap:
            img_idmap[img['file_name']] = img['id']
            output['images'].append(img)
        if cat['name'] not in cat_idmap:
            cat_idmap[cat['name']] = cat['id']
            output['categories'].append(cat)
        output['annotations'].append(annotation)

    def get_training_annotations(self):
        """
        Return a structure suitable for a COCO annotations file.

        Returns:
            dict: The training annotations.
        """
        return self.train_set['output']

    def get_validation_annotations(self):
        """
        Return a structure suitable for a COCO annotations file.

        Returns:
            dict: The validation annotations.
        """
        return self.validation_set['output']
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/training.py
|
training.py
|
import copy
from .entity import VideoClip, Asset, ZmlpException
from .util import as_collection
__all__ = [
'AssetSearchScroller',
'VideoClipSearchScroller',
'AssetSearchResult',
'VideoClipSearchResult',
'AssetSearchCsvExporter',
'LabelConfidenceQuery',
'SingleLabelConfidenceQuery',
'SimilarityQuery',
'FaceSimilarityQuery',
'LabelConfidenceTermsAggregation',
'LabelConfidenceMetricsAggregation'
]
class SearchScroller:
    """
    The SearchScroller can iterate over large amounts of data without incurring paging
    overhead by utilizing a server side cursor. The cursor is held open for the specified
    timeout time unless it is refreshed before the timeout occurs. In this sense, it's important
    to complete whatever operation you're taking on each asset within the timeout time. For example
    if your page size is 32 and your timeout is 1m, you have 1 minute to handle 32 assets. If that
    is not enough time, consider increasing the timeout or lowering your page size.
    """

    def __init__(self, klass, endpoint, app, search, timeout="1m", raw_response=False):
        """
        Create a new AbstractSearchScroller instance.

        Args:
            klass (Class): The class used to wrap each search hit (must provide from_hit).
            endpoint (str): The search endpoint the query is POSTed to.
            app (ZmlpApp): A ZmlpApp instance.
            search: (dict): The ES search
            timeout (str): The maximum amount of time the ES scroll will be active unless it's
                refreshed.
            raw_response (bool): Yield the raw ES response rather than assets. The raw
                response will contain the entire page, not individual assets.
        """
        self.klass = klass
        self.endpoint = endpoint
        self.app = app
        # Accept query-builder objects that serialize themselves via to_dict().
        if search and getattr(search, "to_dict", None):
            search = search.to_dict()
        # Deep copy so scrolling never mutates the caller's search dict.
        self.search = copy.deepcopy(search or {})
        self.timeout = timeout
        self.raw_response = raw_response

    def batches_of(self, batch_size=50):
        """
        A generator function capable of efficiently scrolling through
        large numbers of assets, returning them in batches of
        the given batch size.

        Args:
            batch_size (int): The size of the batch.

        Returns:
            generator: A generator that yields batches of Assets.
        """
        batch = []
        for asset in self.scroll():
            batch.append(asset)
            if len(batch) >= batch_size:
                yield batch
                batch = []
        # Flush any final partial batch.
        if batch:
            yield batch

    def scroll(self):
        """
        A generator function capable of efficiently scrolling through large
        results.

        Examples:
            for asset in AssetSearchScroller({"query": {"term": { "source.extension": "jpg"}}}):
                do_something(asset)

        Yields:
            Asset: Assets that matched the search

        Raises:
            ZmlpException: If the server does not return a scroll id.
        """
        # Open the scroll cursor; the first page of hits comes back with it.
        result = self.app.client.post(
            "{}?scroll={}".format(self.endpoint, self.timeout), self.search)
        scroll_id = result.get("_scroll_id")
        if not scroll_id:
            raise ZmlpException("No scroll ID returned with scroll search, has it timed out?")
        try:
            while True:
                hits = result.get("hits")
                if not hits:
                    return
                if self.raw_response:
                    # Yield the whole page rather than individual wrapped hits.
                    yield result
                else:
                    for hit in hits['hits']:
                        yield self.klass.from_hit(hit)
                # The server may rotate the scroll id; always use the latest one.
                scroll_id = result.get("_scroll_id")
                if not scroll_id:
                    raise ZmlpException(
                        "No scroll ID returned with scroll search, has it timed out?")
                # NOTE(review): the continuation and delete calls use the assets
                # scroll endpoint even when self.endpoint targets another search
                # API (e.g. clips) -- confirm the backend shares one scroll endpoint.
                result = self.app.client.post("api/v3/assets/_search/scroll", {
                    "scroll": self.timeout,
                    "scroll_id": scroll_id
                })
                if not result["hits"]["hits"]:
                    return
        finally:
            # Always release the server-side cursor, even on early exit.
            self.app.client.delete("api/v3/assets/_search/scroll", {
                "scroll_id": scroll_id
            })

    def __iter__(self):
        return self.scroll()
class AssetSearchScroller(SearchScroller):
    """
    Scrolls through Assets matching a search using a server-side cursor.
    """

    def __init__(self, app, search, timeout="1m", raw_response=False):
        """
        Create a new AssetSearchScroller.

        Args:
            app (ZmlpApp): A ZmlpApp instance.
            search (dict): The ES search to scroll through.
            timeout (str): How long the cursor stays open between fetches.
            raw_response (bool): Yield raw ES pages instead of Assets.
        """
        super().__init__(Asset, 'api/v3/assets/_search',
                         app, search, timeout, raw_response)
class VideoClipSearchScroller(SearchScroller):
    """
    Scrolls through VideoClips matching a search using a server-side cursor.
    """

    def __init__(self, app, search, timeout="1m", raw_response=False):
        """
        Create a new VideoClipSearchScroller.

        Args:
            app (ZmlpApp): A ZmlpApp instance.
            search (dict): The ES search to scroll through.
            timeout (str): How long the cursor stays open between fetches.
            raw_response (bool): Yield raw ES pages instead of VideoClips.
        """
        super().__init__(VideoClip, 'api/v1/clips/_search',
                         app, search, timeout, raw_response)
class AssetSearchCsvExporter:
    """
    Export the results of an asset search to a CSV file.
    """

    def __init__(self, app, search):
        """
        Create a new AssetSearchCsvExporter.

        Args:
            app (ZmlpApp): A ZmlpApp instance.
            search (dict): The ES search whose results are exported.
        """
        self.app = app
        self.search = search

    def export(self, fields, path):
        """
        Export the given fields to a csv file output path.

        Args:
            fields (list): An array of field names.
            path (str): a file path.

        Returns:
            int: The number of assets exported.
        """
        field_list = as_collection(fields)
        exported = 0
        with open(str(path), "w") as fp:
            # Scroll through every matching asset and write one row each.
            for asset in AssetSearchScroller(self.app, self.search):
                exported += 1
                values = ["'{}'".format(asset.get_attr(field)) for field in field_list]
                fp.write("{}\n".format(",".join(values)))
        return exported
class SearchResult:
    """
    Stores a search result from ElasticSearch and provides some convenience methods
    for accessing the data.
    """

    def __init__(self, klass, endpoint, app, search):
        """
        Create a new SearchResult.

        Args:
            klass (Class): The Class to wrap the search result.
            endpoint (str): The endpoint to use for search.
            app (ZmlpApp): A ZmlpApp instance.
            search (dict): An ElasticSearch query.
        """
        self.klass = klass
        self.endpoint = endpoint
        self.app = app
        # Accept query-builder objects that serialize themselves via to_dict().
        if search and getattr(search, "to_dict", None):
            search = search.to_dict()
        self.search = search
        self.result = None
        self._execute_search()

    @property
    def items(self):
        """
        A list of assets returned by the query. This is not all of the matches,
        just a single page of results.

        Returns:
            list: The list of assets for this page.
        """
        hits = self.result.get("hits")
        if not hits:
            return []
        return [self.klass.from_hit(hit) for hit in hits['hits']]

    def batches_of(self, batch_size, max_assets=None):
        """
        A generator function which returns batches of assets in the
        given batch size. This method will optionally page through
        N pages, yielding arrays of assets as it goes.

        This method is preferred to scrolling for Assets when
        multiple pages of Assets need to be processed.

        Args:
            batch_size (int): The size of the batch.
            max_assets (int): The max number of assets to return, max is 10k

        Returns:
            generator: A generator that yields batches of Assets.
        """
        # The maximum we can page through is 10k
        asset_countdown = max_assets or 10000
        batch = []
        while True:
            # BUG FIX: the base class has no 'assets' property (only the
            # AssetSearchResult subclass does); use 'items' instead.
            assets = self.items
            if not assets:
                break
            for asset in assets:
                batch.append(asset)
                asset_countdown -= 1
                if asset_countdown <= 0:
                    break
                if len(batch) >= batch_size:
                    yield batch
                    batch = []
            if asset_countdown <= 0:
                break
            # Advance to the next page and fetch it.
            self.search['from'] = self.search.get('from', 0) + len(assets)
            self._execute_search()
        if batch:
            yield batch

    def aggregation(self, name):
        """
        Return an aggregation dict with the given name.

        Args:
            name (str): The agg name

        Returns:
            dict: the agg dict or None if no agg exists.
        """
        aggs = self.result.get("aggregations")
        if not aggs:
            return None
        # ES qualifies agg names with their type, e.g. "sterms#myagg"; accept
        # either the qualified or the bare name.
        if "#" in name:
            key = [name]
        else:
            key = [k for k in
                   self.result.get("aggregations", {}) if k.endswith("#{}".format(name))]
        if len(key) > 1:
            raise ValueError(
                "Aggs with the same name must be qualified by type (pick 1): {}".format(key))
        elif not key:
            return None
        try:
            return aggs[key[0]]
        except KeyError:
            return None

    def aggregations(self):
        """
        Return a dictionary of all aggregations.

        Returns:
            dict: A dict of aggregations keyed on name.
        """
        return self.result.get("aggregations", {})

    @property
    def size(self):
        """
        The number assets in this page. See "total_size" for the total number of assets matched.

        Returns:
            int: The number of assets in this page.
        """
        return len(self.result["hits"]["hits"])

    @property
    def total_size(self):
        """
        The total number of assets matched by the query.

        Returns:
            long: The total number of assets matched.
        """
        return self.result["hits"]["total"]["value"]

    @property
    def raw_response(self):
        """
        The raw ES response.

        Returns:
            (dict) The raw SearchResponse returned by ElasticSearch
        """
        return self.result

    def next_page(self):
        """
        Return a SearchResult containing the next page.

        Returns:
            SearchResult: The next page
        """
        search = copy.deepcopy(self.search or {})
        # BUG FIX: advance by the number of hits on this page; the old code
        # used len(result.get("hits")), which counts the KEYS of the hits
        # dict (total/max_score/hits), not the hits themselves.
        search['from'] = search.get('from', 0) + len(self.result["hits"]["hits"])
        return SearchResult(self.klass, self.endpoint, self.app, search)

    def _execute_search(self):
        # POST the current search and cache the raw response.
        self.result = self.app.client.post(self.endpoint, self.search)

    def __iter__(self):
        return iter(self.items)

    def __getitem__(self, item):
        return self.items[item]
class AssetSearchResult(SearchResult):
    """
    The AssetSearchResult subclass handles paging through an Asset search result.
    """

    def __init__(self, app, search):
        super().__init__(Asset, 'api/v3/assets/_search', app, search)

    @property
    def assets(self):
        """list: The page of Assets returned by the query."""
        return self.items
class VideoClipSearchResult(SearchResult):
    """
    The VideoClipSearchResult subclass handles paging through a VideoClip search result.
    """

    def __init__(self, app, search):
        super().__init__(VideoClip, 'api/v1/clips/_search', app, search)

    @property
    def clips(self):
        """list: The page of VideoClips returned by the query."""
        return self.items
class LabelConfidenceTermsAggregation:
    """
    Convenience class for making a simple terms aggregation on an array of predictions
    """

    def __init__(self, namespace):
        self.field = "analysis.{}.predictions".format(namespace)

    def for_json(self):
        """Return the aggregation as an ES-compatible dict."""
        label_field = "{}.label".format(self.field)
        names_agg = {
            "terms": {
                "field": label_field,
                "size": 1000,
                "order": {"_count": "desc"}
            }
        }
        return {
            "nested": {
                "path": self.field
            },
            "aggs": {
                "names": names_agg
            }
        }
class LabelConfidenceMetricsAggregation(object):
    """
    Builds a metrics aggregation over prediction scores, bucketed by label.
    """

    def __init__(self, namespace, agg_type="stats"):
        """
        Create a new LabelConfidenceMetricsAggregation

        Args:
            namespace (str): The analysis namespace. (ex: zvi-label-detection)
            agg_type (str): A type of metrics agg to perform:
                stats, extended_stats.
        """
        self.field = "analysis.{}.predictions".format(namespace)
        self.agg_type = agg_type

    def for_json(self):
        """Return the aggregation as an ES-compatible dict."""
        label_field = "{}.label".format(self.field)
        score_field = "{}.score".format(self.field)
        score_stats = {
            self.agg_type: {
                "field": score_field
            }
        }
        return {
            "nested": {
                "path": self.field
            },
            "aggs": {
                "labels": {
                    "terms": {
                        "field": label_field,
                        "size": 1000,
                        "order": {"_count": "desc"}
                    },
                    "aggs": {
                        "stats": score_stats
                    }
                }
            }
        }
class LabelConfidenceQuery(object):
    """
    A helper class for building a label confidence score query. This query must point
    at label confidence structure: For example: analysis.zvi.label-detection.

    References:

        "labels": [
            {"label": "dog", "score": 0.97 },
            {"label": "fox", "score": 0.63 }
        ]
    """

    def __init__(self, namespace, labels, min_score=0.1, max_score=1.0):
        """
        Create a new LabelConfidenceScoreQuery.

        Args:
            namespace (str): The analysis namespace with predictions. (ex: zvi-label-detection)
            labels (list): A list of labels to filter.
            min_score (float): The minimum label score, default to 0.1.
                Note that 0.0 allows everything.
            max_score (float): The maximum score, defaults to 1.0 which is highest
        """
        self.namespace = namespace
        self.field = "analysis.{}.predictions".format(namespace)
        self.labels = as_collection(labels)
        self.score = [min_score, max_score]

    def for_json(self):
        """Return the query as an ES-compatible dict."""
        label_terms = {
            "terms": {
                self.field + ".label": self.labels
            }
        }
        score_range = {
            "range": {
                self.field + ".score": {
                    "gte": self.score[0],
                    "lte": self.score[1]
                }
            }
        }
        # Boost each match by its prediction score so stronger predictions
        # rank higher in the results.
        scored_query = {
            "function_score": {
                "boost_mode": "sum",
                "field_value_factor": {
                    "field": self.field + ".score",
                    "missing": 0
                },
                "query": {
                    "bool": {
                        "filter": [label_terms, score_range]
                    }
                }
            }
        }
        return {
            "bool": {
                "filter": [label_terms],
                "must": [
                    {
                        "nested": {
                            "path": self.field,
                            "query": scored_query
                        }
                    }
                ]
            }
        }
class SingleLabelConfidenceQuery(object):
    """
    A helper class for building a label confidence score query. This query must point
    at label confidence structure: For example: analysis.zvi.label-detection.

    References:

        "labels": [
            {"label": "dog", "score": 0.97 },
            {"label": "fox", "score": 0.63 }
        ]
    """

    def __init__(self, namespace, labels, min_score=0.1, max_score=1.0):
        """
        Create a new SingleLabelConfidenceScoreQuery.

        Args:
            namespace (str): The analysis namespace with predictions. (ex: zvi-label-detection)
            labels (list): A list of labels to filter.
            min_score (float): The minimum label score, default to 0.1.
                Note that 0.0 allows everything.
            max_score (float): The maximum score, defaults to 1.0 which is highest
        """
        self.namespace = namespace
        # Note: unlike LabelConfidenceQuery, this targets the analysis root,
        # not the .predictions array.
        self.field = "analysis.{}".format(namespace)
        self.labels = as_collection(labels)
        self.score = [min_score, max_score]

    def for_json(self):
        """Return the query as an ES-compatible dict."""
        label_terms = {
            "terms": {
                self.field + ".label": self.labels
            }
        }
        score_range = {
            "range": {
                self.field + ".score": {
                    "gte": self.score[0],
                    "lte": self.score[1]
                }
            }
        }
        # Weight matches by prediction score on top of a fixed boost.
        scored_query = {
            "function_score": {
                "query": {
                    "bool": {
                        "must": [label_terms, score_range]
                    }
                },
                "boost": "5",
                "boost_mode": "sum",
                "field_value_factor": {
                    "field": self.field + ".score",
                    "missing": 0
                }
            }
        }
        return {
            "bool": {
                "filter": [label_terms],
                "must": [scored_query]
            }
        }
class SimilarityQuery:
    """
    A helper class for building a similarity search. You can embed this class anywhere
    in a ES query dict, for example:

    References:

        {
            "query": {
                "bool": {
                    "must": [
                        SimilarityQuery(hash_string)
                    ]
                }
            }
        }
    """

    def __init__(self, hashes, min_score=0.75, boost=1.0,
                 field="analysis.zvi-image-similarity.simhash"):
        self.field = field
        self.min_score = min_score
        self.boost = boost
        self.hashes = []
        self.add_hash(hashes)

    def add_hash(self, hashes):
        """
        Add a new hash to the search.

        Args:
            hashes (mixed): A similarity hash string or an asset.

        Returns:
            SimilarityQuery: this instance of SimilarityQuery
        """
        for item in as_collection(hashes) or []:
            if isinstance(item, Asset):
                # Pull the hash off the asset using the configured field.
                self.hashes.append(item.get_attr(self.field))
            elif isinstance(item, VideoClip):
                # Clips only contribute when they actually carry a simhash.
                if item.simhash:
                    self.hashes.append(item.simhash)
            else:
                # Assume a raw simhash string.
                self.hashes.append(item)
        return self

    def add_asset(self, asset):
        """
        See add_hash which handles both hashes and Assets.
        """
        return self.add_hash(asset)

    def for_json(self):
        """Return the query as an ES-compatible dict."""
        params = {
            "minScore": self.min_score,
            "field": self.field,
            "hashes": self.hashes
        }
        return {
            "script_score": {
                "query": {
                    "match_all": {}
                },
                "script": {
                    "source": "similarity",
                    "lang": "zorroa-similarity",
                    "params": params
                },
                "boost": self.boost,
                "min_score": self.min_score
            }
        }

    def __add__(self, simhash):
        self.add_hash(simhash)
        return self
class VideoClipSimilarityQuery(SimilarityQuery):
    """A SimilarityQuery preconfigured for the video clip simhash field."""

    def __init__(self, hashes, min_score=0.75, boost=1.0):
        super().__init__(hashes, min_score, boost, 'clip.simhash')
class FaceSimilarityQuery:
    """
    Performs a face similarity search.
    """

    def __init__(self, faces, min_score=0.90, boost=1.0,
                 field="analysis.zvi-face-detection.predictions.simhash"):
        """
        Create a new FaceSimilarityQuery.

        Args:
            faces (list): A prediction with a 'simhash' property or a simhash itself.
            min_score (float): The minimum score.
            boost (float): A boost value which weights this query higher than others.
            field (str): An optional field to make the comparison with. Defaults to ZVI.
        """
        # Accept raw simhash strings or prediction dicts with a 'simhash' key.
        hashes = [face if isinstance(face, str) else face['simhash']
                  for face in as_collection(faces)]
        self.simquery = SimilarityQuery(hashes, min_score, boost, field)

    def for_json(self):
        """Delegate serialization to the wrapped SimilarityQuery."""
        return self.simquery.for_json()
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/search.py
|
search.py
|
import base64
import binascii
import datetime
import decimal
import json
import logging
import os
import random
import sys
import time
from io import IOBase
from urllib.parse import urljoin
import jwt
import requests
from .entity.exception import ZmlpException
logger = logging.getLogger(__name__)
DEFAULT_SERVER = 'https://api.zvi.zorroa.com'
class ZmlpClient(object):
"""
ZmlpClient is used to communicate to a ZMLP API server.
"""
def __init__(self, apikey, server, **kwargs):
"""
Create a new ZmlpClient instance.
Args:
apikey: An API key in any supported form. (dict, base64 string, or open file handle)
server: The url of the server to connect to. Defaults to https://api.zmlp.zorroa.com
project_id: An optional project UUID for API keys with access to multiple projects.
max_retries: Maximum number of retries to make if the API server
is down, 0 for unlimited.
"""
self.apikey = self.__load_apikey(apikey)
self.server = server
self.project_id = kwargs.get('project_id', os.environ.get("ZMLP_PROJECT"))
self.max_retries = kwargs.get('max_retries', 3)
self.verify = True
def stream(self, url, dst):
"""
Stream the given URL path to local dst file path.
Args:
url (str): The URL to stream
dst (str): The destination file path
"""
try:
with open(dst, 'wb') as handle:
response = requests.get(self.get_url(url), verify=self.verify,
headers=self.headers(), stream=True)
if not response.ok:
raise ZmlpClientException(
"Failed to stream asset: %s, %s" % (url, response))
for block in response.iter_content(1024):
handle.write(block)
return dst
except requests.exceptions.ConnectionError as e:
raise ZmlpConnectionException(e)
def stream_text(self, url):
"""
Stream the given URL.
Args:
url (str): The URL to stream
Yields:
generator (str): A generator of the lines making up the textual
URL.
"""
try:
response = requests.get(self.get_url(url), verify=self.verify,
headers=self.headers(), stream=True)
if not response.ok:
raise ZmlpClientException(
"Failed to stream text: %s" % response)
for line in response.iter_lines(decode_unicode=True):
yield line
except requests.exceptions.ConnectionError as e:
raise ZmlpConnectionException(e)
def send_file(self, path, file_path):
"""
Sends a file via request body
Args:
path (path): The URI fragment for the request.
file_path (str): The path to the file to send.
Returns:
dict: A dictionary which can be used to fetch the file.
"""
with open(file_path, 'rb') as f:
return self.__handle_rsp(requests.post(
self.get_url(path), headers=self.headers(content_type=""),
data=f), True)
def upload_file(self, path, file, body={}, json_rsp=True):
"""
Upload a single file and a request to the given endpoint path.
Args:
path (str): The URL to upload to.
file (str): The file path to upload.
body (dict): A request body
json_rsp (bool): Set to true if the result returned is JSON
Returns:
dict: The response body of the request.
"""
try:
post_files = [("file", (os.path.basename(file), open(file, 'rb')))]
if body is not None:
post_files.append(
["body", (None, to_json(body), 'application/json')])
return self.__handle_rsp(requests.post(
self.get_url(path), headers=self.headers(content_type=""),
files=post_files), json_rsp)
except requests.exceptions.ConnectionError as e:
raise ZmlpConnectionException(e)
def upload_files(self, path, files, body, json_rsp=True):
"""
Upload an array of files and a reques to the given endpoint path.
Args:
path (str): The URL to upload to
files (list of str): The file paths to upload
body (dict): A request body
json_rsp (bool): Set to true if the result returned is JSON
Returns:
dict: The response body of the request.
"""
try:
post_files = []
for f in files:
if isinstance(f, IOBase):
post_files.append(
("files", (os.path.basename(f.name), f)))
else:
post_files.append(
("files", (os.path.basename(f), open(f, 'rb'))))
if body is not None:
post_files.append(
("body", ("", to_json(body),
'application/json')))
return self.__handle_rsp(requests.post(
self.get_url(path), headers=self.headers(content_type=""),
verify=self.verify, files=post_files), json_rsp)
except requests.exceptions.ConnectionError as e:
raise ZmlpConnectionException(e)
def get(self, path, body=None, is_json=True):
"""
Performs a get request.
Args:
path (str): An archivist URI path.
body (dict): The request body which will be serialized to json.
is_json (bool): Set to true to specify a JSON return value
Returns:
object: The http response object or an object deserialized from the
response json if the ``json`` argument is true.
Raises:
Exception: An error occurred making the request or parsing the
JSON response
"""
return self._make_request('get', path, body, is_json)
def post(self, path, body=None, is_json=True):
"""
Performs a post request.
Args:
path (str): An archivist URI path.
body (object): The request body which will be serialized to json.
is_json (bool): Set to true to specify a JSON return value
Returns:
object: The http response object or an object deserialized from the
response json if the ``json`` argument is true.
Raises:
Exception: An error occurred making the request or parsing the
JSON response
"""
return self._make_request('post', path, body, is_json)
def put(self, path, body=None, is_json=True):
"""
Performs a put request.
Args:
path (str): An archivist URI path.
body (object): The request body which will be serialized to json.
is_json (bool): Set to true to specify a JSON return value
Returns:
object: The http response object or an object deserialized from the
response json if the ``json`` argument is true.
Raises:
Exception: An error occurred making the request or parsing the
JSON response
"""
return self._make_request('put', path, body, is_json)
def delete(self, path, body=None, is_json=True):
"""
Performs a delete request.
Args:
path (str): An archivist URI path.
body (object): The request body which will be serialized to json.
is_json (bool): Set to true to specify a JSON return value
Returns:
object: The http response object or an object deserialized from
the response json if the ``json`` argument is true.
Raises:
Exception: An error occurred making the request or parsing the
JSON response
"""
return self._make_request('delete', path, body, is_json)
def iter_paged_results(self, url, req, limit, cls):
"""
Handles paging through the results of the standard _search
endpoints on the backend.
Args:
url (str): the URL to POST a search to
req (object): the search request body
limit (int): the maximum items to return, None for no limit.
cls (type): the class to wrap each result in
Yields:
Generator
"""
left_to_return = limit or sys.maxsize
page = 0
req["page"] = {}
while True:
if left_to_return < 1:
break
page += 1
req["page"]["size"] = min(100, left_to_return)
req["page"]["from"] = (page - 1) * req["page"]["size"]
rsp = self.post(url, req)
if not rsp.get("list"):
break
for f in rsp["list"]:
yield cls(f)
left_to_return -= 1
# Used to break before pulling new batch
if rsp.get("break"):
break
    def _make_request(self, method, path, body=None, is_json=True):
        """
        Build and execute a single HTTP request, retrying on connection
        failures up to ``self.max_retries`` (0 or less retries forever).

        Args:
            method (str): The ``requests`` function name: 'get', 'post', etc.
            path (str): An archivist URI path.
            body (object): Optional request body, serialized to JSON.
            is_json (bool): When True, decode the response body as JSON.

        Returns:
            object: The http response, or decoded JSON when ``is_json``.
        """
        request_function = getattr(requests, method)
        if body is not None:
            data = to_json(body)
        else:
            data = body

        # Making the request is wrapped in its own try/catch so it's easier
        # to catch any and all socket and http exceptions that can possibly be
        # thrown. Once that happens, handle_rsp is called which may throw
        # application level exceptions.
        rsp = None
        tries = 0
        url = self.get_url(path, body)
        while True:
            try:
                rsp = request_function(url, data=data, headers=self.headers(),
                                       verify=self.verify)
                break
            except Exception as e:
                # Some form of connection error, wait until archivist comes
                # back.
                tries += 1
                if 0 < self.max_retries <= tries:
                    raise e
                # Randomized backoff (1..60s, biased toward small waits).
                wait = random.randint(1, random.randint(1, 60))
                # Switched to stderr in case no logger is setup, still want
                # to see messages.
                msg = "Communicating to ZMLP (%s) timed out %d times, " \
                      "waiting ... %d seconds, error=%s\n"
                sys.stderr.write(msg % (url, tries, wait, e))
                time.sleep(wait)
        return self.__handle_rsp(rsp, is_json)
def __handle_rsp(self, rsp, is_json):
if rsp.status_code != 200:
self.__raise_exception(rsp)
if is_json and len(rsp.content):
rsp_val = rsp.json()
if logger.getEffectiveLevel() == logging.DEBUG:
logger.debug(
"rsp: status: %d body: '%s'" % (rsp.status_code, rsp_val))
return rsp_val
return rsp
def __raise_exception(self, rsp):
data = {}
try:
data.update(rsp.json())
except Exception as e:
# The result is not json.
data["message"] = "Your HTTP request was invalid '%s', response not " \
"JSON formatted. %s" % (rsp.status_code, e)
data["status"] = rsp.status_code
# If the status code can't be found, then ZmlpRequestException is returned.
ex_class = translate_exception(rsp.status_code)
raise ex_class(data)
def get_url(self, path, body=None):
"""
Returns the full URL including the configured server part.
"""
url = urljoin(self.server, path)
if logger.getEffectiveLevel() == logging.DEBUG:
logger.debug("url: '%s' path: '%s' body: '%s'" % (url, path, body))
return url
def headers(self, content_type="application/json"):
"""
Generate the return some request headers.
Args:
content_type(str): The content-type for the request. Defaults to
'application/json'
Returns:
dict: An http header struct.
"""
header = {'Authorization': "Bearer {}".format(self.__sign_request())}
if content_type:
header['Content-Type'] = content_type
if logger.getEffectiveLevel() == logging.DEBUG:
logger.debug("headers: %s" % header)
return header
def __load_apikey(self, apikey):
key_data = None
if not apikey:
return key_data
elif hasattr(apikey, 'read'):
key_data = json.load(apikey)
elif isinstance(apikey, dict):
key_data = apikey
elif isinstance(apikey, (str, bytes)):
try:
key_data = json.loads(base64.b64decode(apikey))
except binascii.Error:
raise ValueError("Invalid base64 encoded API key.")
return key_data
def __sign_request(self):
if not self.apikey:
raise RuntimeError("Unable to make request, no ApiKey has been specified.")
claims = {
'aud': self.server,
'exp': datetime.datetime.utcnow() + datetime.timedelta(seconds=60),
'accessKey': self.apikey["accessKey"],
}
if os.environ.get("ZMLP_TASK_ID"):
claims['taskId'] = os.environ.get("ZMLP_TASK_ID")
claims['jobId'] = os.environ.get("ZMLP_JOB_ID")
if self.project_id:
claims["projectId"] = self.project_id
return jwt.encode(claims, self.apikey['secretKey'], algorithm='HS512')
class SearchResult(object):
    """
    A utility class for wrapping various search result formats
    that come back from the ZMLP servers. A SearchResult can be both
    iterated and indexed like a list.
    """

    def __init__(self, data, clazz):
        """
        Create a new SearchResult instance.

        Args:
            data (dict): A search response body from the ZMLP servers.
            clazz (mixed): A class to wrap each item in the response body.
        """
        raw_items = data["list"]
        self.items = [clazz(entry) for entry in raw_items]
        self.offset = data["page"]["from"]
        self.size = len(raw_items)
        self.total = data["page"]["totalCount"]

    def __iter__(self):
        return iter(self.items)

    def __getitem__(self, idx):
        return self.items[idx]
def to_json(obj, indent=None):
    """
    Convert the given object to a JSON string using
    the ZmlpJsonEncoder.

    Args:
        obj (mixed): any json serializable python object.
        indent (int): The indentation level for the json, or None for compact.

    Returns:
        str: The serialized object
    """
    serialized = json.dumps(obj, cls=ZmlpJsonEncoder, indent=indent)
    if logger.getEffectiveLevel() == logging.DEBUG:
        logger.debug("json: %s" % serialized)
    return serialized
class ZmlpJsonEncoder(json.JSONEncoder):
    """
    JSON encoder with ZMLP specific serialization defaults: objects may
    provide a ``for_json()`` hook, sets become lists, date/time values
    become ISO-8601 strings, and Decimals become floats.
    """

    def default(self, obj):
        # Objects opt in to serialization by exposing a for_json() method.
        if hasattr(obj, 'for_json'):
            return obj.for_json()
        if isinstance(obj, (set, frozenset)):
            return list(obj)
        # datetime is a date subclass; a combined check is equivalent to
        # testing datetime, date and time individually.
        if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
            return obj.isoformat()
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)
class ZmlpClientException(ZmlpException):
    """The base exception class for all ZmlpClient related Exceptions."""
    # Marker base class only; adds no behavior over ZmlpException.
    pass
class ZmlpRequestException(ZmlpClientException):
    """
    The base exception class for all exceptions thrown from zmlp.

    Wraps the structured error body returned by the server and exposes
    its fields as read-only properties.
    """
    def __init__(self, data):
        # Fix: the original called super(ZmlpClientException, self), which
        # resolved relative to the wrong class and skipped this class's own
        # position in the MRO; name this class instead.
        super(ZmlpRequestException, self).__init__(
            data.get("message", "Unknown request exception"))
        self.__data = data

    @property
    def type(self):
        """The server-side exception class name."""
        return self.__data["exception"]

    @property
    def cause(self):
        """The underlying cause of the error, if any."""
        return self.__data["cause"]

    @property
    def endpoint(self):
        """The URI path that produced the error."""
        return self.__data["path"]

    @property
    def status(self):
        """The HTTP status code of the error response."""
        return self.__data["status"]

    def __str__(self):
        return "<ZmlpRequestException msg=%s>" % self.__data["message"]
class ZmlpConnectionException(ZmlpClientException):
    """
    This exception is thrown if the client encounters a connectivity issue
    with the Zmlp API servers.
    """
    # Raised when the HTTP layer itself fails (socket/connection errors),
    # as opposed to an error response from the server.
    pass
class ZmlpWriteException(ZmlpRequestException):
    """
    This exception is thrown when ZMLP fails a write operation.
    """
    # The redundant __init__ that only delegated to the parent was removed;
    # ZmlpRequestException.__init__(data) is inherited unchanged.
class ZmlpSecurityException(ZmlpRequestException):
    """
    This exception is thrown if Zmlp fails a security check on the request.
    """
    # The redundant __init__ that only delegated to the parent was removed;
    # ZmlpRequestException.__init__(data) is inherited unchanged.
class ZmlpNotFoundException(ZmlpRequestException):
    """
    This exception is thrown when ZMLP fails a read operation because
    a piece of named data cannot be found.
    """
    # The redundant __init__ that only delegated to the parent was removed;
    # ZmlpRequestException.__init__(data) is inherited unchanged.
class ZmlpDuplicateException(ZmlpWriteException):
    """
    This exception is thrown when ZMLP fails a write operation because
    the newly created element would be a duplicate.
    """
    # The redundant __init__ that only delegated to the parent was removed;
    # the inherited constructor signature (data) is unchanged.
class ZmlpInvalidRequestException(ZmlpRequestException):
    """
    This exception is thrown if the request sent to Zmlp is invalid in
    some way, similar to an IllegalArgumentException.
    """
    # The redundant __init__ that only delegated to the parent was removed;
    # ZmlpRequestException.__init__(data) is inherited unchanged.
"""
A map of HTTP response codes to local exception types.
"""
EXCEPTION_MAP = {
404: ZmlpNotFoundException,
409: ZmlpDuplicateException,
500: ZmlpInvalidRequestException,
400: ZmlpInvalidRequestException,
401: ZmlpSecurityException,
403: ZmlpSecurityException
}
def translate_exception(status_code):
    """
    Translate the HTTP status code into one of the exceptions.

    Args:
        status_code (int): the HTTP status code

    Returns:
        Exception: the exception to throw for the given status code
    """
    # Unmapped codes fall back to the generic ZmlpRequestException.
    return EXCEPTION_MAP.get(status_code, ZmlpRequestException)
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/client.py
|
client.py
|
# flake8: noqa
from .entity import *
from .search import *
from .app.zmlp_app import ZmlpApp, app_from_env
from .client import ZmlpClient, to_json
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/__init__.py
|
__init__.py
|
from .base import BaseEntity
class CustomField(BaseEntity):
    """
    Fields are used to store your own metadata on an asset.
    """
    # The redundant __init__ that only called super() was removed;
    # BaseEntity.__init__(data) is inherited as-is.

    @property
    def type(self):
        """The ES field data type."""
        return self._data['type']

    @property
    def name(self):
        """The base field name."""
        return self._data['name']

    @property
    def es_field_name(self):
        """The full ES field name."""
        return self._data['esField']
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/field.py
|
field.py
|
import json
import logging
import os
from ..client import to_json
from ..util import as_collection
__all__ = [
'Asset',
'FileImport',
'FileUpload',
'StoredFile',
'FileTypes'
]
logger = logging.getLogger(__name__)
class DocumentMixin(object):
    """
    A Mixin class which provides easy access to a deeply nested dictionary
    via dot-notation attribute paths (e.g. ``'source.path'``).
    """

    def __init__(self):
        # The backing nested dictionary for all attribute access.
        self.document = {}

    def set_attr(self, attr, value):
        """Set the value of an attribute.

        Args:
            attr (str): The attribute name in dot notation format.
                ex: 'foo.bar'
            value (:obj:`object`): value: The value for the particular
                attribute. Can be any json serializable type.
        """
        self.__set_attr(attr, value)

    def del_attr(self, attr):
        """
        Delete the attribute from the document. If the attribute does not exist
        or is protected by a manual field edit then return false. Otherwise,
        delete the attribute and return true.

        Args:
            attr (str): The attribute name.

        Returns:
            bool: True if the attribute was deleted.
        """
        doc = self.document
        parts = attr.split(".")
        # Walk to the parent dict of the leaf key.
        for k in parts[0:-1]:
            if not isinstance(doc, dict) or k not in doc:
                return False
            doc = doc.get(k)
        attr_name = parts[-1]
        try:
            del doc[attr_name]
            # Confirm the delete actually took effect.
            return not self.attr_exists(attr)
        except KeyError:
            return False

    def get_attr(self, attr, default=None):
        """Get the given attribute to the specified value.

        Args:
            attr (str): The attribute name in dot notation format.
                ex: 'foo.bar'
            default (:obj:`mixed`) The default value if no attr exists.

        Returns:
            mixed: The value of the attribute.
        """
        doc = self.document
        parts = attr.split(".")
        for k in parts:
            if not isinstance(doc, dict) or k not in doc:
                return default
            doc = doc.get(k)
        return doc

    def attr_exists(self, attr):
        """
        Return true if the given attribute exists.

        Args:
            attr (str): The name of the attribute to check.

        Returns:
            bool: true if the attr exists.
        """
        doc = self.document
        parts = attr.split(".")
        for k in parts[0:len(parts) - 1]:
            if k not in doc:
                return False
            doc = doc.get(k)
        return parts[-1] in doc

    def add_analysis(self, name, val):
        """Add an analysis structure to the document.

        Args:
            name (str): The name of the analysis
            val (mixed): the value/result of the analysis.
        """
        if not name:
            raise ValueError("Analysis requires a unique name")
        attr = "analysis.%s" % name
        if val is None:
            self.set_attr(attr, None)
        else:
            # Round-trip through JSON so for_json() hooks and other
            # custom types are flattened to plain dict/list values.
            self.set_attr(attr, json.loads(to_json(val)))

    def get_analysis(self, namespace):
        """
        Return the the given analysis data under the the given name.

        Args:
            namespace (str): The model namespace / pipeline module name.

        Returns:
            dict: An arbitrary dictionary containing predictions, content, etc.
        """
        # Accepts either a plain string or an object with a .namespace attr.
        name = getattr(namespace, "namespace", "analysis.{}".format(namespace))
        return self.get_attr(name)

    def get_predicted_labels(self, namespace, min_score=None):
        """
        Get all predictions made by the given label prediction module. If no
        label predictions are present, returns None.

        Args:
            namespace (str): The analysis namespace, example 'zvi-label-detection'.
            min_score (float): Filter results by a minimum score.

        Returns:
            list: A list of dictionaries containing the predictions
        """
        name = getattr(namespace, "namespace", "analysis.{}".format(namespace))
        predictions = self.get_attr(f'{name}.predictions')
        if not predictions:
            return None
        if min_score:
            return [pred for pred in predictions if pred['score'] >= min_score]
        else:
            return predictions

    def get_predicted_label(self, namespace, label):
        """
        Get a prediction made by the given label prediction module. If no
        label predictions are present, returns None.

        Args:
            namespace (str): The model / module name that created the prediction.
            label (mixed): A label name or integer index of a prediction.

        Returns:
            dict: a prediction dict with a label, score, etc.
        """
        preds = self.get_predicted_labels(namespace)
        if not preds:
            return None
        if isinstance(label, str):
            # Narrow to the named label, then take the first match.
            preds = [pred for pred in preds if pred['label'] == label]
            label = 0
        try:
            return preds[label]
        except IndexError:
            return None

    def extend_list_attr(self, attr, items):
        """
        Adds the given items to the given attr. The attr must be a list or set.

        Args:
            attr (str): The name of the attribute
            items (:obj:`list` of :obj:`mixed`): A list of new elements.
        """
        items = as_collection(items)
        all_items = self.get_attr(attr)
        if all_items is None:
            all_items = set()
            self.set_attr(attr, all_items)
        # Works for both sets (update) and lists (extend).
        try:
            all_items.update(items)
        except AttributeError:
            all_items.extend(items)

    def __set_attr(self, attr, value):
        """
        Handles setting an attribute value.

        Args:
            attr (str): The attribute name in dot notation format. ex: 'foo.bar'
            value (mixed): The value for the particular attribute.
                Can be any json serializable type.
        """
        doc = self.document
        parts = attr.split(".")
        # Create intermediate dicts as needed while walking the path.
        for k in parts[0:len(parts) - 1]:
            if k not in doc:
                doc[k] = {}
            doc = doc[k]
        if isinstance(value, dict):
            doc[parts[-1]] = value
        else:
            # Prefer the object's own JSON representation when available.
            try:
                doc[parts[-1]] = value.for_json()
            except AttributeError:
                doc[parts[-1]] = value

    def __setitem__(self, field, value):
        self.set_attr(field, value)

    def __getitem__(self, field):
        return self.get_attr(field)
class FileImport(object):
    """
    A FileImport is used to import a new file and metadata into ZMLP.
    """

    def __init__(self, uri, custom=None, page=None, label=None, tmp=None):
        """
        Construct a FileImport instance which can point to a remote URI.

        Args:
            uri (str): a URI locator to the file asset.
            custom (dict): Values for custom metadata fields.
            page (int): The specific page to import if any.
            label (Label): An optional Label which will add the file to
                a Model training set.
            tmp: (dict): A dict of temp attrs that are removed after processing.
        """
        super(FileImport, self).__init__()
        self.uri = uri
        self.custom = custom or {}
        self.page = page
        self.label = label
        self.tmp = tmp

    def for_json(self):
        """Returns a dictionary suitable for JSON encoding.

        The ZpsJsonEncoder will call this method automatically.

        Returns:
            :obj:`dict`: A JSON serializable version of this Document.
        """
        return {
            "uri": self.uri,
            "custom": self.custom,
            "page": self.page,
            "label": self.label,
            "tmp": self.tmp
        }

    def __setitem__(self, field, value):
        # Item assignment targets the custom metadata dict.
        self.custom[field] = value

    def __getitem__(self, field):
        return self.custom[field]
class FileUpload(FileImport):
    """
    FileUpload instances point to a local file that will be uploaded for analysis.
    """

    def __init__(self, path, custom=None, page=None, label=None):
        """
        Create a new FileUpload instance.

        Args:
            path (str): A path to a file, the file must exist.
            custom (dict): Values for pre-created custom metadata fields.
            page (int): The specific page to import if any.
            label (Label): An optional Label which will add the file to
                a Model training set.
        """
        # Store the absolute, normalized form of the path as the uri.
        normalized = os.path.normpath(os.path.abspath(path))
        super(FileUpload, self).__init__(normalized, custom, page, label)

        if not os.path.exists(path):
            raise ValueError('The path "{}" does not exist'.format(path))

    def for_json(self):
        """Returns a dictionary suitable for JSON encoding.

        The ZpsJsonEncoder will call this method automatically.

        Returns:
            :obj:`dict`: A JSON serializable version of this Document.
        """
        return {
            "uri": self.uri,
            "page": self.page,
            "label": self.label,
            "custom": self.custom
        }
class Asset(DocumentMixin):
    """
    An Asset represents a single processed file. Assets start out
    in the 'CREATED' state, which indicates they've been created but not processed.
    Once an asset has been processed and augmented with files created by various
    analysis modules, the Asset will move into the 'ANALYZED' state.
    """

    def __init__(self, data):
        super(Asset, self).__init__()
        if not data:
            raise ValueError("Error creating Asset instance, Assets must have an id.")
        self.id = data.get("id")
        self.document = data.get("document", {})
        self.score = data.get("score", 0)
        self.inner_hits = data.get("inner_hits", [])

    @staticmethod
    def from_hit(hit):
        """
        Converts an ElasticSearch hit into an Asset.

        Args:
            hit (dict): An raw ES document

        Returns:
            Asset: The Asset.
        """
        return Asset({
            'id': hit['_id'],
            'score': hit.get('_score', 0),
            'document': hit.get('_source', {}),
            'inner_hits': hit.get('inner_hits', [])})

    @property
    def uri(self):
        """
        The URI of the asset.

        Returns:
            str: The URI of the data.
        """
        return self.get_attr("source.path")

    @property
    def extension(self):
        """
        The file extension of the asset, lower cased.

        Returns:
            str: The file extension
        """
        # NOTE(review): raises AttributeError if 'source.extension' is
        # missing -- presumably it is always set by ingestion; confirm.
        return self.get_attr("source.extension").lower()

    def add_file(self, stored_file):
        """
        Adds the StoredFile record to the asset's list of associated files.

        Args:
            stored_file (StoredFile): A file that has been stored in ZMLP

        Returns:
            bool: True if the file was added to the list, False if it was a duplicate.
        """
        # Ensure the file doesn't already exist in the metadata
        if not self.get_files(id=stored_file.id):
            files = self.get_attr("files") or []
            files.append(stored_file._data)
            self.set_attr("files", files)
            return True
        return False

    def get_files(self, name=None, category=None, mimetype=None, extension=None,
                  id=None, attrs=None, attr_keys=None, sort_func=None):
        """
        Return all stored files associated with this asset. Optionally
        filter the results.

        Args:
            name (str): The associated files name.
            category (str): The associated files category, eg proxy, backup, etc.
            mimetype (str): The mimetype must start with this string.
            extension: (str): The file name must have the given extension.
            id (str): The file id must match one of the given values.
            attrs (dict): The file must have all of the given attributes.
            attr_keys: (list): A list of attribute keys that must be present.
            sort_func: (func): A lambda function for sorting the result.

        Returns:
            list of StoredFile: A list of ZMLP file records.
        """
        result = []
        files = self.get_attr("files") or []
        for fs in files:
            match = True
            if id and not any((item for item in as_collection(id)
                               if fs["id"] == item)):
                match = False
            if name and not any((item for item in as_collection(name)
                                 if fs["name"] == item)):
                match = False
            if category and not any((item for item in as_collection(category)
                                     if fs["category"] == item)):
                match = False
            if mimetype and not any((item for item in as_collection(mimetype)
                                     if fs["mimetype"].startswith(item))):
                match = False
            if extension and not any((item for item in as_collection(extension)
                                      if fs["name"].endswith("." + item))):
                match = False

            file_attrs = fs.get("attrs", {})
            if attr_keys:
                if not any(key in file_attrs for key in as_collection(attr_keys)):
                    match = False
            if attrs:
                for k, v in attrs.items():
                    if file_attrs.get(k) != v:
                        match = False
            if match:
                result.append(StoredFile(fs))

        if sort_func:
            result = sorted(result, key=sort_func)
        return result

    def get_thumbnail(self, level):
        """
        Return an thumbnail StoredFile record for the Asset. The level
        corresponds size of the thumbnail, 0 for the smallest, and
        up to N for the largest. Levels 0,1,and 2 are smaller than
        the source media, level 3 or above (if they exist) will
        be full resolution or higher images used for OCR purposes.

        To download the thumbnail call app.assets.download_file(stored_file)

        Args:
            level (int): The size level, 0 for smallest up to N.

        Returns:
            StoredFile: A StoredFile instance or None if no image proxies exist.
        """
        files = self.get_files(mimetype="image/", category="proxy",
                               sort_func=lambda f: f.attrs.get('width', 0))
        if not files:
            return None
        # Out-of-range levels clamp to the largest proxy.
        if level >= len(files):
            level = -1
        return files[level]

    def get_inner_hits(self, name):
        """
        Return any inner hits from a collapse query.

        Args:
            name (str): The inner hit name.

        Returns:
            list[Asset]: A list of Assets.
        """
        try:
            return [Asset.from_hit(hit) for hit in self.inner_hits[name]['hits']['hits']]
        except KeyError:
            return []

    def for_json(self):
        """Returns a dictionary suitable for JSON encoding.

        The ZpsJsonEncoder will call this method automatically.

        Returns:
            :obj:`dict`: A JSON serializable version of this Document.
        """
        return {
            "id": self.id,
            "uri": self.get_attr("source.path"),
            "document": self.document,
            "page": self.get_attr("media.pageNumber"),
        }

    def __str__(self):
        return "<Asset id='{}'/>".format(self.id)

    def __repr__(self):
        return "<Asset id='{}' at {}/>".format(self.id, hex(id(self)))

    def __hash__(self):
        return hash(self.id)

    def __eq__(self, other):
        # Fix: getattr() without a default raised AttributeError when
        # comparing against objects with no "id" attribute; use a default
        # and treat a missing/falsy id as not-equal, as before.
        other_id = getattr(other, "id", None)
        if not other_id:
            return False
        return other_id == self.id
class StoredFile(object):
    """
    The StoredFile class represents a supporting file that has been stored in ZVI.
    """

    def __init__(self, data):
        # The raw server-side record backing all properties.
        self._data = data

    @property
    def id(self):
        """
        The unique ID of the file.
        """
        return self._data['id']

    @property
    def name(self):
        """
        The file name.
        """
        return self._data['name']

    @property
    def category(self):
        """
        The file category.
        """
        return self._data['category']

    @property
    def attrs(self):
        """
        Arbitrary attributes.
        """
        return self._data['attrs']

    @property
    def mimetype(self):
        """
        The file mimetype.
        """
        return self._data['mimetype']

    @property
    def size(self):
        """
        The size of the file.
        """
        return self._data['size']

    @property
    def cache_id(self):
        """
        A string suitable for on-disk caching/filenames. Replaces
        all slashes in id with underscores.
        """
        return self.id.replace("/", "_")

    def __str__(self):
        return "<StoredFile {}>".format(self.id)

    def __eq__(self, other):
        # Fix: the original returned ``other.id`` itself (a truthy string)
        # rather than a comparison, so any two StoredFiles with ids compared
        # as "equal" and comparing to a non-StoredFile raised. Compare ids,
        # defaulting to None for objects without an id, to stay consistent
        # with __hash__.
        return getattr(other, 'id', None) == self.id

    def __hash__(self):
        return hash(self.id)

    def for_json(self):
        """Return a JSON serialized copy.

        Returns:
            :obj:`dict`: A json serializable dict.
        """
        serializable_dict = {}
        attrs = self._data.keys()
        for attr in attrs:
            if getattr(self, attr, None) is not None:
                serializable_dict[attr] = getattr(self, attr)
        return serializable_dict
class FileTypes:
    """
    A class for storing the supported file types.
    """

    videos = frozenset(['mov', 'mp4', 'mpg', 'mpeg', 'm4v', 'webm', 'ogv', 'ogg', 'mxf', 'avi'])
    """A set of supported video file formats."""

    # Fix: removed the duplicate "rla" entry (it appeared twice; frozenset
    # deduplicated it anyway, so the resulting set is unchanged).
    images = frozenset(["bmp", "cin", "dpx", "gif", "jpg",
                        "jpeg", "exr", "png", "psd", "rla", "tif", "tiff",
                        "dcm"])
    """A set of supported image file formats."""

    documents = frozenset(['pdf', 'doc', 'docx', 'ppt', 'pptx', 'xls', 'xlsx', 'vsd', 'vsdx'])
    """A set of supported document file formats."""

    all = frozenset(videos.union(images).union(documents))
    """A set of all supported file formats."""

    @classmethod
    def resolve(cls, file_types):
        """
        Resolve a list of file extensions or types (images, documents, videos) to
        a supported list of extensions.

        Args:
            file_types (list): A list of file extensions, dot not included.

        Returns:
            list: The valid list of extensions from the given list
        """
        file_types = as_collection(file_types)
        if not file_types:
            return cls.all
        result = set()
        for file_type in file_types:
            if file_type in cls.all:
                result.add(file_type)
            else:
                # Allow the group names 'videos', 'images', 'documents'.
                exts = getattr(cls, file_type, None)
                if exts:
                    result.update(exts)
        return sorted(list(result))
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/asset.py
|
asset.py
|
from .base import BaseEntity
__all__ = [
'DataSource'
]
class DataSource(BaseEntity):
    """
    A DataSource is a remote source for Assets that can be
    iterated by the Analysis framework and imported
    in a single import Job.
    """
    # The redundant __init__ that only called super() was removed;
    # BaseEntity.__init__(data) is inherited as-is.

    @property
    def id(self):
        """The id of the DataSource"""
        return self._data['id']

    @property
    def name(self):
        """The name of the DataSource"""
        return self._data['name']

    @property
    def uri(self):
        """The URI of the DataSource"""
        return self._data['uri']

    @property
    def file_types(self):
        """The file type filter for the DataSource"""
        return self._data.get('file_types', [])

    @property
    def modules(self):
        """The type of modules done to the DataSource"""
        return self._data.get('modules', [])

    @property
    def credentials(self):
        """The type of credentials attached to DataSource"""
        return self._data.get('credentials', [])
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/datasource.py
|
datasource.py
|
from datetime import datetime
class BaseEntity:
    """
    Common base for server-side entities; exposes the shared id,
    timestamp and actor fields of the raw data dictionary.
    """

    def __init__(self, data):
        self._data = data

    @property
    def id(self):
        """The id of the Entity"""
        return self._data['id']

    @property
    def time_created(self):
        """The date/time the entity was created."""
        return self.__millis_to_datetime('timeCreated')

    @property
    def time_modified(self):
        """The date/time the entity was modified."""
        return self.__millis_to_datetime('timeModified')

    @property
    def actor_created(self):
        """The UUID of the actor that created the entity."""
        return self._data['actorCreated']

    @property
    def actor_modified(self):
        """The UUID of the actor that modified the entity."""
        return self._data['actorModified']

    def __millis_to_datetime(self, key):
        # Server timestamps are epoch milliseconds.
        return datetime.fromtimestamp(self._data[key] / 1000.0)

    def __hash__(self):
        return hash(self._data['id'])

    def __eq__(self, other):
        return self._data['id'] == getattr(other, 'id', None)

    def __str__(self):
        label = [self.__class__.__name__, self.id]
        entity_name = self._data.get('name')
        if entity_name:
            label.append(entity_name)
            return "<{} id={} name={}>".format(*label)
        return "<{} id={}>".format(*label)
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/base.py
|
base.py
|
from .base import BaseEntity
__all__ = [
'AnalysisModule'
]
class AnalysisModule(BaseEntity):
    """
    An AnalysisModule describes a type of ML process that gets applied to an asset.
    """
    # The redundant __init__ that only called super() was removed;
    # BaseEntity.__init__(data) is inherited as-is.

    @property
    def name(self):
        """The name of the AnalysisModule"""
        return self._data['name']

    @property
    def type(self):
        """The type of ML operation the AnalysisModule accomplishes."""
        return self._data['type']

    @property
    def supported_media(self):
        """The types of media supported by the AnalysisModule"""
        return self._data['supportedMedia']

    @property
    def category(self):
        """The category/brand of AnalysisModule, example: Google Video Intelligence"""
        return self._data['category']

    @property
    def provider(self):
        """The provider of the AnalysisModule, example as Zorroa, Google, Amazon"""
        return self._data['provider']

    @property
    def description(self):
        """The description of the AnalysisModule"""
        return self._data['description']
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/analysis.py
|
analysis.py
|
from datetime import datetime
from .base import BaseEntity
from ..util import ObjectView
__all__ = [
'Job',
'Task',
'TaskError'
]
class Job(BaseEntity):
    """
    A Job represents a backend data process. Jobs are made up of Tasks
    which are scheduled to execute on Analyst data processing nodes.
    """
    # The redundant __init__ that only called super() was removed;
    # BaseEntity.__init__(data) is inherited as-is.

    @property
    def name(self):
        """The name of the Job"""
        return self._data['name']

    @property
    def state(self):
        """The state of the Job"""
        return self._data['state']

    @property
    def paused(self):
        """True if the Job is paused."""
        return self._data['paused']

    @property
    def priority(self):
        """The priority of the Job"""
        return self._data['priority']

    @property
    def time_started(self):
        """The datetime the job got the first analyst, or None if not started."""
        # -1 is the server's sentinel for "never happened".
        if self._data['timeStarted'] == -1:
            return None
        else:
            return datetime.fromtimestamp(self._data['timeStarted'] / 1000.0)

    @property
    def time_stopped(self):
        """The datetime the job finished, or None if still running."""
        if self._data['timeStopped'] == -1:
            return None
        else:
            return datetime.fromtimestamp(self._data['timeStopped'] / 1000.0)

    @property
    def asset_counts(self):
        """Asset counts for the Job"""
        return ObjectView(self._data['assetCounts'])

    @property
    def task_counts(self):
        """Task counts for the Job"""
        return ObjectView(self._data['taskCounts'])

    @property
    def time_modified(self):
        """The date/time the Job was modified."""
        # Jobs report modification under 'timeUpdated', unlike BaseEntity.
        return datetime.fromtimestamp(self._data['timeUpdated'] / 1000.0)
class Task(BaseEntity):
    """
    Jobs contain Tasks and each Task handles the processing for 1 or more files/assets.
    """
    # The redundant __init__ that only called super() was removed;
    # BaseEntity.__init__(data) is inherited as-is.

    @property
    def job_id(self):
        """The Job Id"""
        return self._data['jobId']

    @property
    def name(self):
        """The name of the Task"""
        return self._data['name']

    @property
    def state(self):
        """The state of the Task"""
        # Fix: the docstring previously said "name" (copy-paste error).
        return self._data['state']

    @property
    def time_started(self):
        """The datetime the task got the first analyst, or None if not started."""
        # -1 is the server's sentinel for "never happened".
        if self._data['timeStarted'] == -1:
            return None
        else:
            return datetime.fromtimestamp(self._data['timeStarted'] / 1000.0)

    @property
    def time_stopped(self):
        """The datetime the task finished, or None if still running."""
        if self._data['timeStopped'] == -1:
            return None
        else:
            return datetime.fromtimestamp(self._data['timeStopped'] / 1000.0)

    @property
    def time_pinged(self):
        """The datetime the running task sent a watch dog ping, or None."""
        if self._data['timePing'] == -1:
            return None
        else:
            return datetime.fromtimestamp(self._data['timePing'] / 1000.0)

    @property
    def time_modified(self):
        """The date/time the Task was modified (last watch dog ping)."""
        return self.time_pinged

    @property
    def asset_counts(self):
        """Asset counts for the Task."""
        return ObjectView(self._data['assetCounts'])
class TaskError:
    """
    A TaskError contains information regarding a failed Task or Asset.
    """

    def __init__(self, data):
        # Raw error record as returned by the server.
        self._data = data

    @property
    def id(self):
        """ID of the TaskError"""
        return self._data['id']

    @property
    def task_id(self):
        """UUID of the Task that encountered an error."""
        return self._data['taskId']

    @property
    def job_id(self):
        """UUID of the Job that encountered an error."""
        return self._data['jobId']

    @property
    def datasource_id(self):
        """UUID of the DataSource that encountered an error."""
        return self._data['dataSourceId']

    @property
    def asset_id(self):
        """ID of the Asset that encountered an error."""
        return self._data['assetId']

    @property
    def path(self):
        """File path or URI that was being processed."""
        return self._data['path']

    @property
    def message(self):
        """Error message from the exception that generated the error."""
        return self._data['message']

    @property
    def processor(self):
        """Processor in which the error occurred."""
        return self._data['processor']

    @property
    def fatal(self):
        """True if the error was fatal and the Asset was not processed."""
        return self._data['fatal']

    @property
    def phase(self):
        """Phase at which the error occurred: generate, execute, teardown."""
        return self._data['phase']

    @property
    def time_created(self):
        """The date/time the error record was created (epoch millis on the wire)."""
        return datetime.fromtimestamp(self._data['timeCreated'] / 1000.0)

    @property
    def stack_trace(self):
        """Full stack trace from the error, if any."""
        return self._data['stackTrace']
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/job.py
|
job.py
|
from enum import Enum
from .base import BaseEntity
__all__ = [
'Project',
'ProjectTier'
]
class ProjectTier(Enum):
    """
    ProjectTiers determine which features are available to a project.
    """
    ESSENTIALS = 0
    """Allows the use of essentials features."""
    PREMIER = 1
    """Allows the use of premier features."""
class Project(BaseEntity):
    """
    Represents a ZMLP Project.
    """
    # The no-op __init__ and the ``id`` property override (both identical
    # to the BaseEntity implementations) were removed as redundant.

    @property
    def name(self):
        """The project's unique name."""
        return self._data['name']

    @property
    def tier(self):
        """The project billing tier."""
        # Look the tier up by member name, e.g. 'PREMIER'.
        return ProjectTier[self._data['tier']]
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/project.py
|
project.py
|
# Public API exported from this module.
__all__ = [
    'ZmlpException'
]
class ZmlpException(Exception):
    """Base class for all exceptions raised by the ZMLP library."""
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/exception.py
|
exception.py
|
from enum import Enum
from .base import BaseEntity
from ..util import as_id
# Public API exported from this module.
__all__ = [
    'Model',
    'ModelType',
    'Label',
    'LabelScope',
    'ModelTypeInfo'
]
class ModelType(Enum):
    """
    Types of models that can be trained.

    Values must be unique: with Python's Enum, a member that repeats an
    earlier value becomes a silent alias of that earlier member.
    """
    ZVI_KNN_CLASSIFIER = 0
    """A KMeans clustering model for quickly clustering assets into general groups."""
    ZVI_LABEL_DETECTION = 1
    """Retrain the ResNet50 convolutional neural network with your own labels."""
    ZVI_FACE_RECOGNITION = 2
    """Face Recognition model using a KNN classifier."""
    GCP_LABEL_DETECTION = 4
    """Train a Google AutoML vision model."""
    TF2_IMAGE_CLASSIFIER = 5
    """Provide your own custom Tensorflow2/Keras model"""
    # Was also 5, which made this member an alias of TF2_IMAGE_CLASSIFIER
    # (its .name resolved to 'TF2_IMAGE_CLASSIFIER'). 6 makes it distinct.
    PYTORCH_IMAGE_CLASSIFIER = 6
    """Provide your own custom Pytorch model"""
class LabelScope(Enum):
    """
    Scopes that partition labeled Assets into training and test sets.
    """
    TRAIN = 1
    """The label marks the Asset as part of the Training set."""
    TEST = 2
    """The label marks the Asset as part of the Test set."""
class Model(BaseEntity):
    """
    Represents a ZMLP Model: exposes the model's server-side properties and
    helpers for creating Labels and building asset searches related to the model.
    """
    def __init__(self, data):
        super(Model, self).__init__(data)
    @property
    def name(self):
        """The name of the Model"""
        return self._data['name']
    @property
    def module_name(self):
        """The name of the Pipeline Module"""
        return self._data['moduleName']
    @property
    def namespace(self):
        """The analysis namespace ('analysis.<moduleName>') where this model's predictions are stored."""
        return 'analysis.{}'.format(self._data['moduleName'])
    @property
    def type(self):
        """The type of model, resolved by name into a ModelType."""
        return ModelType[self._data['type']]
    @property
    def file_id(self):
        """The file ID of the trained model"""
        return self._data['fileId']
    @property
    def ready(self):
        """
        True if the model is fully trained and ready to use.
        Adding new labels will set ready to false.
        """
        return self._data['ready']
    def make_label(self, label, bbox=None, simhash=None, scope=None):
        """
        Make an instance of a Label which can be used to label assets.
        Args:
            label (str): The label name.
            bbox (list[float]): An optional bounding box.
            simhash (str): An associated simhash, if any.
            scope (LabelScope): The scope of the image, can be TEST or TRAIN.
                Defaults to TRAIN.
        Returns:
            Label: The new label.
        """
        return Label(self, label, bbox=bbox, simhash=simhash, scope=scope)
    def make_label_from_prediction(self, label, prediction, scope=None):
        """
        Make a label from a prediction. This will copy the bbox
        and simhash from the prediction, if any.
        Args:
            label (str): A name for the prediction.
            prediction (dict): A prediction from an analysis namespace.
            scope (LabelScope): The scope of the image, can be TEST or TRAIN.
                Defaults to TRAIN.
        Returns:
            Label: A new label
        """
        return Label(self, label,
                     bbox=prediction.get('bbox'),
                     simhash=prediction.get('simhash'),
                     scope=scope)
    def get_label_search(self, scope=None):
        """
        Return a search that can be used to query all assets
        with labels.
        Args:
            scope (LabelScope): An optional label scope to filter by.
        Returns:
            dict: A search to pass to an asset search.
        """
        # Labels are stored as nested documents, so the query must be nested
        # on the 'labels' path and match this model's id.
        search = {
            'size': 64,
            'sort': [
                '_doc'
            ],
            '_source': ['labels', 'files'],
            'query': {
                'nested': {
                    'path': 'labels',
                    'query': {
                        'bool': {
                            'must': [
                                {'term': {'labels.modelId': self.id}}
                            ]
                        }
                    }
                }
            }
        }
        if scope:
            # Narrow the nested query to labels with the requested scope name.
            must = search['query']['nested']['query']['bool']['must']
            must.append({'term': {'labels.scope': scope.name}})
        return search
    def get_confusion_matrix_search(self, min_score=0.0, max_score=1.0, test_set_only=True):
        """
        Returns a search query with aggregations that can be used to create a confusion
        matrix.
        Args:
            min_score (float): Minimum confidence score to return results for.
            max_score (float): Maximum confidence score to return results for.
            test_set_only (bool): If True only assets with TEST labels will be evaluated.
        Returns:
            dict: A search to pass to an asset search.
        """
        # Field holding the predicted label, per supported model type.
        prediction_term_map = {
            ModelType.ZVI_KNN_CLASSIFIER: f'{self.namespace}.label',
            ModelType.ZVI_FACE_RECOGNITION: f'{self.namespace}.predictions.label'
        }
        # Field holding the prediction confidence score, per model type.
        score_map = {ModelType.ZVI_KNN_CLASSIFIER: f'{self.namespace}.score',
                     ModelType.ZVI_LABEL_DETECTION: f'{self.namespace}.score',
                     ModelType.ZVI_FACE_RECOGNITION: f'{self.namespace}.predictions.score'}
        if self.type not in prediction_term_map:
            raise TypeError(f'Cannot create a confusion matrix search for {self.type} models.')
        # Filter assets by score range, then aggregate: ground-truth labels
        # (nested) crossed with the predicted label terms (reverse_nested).
        search_query = {
            "size": 0,
            "query": {
                "bool": {
                    "filter": [
                        {"range": {score_map[self.type]: {"gte": min_score, "lte": max_score}}}
                    ]
                }
            },
            "aggs": {
                "nested_labels": {
                    "nested": {
                        "path": "labels"
                    },
                    "aggs": {
                        "model_train_labels": {
                            "filter": {
                                "bool": {
                                    "must": [
                                        {"term": {"labels.modelId": self.id}}
                                    ]
                                }
                            },
                            "aggs": {
                                "labels": {
                                    "terms": {"field": "labels.label"},
                                    "aggs": {
                                        "predictions_by_label": {
                                            "reverse_nested": {},
                                            "aggs": {
                                                "predictions": {
                                                    "terms": {
                                                        "field": prediction_term_map[self.type]
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        if test_set_only:
            # Restrict the ground-truth side of the matrix to TEST-scope labels.
            (search_query
             ['aggs']
             ['nested_labels']
             ['aggs']
             ['model_train_labels']
             ['filter']
             ['bool']
             ['must'].append({"term": {"labels.scope": "TEST"}}))
        return search_query
class ModelTypeInfo:
    """
    Describes the static properties and training requirements of a ModelType.
    """
    def __init__(self, data):
        """
        Create a new ModelTypeInfo.

        Args:
            data (dict): The raw model-type dictionary returned by the server.
        """
        self._data = data
    @property
    def name(self):
        """The name of the model type."""
        type_name = self._data['name']
        return type_name
    @property
    def description(self):
        """A human readable description of the model type."""
        text = self._data['description']
        return text
    @property
    def objective(self):
        """The objective of the model, LABEL_DETECTION, FACE_RECOGNITION, etc"""
        goal = self._data['objective']
        return goal
    @property
    def provider(self):
        """The company that maintains the structure and algorithm for the model."""
        vendor = self._data['provider']
        return vendor
    @property
    def min_concepts(self):
        """The minimum number of unique concepts a model must have before it can be trained."""
        concepts = self._data['minConcepts']
        return concepts
    @property
    def min_examples(self):
        """The minimum number of examples per concept required before training is allowed."""
        examples = self._data['minExamples']
        return examples
class Label:
    """
    A Label ties an Asset to a Model concept, either at import time
    or after the Asset has been imported.
    """
    def __init__(self, model, label, bbox=None, simhash=None, scope=None):
        """
        Create a new label.
        Args:
            model: (Model): The model the label is for.
            label (str): The label itself.
            bbox (list): A optional list of floats for a bounding box.
            simhash (str): An optional similarity hash.
            scope (LabelScope): The scope of the image, can be TEST or TRAIN.
                Defaults to TRAIN.
        """
        self.model_id = as_id(model)
        self.label = label
        self.bbox = bbox
        self.simhash = simhash
        # Fall back to the TRAIN scope when no scope was supplied.
        self.scope = scope if scope else LabelScope.TRAIN
    def for_json(self):
        """Returns a dictionary suitable for JSON encoding.
        The ZpsJsonEncoder will call this method automatically.
        Returns:
            :obj:`dict`: A JSON serializable version of this Document.
        """
        payload = {
            'modelId': self.model_id,
            'label': self.label,
            'bbox': self.bbox,
            'simhash': self.simhash,
            'scope': self.scope.name
        }
        return payload
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/model.py
|
model.py
|
# flake8: noqa
from .asset import *
from .datasource import *
from .exception import *
from .job import *
from .project import *
from .model import *
from .analysis import *
from .clip import *
|
zvi-client
|
/zvi-client-1.1.3.tar.gz/zvi-client-1.1.3/pylib/zmlp/entity/__init__.py
|
__init__.py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.