Skip to content

Commit

Permalink
Merge of dev into master
Browse files Browse the repository at this point in the history
v2.6.0 release.

Last version to support Python 2.7
  • Loading branch information
jamesba authored Oct 22, 2019
2 parents 749641c + f481fe7 commit 442ef79
Show file tree
Hide file tree
Showing 29 changed files with 4,121 additions and 99 deletions.
3 changes: 2 additions & 1 deletion .flake8
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
[flake8]
max-line-length = 160
exclude = .git,.tox,dist,deb_dist,__pycache__
exclude = .git,.tox,dist,deb_dist,__pycache__,._*
ignore = E121,E123,E126,E226,E24,E704,W503,W504
11 changes: 11 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,16 @@
# Mediagrains Library Changelog

## 2.6.0
- Added support for async methods to gsf decoder in python 3.6+
- Added `Grain.origin_timerange` method.
- Added `Grain.normalise_time` method.
- Added `Colourbars` test signal generator
- Added `MovingBarOverlay` for test signal generators
- Added `mediagrains.numpy` sublibrary for handling video grains as numpy arrays, in python 3.6+
- Added `PSNR` option to grain compare.
- Support for converting between all uncompressed video grain formats added to `mediagrains.numpy`
- This is the last release that will support python 2.7 (apart from bugfixes)

## 2.5.3
- BUGFIX: IOBytes doesn't quite fulfil bytes-like contracts, but can be converted to something that does

Expand Down
88 changes: 68 additions & 20 deletions Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -16,17 +16,18 @@

pipeline {
agent {
label "ubuntu&&apmm-slave"
label "ubuntu&&apmm-slave&&18.04"
}
options {
ansiColor('xterm') // Add support for coloured output
buildDiscarder(logRotator(numToKeepStr: '10')) // Discard old builds
}
triggers {
cron(env.BRANCH_NAME == 'master' ? 'H H(0-8) * * *' : '') // Build master some time every morning
cron((env.BRANCH_NAME == 'master' || env.BRANCH_NAME == 'dev')? 'H H(0-8) * * *' : '') // Build master and dev some time every morning
}
parameters {
booleanParam(name: "FORCE_PYUPLOAD", defaultValue: false, description: "Force Python artifact upload")
booleanParam(name: "FORCE_PYPIUPLOAD", defaultValue: false, description: "Force Python artifact upload to PyPi")
booleanParam(name: "FORCE_PYUPLOAD", defaultValue: false, description: "Force Python artifact upload to internal BBC repo")
booleanParam(name: "FORCE_DEBUPLOAD", defaultValue: false, description: "Force Debian package upload")
booleanParam(name: "FORCE_DOCSUPLOAD", defaultValue: false, description: "Force docs upload")
}
Expand All @@ -43,24 +44,42 @@ pipeline {
}
stage ("Tests") {
parallel {
stage ("Linting Check") {
stage ("Py2.7 Linting Check") {
steps {
script {
env.lint_result = "FAILURE"
env.lint27_result = "FAILURE"
}
bbcGithubNotify(context: "lint/flake8", status: "PENDING")
bbcGithubNotify(context: "lint/flake8_27", status: "PENDING")
// Run the linter
sh 'flake8'
sh 'python2.7 -m flake8 --filename=mediagrains/*.py,tests/test_*.py'
script {
env.lint_result = "SUCCESS" // This will only run if the sh above succeeded
env.lint27_result = "SUCCESS" // This will only run if the sh above succeeded
}
}
post {
always {
bbcGithubNotify(context: "lint/flake8", status: env.lint_result)
bbcGithubNotify(context: "lint/flake8_27", status: env.lint27_result)
}
}
}
stage ("Py36 Linting Check") {
steps {
script {
env.lint3_result = "FAILURE"
}
bbcGithubNotify(context: "lint/flake8_3", status: "PENDING")
// Run the linter
sh 'python3 -m flake8 --filename=mediagrains/*.py,mediagrains_async/*.py,tests/test_*.py,tests/atest_*.py'
script {
env.lint3_result = "SUCCESS" // This will only run if the sh above succeeded
}
}
post {
always {
bbcGithubNotify(context: "lint/flake8_3", status: env.lint3_result)
}
}
}
stage ("Build Docs") {
steps {
sh 'TOXDIR=/tmp/$(basename ${WORKSPACE})/tox-docs make docs'
Expand Down Expand Up @@ -89,18 +108,18 @@ pipeline {
stage ("Python 3 Unit Tests") {
steps {
script {
env.py3_result = "FAILURE"
env.py36_result = "FAILURE"
}
bbcGithubNotify(context: "tests/py3", status: "PENDING")
bbcGithubNotify(context: "tests/py36", status: "PENDING")
// Use a workdirectory in /tmp to avoid shebang length limitation
sh 'tox -e py3 --recreate --workdir /tmp/$(basename ${WORKSPACE})/tox-py3'
sh 'tox -e py36 --recreate --workdir /tmp/$(basename ${WORKSPACE})/tox-py36'
script {
env.py3_result = "SUCCESS" // This will only run if the sh above succeeded
env.py36_result = "SUCCESS" // This will only run if the sh above succeeded
}
}
post {
always {
bbcGithubNotify(context: "tests/py3", status: env.py3_result)
bbcGithubNotify(context: "tests/py36", status: env.py36_result)
}
}
}
Expand Down Expand Up @@ -162,10 +181,11 @@ pipeline {
when {
anyOf {
expression { return params.FORCE_PYUPLOAD }
expression { return params.FORCE_PYPIUPLOAD }
expression { return params.FORCE_DEBUPLOAD }
expression { return params.FORCE_DOCSUPLOAD }
expression {
bbcShouldUploadArtifacts(branches: ["master"])
bbcShouldUploadArtifacts(branches: ["master", "dev"])
}
}
}
Expand All @@ -175,7 +195,7 @@ pipeline {
anyOf {
expression { return params.FORCE_DOCSUPLOAD }
expression {
bbcShouldUploadArtifacts(branches: ["master"])
bbcShouldUploadArtifacts(branches: ["master", "dev"])
}
}
}
Expand All @@ -186,7 +206,7 @@ pipeline {
stage ("Upload to PyPi") {
when {
anyOf {
expression { return params.FORCE_PYUPLOAD }
expression { return params.FORCE_PYPIUPLOAD }
expression {
bbcShouldUploadArtifacts(branches: ["master"])
}
Expand All @@ -199,8 +219,8 @@ pipeline {
bbcGithubNotify(context: "pypi/upload", status: "PENDING")
sh 'rm -rf dist/*'
bbcMakeGlobalWheel("py27")
bbcMakeGlobalWheel("py3")
bbcTwineUpload(toxenv: "py3", pypi: true)
bbcMakeGlobalWheel("py36")
bbcTwineUpload(toxenv: "py36", pypi: true)
script {
env.pypiUpload_result = "SUCCESS" // This will only run if the steps above succeeded
}
Expand All @@ -211,6 +231,34 @@ pipeline {
}
}
}
stage ("Upload to Artifactory") {
when {
anyOf {
expression { return params.FORCE_PYUPLOAD }
expression {
bbcShouldUploadArtifacts(branches: ["dev"])
}
}
}
steps {
script {
env.artifactoryUpload_result = "FAILURE"
}
bbcGithubNotify(context: "artifactory/upload", status: "PENDING")
sh 'rm -rf dist/*'
bbcMakeGlobalWheel("py27")
bbcMakeGlobalWheel("py36")
bbcTwineUpload(toxenv: "py36", pypi: false)
script {
env.artifactoryUpload_result = "SUCCESS" // This will only run if the steps above succeeded
}
}
post {
always {
bbcGithubNotify(context: "artifactory/upload", status: env.artifactoryUpload_result)
}
}
}
stage ("upload deb") {
when {
anyOf {
Expand Down Expand Up @@ -248,7 +296,7 @@ pipeline {
}
post {
always {
bbcSlackNotify(channel: "#apmm-cloudfit")
bbcSlackNotify(channel: "#apmm-cloudfit", branches: ["master", "dev"])
}
}
}
1 change: 1 addition & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,6 @@ include tox.ini
include COPYING
recursive-include examples *.gsf
recursive-include tests *.py
recursive-include mediagrains_py36 *.py
include ICLA.md
include LICENSE.md
28 changes: 27 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ documentation for more details.

### Requirements

* A working Python 2.7 or Python 3.x installation
* A working Python 2.7 or Python 3.6+ installation
* BBC R&D's internal deb repository set up as a source for apt (if installing via apt-get)
* The tool [tox](https://tox.readthedocs.io/en/latest/) is needed to run the unittests, but not required to use the library.

Expand Down Expand Up @@ -85,6 +85,8 @@ it with colour-bars:
... i += 1
```

(In Python 3.6+ a more natural interface for accessing data exists in the form of numpy arrays. See later.)

The object grain can then be freely used for whatever video processing
is desired, or it can be serialised into a GSF file as follows:

Expand Down Expand Up @@ -159,6 +161,30 @@ between two grains, both as a printed string (as seen above) and also
in a data-centric fashion as a tree structure which can be
interrogated in code.

### Numpy arrays (Python 3.6+)

In python 3.6 or higher an additional feature is provided in the form of numpy array access to the data in a grain. As such the above example of creating colourbars can be done more easily:

```Python console
>>> from mediagrains.numpy import VideoGrain
>>> from uuid import uuid1
>>> from mediagrains.cogenums import CogFrameFormat, CogFrameLayout
>>> src_id = uuid1()
>>> flow_id = uuid1()
>>> grain = VideoGrain(src_id, flow_id, cog_frame_format=CogFrameFormat.S16_422_10BIT, width=1920, height=1080)
>>> colours = [
... (0x3FF, 0x000, 0x3FF),
... (0x3FF, 0x3FF, 0x000),
... (0x3FF, 0x000, 0x000),
... (0x3FF, 0x3FF, 0x3FF),
... (0x3FF, 0x200, 0x3FF),
... (0x3FF, 0x3FF, 0x200) ]
>>> for c in range(0, 3):
... for x in range(0, grain.components[c].width):
... for y in range(0, grain.components[c].height):
... grain.component_data[c][x, y] = colours[x*len(colours)//grain.components[c].width][c]
```

## Documentation

The API is well documented in the docstrings of the module mediagrains, to view:
Expand Down
29 changes: 29 additions & 0 deletions mediagrains/asyncio.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
#!/usr/bin/python
#
# Copyright 2019 British Broadcasting Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""\
Asyncio compatible layer for mediagrains, but only available in python 3.6+
"""

from sys import version_info

# Tuple comparison is the idiomatic way to gate on an interpreter version:
# sys.version_info >= (3, 6) is equivalent to the original
# "major > 3 or (major == 3 and minor >= 6)" check, including for any
# hypothetical Python 4.x.
if version_info >= (3, 6):
    # Re-export the async GSF decoder API so callers can simply
    # "from mediagrains.asyncio import AsyncGSFDecoder, loads".
    from mediagrains_py36.asyncio import AsyncGSFDecoder, AsyncLazyLoaderUnloadedError, loads  # noqa: F401

    __all__ = ["AsyncGSFDecoder", "AsyncLazyLoaderUnloadedError", "loads"]
else:
    # On Python 2.7 the module imports cleanly but exposes nothing.
    __all__ = []
Loading

0 comments on commit 442ef79

Please sign in to comment.