Commit ca8e863

Updated to use same provisioning mechanism as operator labs
1 parent 0454b42 commit ca8e863

File tree

8 files changed (+201, -96 lines)


.s2i/bin/assemble

Lines changed: 0 additions & 5 deletions
This file was deleted.
Lines changed: 61 additions & 0 deletions

@@ -0,0 +1,61 @@
#!/bin/bash

fail()
{
    echo $* 1>&2
    exit 1
}

WORKSHOP_NAME=lab-developing-with-odo

JUPYTERHUB_APPLICATION=${JUPYTERHUB_APPLICATION:-developing-with-odo}

JUPYTERHUB_NAMESPACE=`oc project --short 2>/dev/null`

if [ "$?" != "0" ]; then
    fail "Error: Cannot determine name of project."
    exit 1
fi

echo
echo "### Checking if already have a build configuration."
echo

oc get bc "$WORKSHOP_NAME" -o name 2>/dev/null

if [ "$?" != "0" ]; then
    echo "..."

    echo
    echo "### Creating build configuration for workshop."
    echo

    oc new-build --binary --name "$WORKSHOP_NAME"

    if [ "$?" != "0" ]; then
        fail "Error: Failed to create build configuration."
        exit 1
    fi
fi

echo
echo "### Building workshop from local content."
echo

oc start-build "$WORKSHOP_NAME" --from-dir . --follow

if [ "$?" != "0" ]; then
    fail "Error: Failed to build workshop content."
    exit 1
fi

echo
echo "### Updating spawner to use image for local workshop content."
echo

oc tag "$WORKSHOP_NAME:latest" "${JUPYTERHUB_APPLICATION}-app:latest"

if [ "$?" != "0" ]; then
    fail "Error: Failed to update spawner to use image for local workshop."
    exit 1
fi
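Assuming this is the `build-workshop.sh` helper referenced by the updated README, a minimal usage sketch follows; the `workshop` project name is hypothetical, and the `JUPYTERHUB_APPLICATION` override is only needed if the spawner was deployed under a non-default name:

```bash
# Work in the project where the spawner is deployed (hypothetical name).
oc project workshop

# Rebuild the workshop image from the local checkout and retag it so the
# spawner picks it up; override the default application name if required.
JUPYTERHUB_APPLICATION=developing-with-odo ./.workshop/scripts/build-workshop.sh
```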
Lines changed: 3 additions & 0 deletions

@@ -0,0 +1,3 @@
#!/bin/bash

set -x
Lines changed: 17 additions & 0 deletions

@@ -0,0 +1,17 @@
#!/bin/bash

set -x
set -eo pipefail

JUPYTERHUB_APPLICATION=${JUPYTERHUB_APPLICATION:-developing-with-odo}
JUPYTERHUB_NAMESPACE=`oc project --short`

APPLICATION_LABELS="app=$JUPYTERHUB_APPLICATION-$JUPYTERHUB_NAMESPACE,spawner=learning-portal"

PROJECT_RESOURCES="services,routes,deploymentconfigs,imagestreams,secrets,configmaps,serviceaccounts,rolebindings,persistentvolumeclaims,pods"

oc delete "$PROJECT_RESOURCES" --selector "$APPLICATION_LABELS"

CLUSTER_RESOURCES="clusterrolebindings,clusterroles"

oc delete "$CLUSTER_RESOURCES" --selector "$APPLICATION_LABELS"
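Because this script deletes by label selector, it can help to preview what would be removed first; a sketch under the same assumptions as the script above, with the selector values mirroring the variables it sets:

```bash
# List the project-scoped resources the selector would match, without deleting anything.
JUPYTERHUB_APPLICATION=developing-with-odo
JUPYTERHUB_NAMESPACE=`oc project --short`

oc get services,routes,deploymentconfigs,imagestreams,pods \
    --selector "app=$JUPYTERHUB_APPLICATION-$JUPYTERHUB_NAMESPACE,spawner=learning-portal"
```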
Lines changed: 10 additions & 0 deletions

@@ -0,0 +1,10 @@
#!/bin/bash

set -x
set -eo pipefail

WORKSHOP_NAME=lab-developing-with-odo
JUPYTERHUB_APPLICATION=${JUPYTERHUB_APPLICATION:-developing-with-odo}
JUPYTERHUB_NAMESPACE=`oc project --short`

oc delete all --selector build="$WORKSHOP_NAME"
Lines changed: 64 additions & 0 deletions

@@ -0,0 +1,64 @@
#!/bin/bash

fail()
{
    echo $* 1>&2
    exit 1
}

WORKSHOP_IMAGE="quay.io/openshiftlabs/lab-developing-with-odo:latest"

TEMPLATE_REPO=https://raw.githubusercontent.com/openshift-labs/workshop-spawner
TEMPLATE_VERSION=3.0.5
TEMPLATE_FILE=learning-portal-production.json
TEMPLATE_PATH=$TEMPLATE_REPO/$TEMPLATE_VERSION/templates/$TEMPLATE_FILE

JUPYTERHUB_APPLICATION=${JUPYTERHUB_APPLICATION:-developing-with-odo}

JUPYTERHUB_NAMESPACE=`oc project --short 2>/dev/null`

if [ "$?" != "0" ]; then
    fail "Error: Cannot determine name of project."
    exit 1
fi

echo
echo "### Creating spawner application."
echo

oc process -f $TEMPLATE_PATH \
    --param APPLICATION_NAME="$JUPYTERHUB_APPLICATION" \
    --param PROJECT_NAME="$JUPYTERHUB_NAMESPACE" | oc apply -f -

if [ "$?" != "0" ]; then
    fail "Error: Failed to create deployment for spawner."
    exit 1
fi

echo
echo "### Waiting for the spawner to deploy."
echo

oc rollout status dc/"$JUPYTERHUB_APPLICATION"

if [ "$?" != "0" ]; then
    fail "Error: Deployment of spawner failed to complete."
    exit 1
fi

echo
echo "### Updating spawner to use image for workshop."
echo

oc tag "$WORKSHOP_IMAGE" "${JUPYTERHUB_APPLICATION}-app:latest"

if [ "$?" != "0" ]; then
    fail "Error: Failed to update spawner to use workshop image."
    exit 1
fi

echo
echo "### Route details for the spawner are as follows."
echo

oc get route "${JUPYTERHUB_APPLICATION}"
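For reference, the same template can be rendered without touching the cluster, which is a quick way to inspect what the spawner deployment will contain. This is only a sketch; on newer `oc` releases the flag may need to be `--dry-run=client`:

```bash
TEMPLATE_REPO=https://raw.githubusercontent.com/openshift-labs/workshop-spawner
TEMPLATE_VERSION=3.0.5
TEMPLATE_PATH=$TEMPLATE_REPO/$TEMPLATE_VERSION/templates/learning-portal-production.json

# Render the template and show the objects it would create, without applying them.
oc process -f $TEMPLATE_PATH \
    --param APPLICATION_NAME=developing-with-odo \
    --param PROJECT_NAME=`oc project --short` | oc apply -f - --dry-run
```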

Dockerfile

Lines changed: 8 additions & 7 deletions

@@ -4,7 +4,7 @@ RUN source /opt/workshop/etc/profile.d/python.sh && \
     pip install --no-cache-dir powerline-shell==0.7.0 && \
     fix-permissions /opt/app-root
 
-ENV TERMINAL_TAB=split
+ENV TERMINAL_TAB=split ODO_VERSION=v1.0.0-beta1
 
 RUN git clone https://github.com/openshift-labs/beercan-shooter-game.git sample && \
     fix-permissions /opt/app-root/src
@@ -19,13 +19,14 @@ COPY .workshop/assets/nodejs_assemble backend/.s2i/bin/assemble
 
 USER root
 
-COPY . /opt/app-root/src
+COPY . /tmp/src
 
-RUN rm Dockerfile .gitignore .dockerignore && \
-    chown -R 1001:0 /opt/app-root/src && \
-    fix-permissions /opt/app-root/src && \
-    mv workshop /opt/app-root/workshop
+RUN rm -rf /tmp/src/Dockerfile /tmp/src/.gitignore /tmp/src/.dockerignore && \
+    rm -rf /tmp/src/.git* && \
+    chown -R 1001 /tmp/src && \
+    chgrp -R 0 /tmp/src && \
+    chmod -R g+w /tmp/src
 
 USER 1001
 
-ENV ODO_VERSION=v1.0.0-beta1
+RUN /usr/libexec/s2i/assemble
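The net effect of this change is that the repository is now staged in `/tmp/src` with group-writable permissions and the image runs the standard S2I assemble step itself, instead of copying files straight into `/opt/app-root/src`. A quick local check of the built image, along the lines of what the previous README described (the image name and port `10080` come from that earlier text):

```bash
# Build the workshop image from the repository checkout.
docker build -t lab-developing-with-odo .

# Run it locally and browse to http://localhost:10080 to confirm the content renders.
docker run --rm -p 10080:10080 lab-developing-with-odo:latest
```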

README.md

Lines changed: 38 additions & 84 deletions

@@ -3,129 +3,83 @@ Developing with Odo
 
 In this self paced workshop you will learn how to use OpenShift Do (`odo`) to build and deploy applications on the OpenShift Container Platform.
 
-Deploying the workshop
+Deploying the Workshop
 ----------------------
 
-To deploy the workshop for use at an event, such as a booth at a conference, where users will come and go and the number of users is unknown, you will need to deploy this to an OpenShift cluster using an account with cluster admin access.
+To deploy the workshop, first clone this Git repository to your own machine.
 
-First create a project under which the deployment for the workshop is to run. The project name `workshop` is recommended if available.
+Next create a project in OpenShift into which the workshop is to be deployed.
 
-```bash
-oc new-project workshop
 ```
-
-Now create the deployment by running:
-
-```bash
-oc new-app https://raw.githubusercontent.com/openshift-labs/workshop-jupyterhub/develop/templates/learning-portal-production.json \
-    --param PROJECT_NAME=workshop \
-    --param APPLICATION_NAME=odo \
-    --param TERMINAL_IMAGE=quay.io/jorgemoralespou/lab-devconf:master \
-    --namespace workshop
+oc new-project workshops
 ```
 
-The value of the `PROJECT_NAME` template parameter must match the name of the project you created. You also need to supply the `--namespace` option with the same project name if the project isn't your current context.
+From within the top level of the Git repository, now run:
 
-The value of the `APPLICATION_NAME` template parameter should be a name to identify the specific workshop.
+```
+./.workshop/scripts/deploy-spawner.sh
+```
 
-The value of the `TERMINAL_IMAGE` is the name of the image on `quay.io` built from this repository, for this workshop.
+The name of the deployment will be ``developing-with-odo``.
 
-Once deployed, run:
+You can determine the hostname for the URL to access the workshop by running:
 
-```bash
-oc get route/odo --namespace workshop
+```
+oc get route developing-with-odo
 ```
 
-to determine the name of the public route to access the workshop. Each user visiting the URL will get their own instance of the application delivering the workshop content and, embedded terminals and console.
-
-Working on the content
-----------------------
-
-To make changes to the content, there are a few methods you can
-use.
+Editing the Workshop
+--------------------
 
-The first way is to use a local docker service running on your own machine. The local container instance will be used to host the content while you work on it. You will still need a separate OpenShift cluster to test the deployments in if you want to execute any commands given in the steps.
+The deployment created above will use a version of the workshop which has been pre-built into an image and which is hosted on ``quay.io``.
 
-To use this method, first pull down the `workshop-dashboard` container image. You can switch to using the `latest` image, or other tagged version as necessary. You should use the same version as is used in the `Dockerfile`.
+To make changes to the workshop content and test them, edit the files in the Git repository and then run:
 
-```bash
-docker pull quay.io/openshiftlabs/workshop-dashboard:1.3.3
 ```
-
-Run this image with `docker run`, exposing port `10080` so you can access it from your web browser. At the same time, mount the directory for the repository into the container at the directory `/opt/app-root/src`.
-
-```bash
-docker run --rm -p 10080:10080 -v `pwd`:/opt/app-root/src \
-    quay.io/openshiftlabs/workshop-dashboard:1.3.3
+./.workshop/scripts/build-workshop.sh
 ```
 
-Open a browser window against `http://localhost:10080`. Use the docker host IP if not running on `localhost`.
-
-This will allow you to check whether your content is displayed properly and navigate back and forth through the steps.
+This will replace the existing image used by the active deployment.
 
-Because you are not running in an OpenShift cluster, if you want to test running of commands against a cluster, you will need to first run `oc login` against the cluster you want to use, and provide any credentials for logging into that cluster. Alternatively, use `oc login --token` with an access token for the cluster. If the content assumes that you already have a project created for you, create a new project as necessary.
+If you are running an existing instance of the workshop, from your web browser select "Restart Workshop" from the menu top right of the workshop environment dashboard.
 
-Note that because you are only using the base image `workshop-dashboard` when running the container, if there are any build steps defined in the `Dockerfile`, they will not be run. You would therefore need to manually run any build steps to further setup the environment. Be aware that because you are mounting your local repository directory, those steps could add or remove files from it. Do not therefore run steps which would be destructive to the repository directory.
+When you are happy with your changes, push them back to the remote Git repository. This will automatically trigger a new build of the image hosted on ``quay.io``.
 
-Unless these additional steps are simple and non destructive, you are best off only using this method to test the display of content and any navigation.
+If you need to change the RBAC definitions, or what resources are created when a project is created, change the definitions in the ``templates`` directory. You can then re-run:
 
-If you desire, you can with this method keep the container running and edit the content markdown files from your local machine. When you want to check it, you need only reload the browser window, or content frame, to see the changes. You do not need to restart the container. A restart of the container would only be required if modifying the `workshop/config.js` file.
+```
+./.workshop/scripts/deploy-spawner.sh
+```
 
-Testing the content
--------------------
+and it will update the active definitions.
 
-To build an image and test it in a local container run time, run:
+Note that if you do this, you will need to re-run:
 
-```bash
-docker build -t lab-developing-with-odo .
 ```
-
-Then run it as:
-
-```bash
-docker run --rm -p 10080:10080 lab-developing-with-odo:latest
+./.workshop/scripts/build-workshop.sh
 ```
 
-Open a browser window against `http://localhost:10080`. Use the docker host IP if not running on `localhost`.
+to have any local content changes be used once again as it will revert back to using the image on ``quay.io``.
 
-As before you will need to login to any OpenShift cluster from the command line using `oc login`. In this case though, the build steps have been run, so no manual steps are required to set up the environment.
+Deleting the Workshop
+---------------------
 
-To build an image and test it in conjunction with the learning portal deployment before pushing and making it public, first deploy an instance of the learning portal with an empty workshop image. Presuming the same `workshop` project is used, run:
+To delete the spawner and any active sessions, including projects, run:
 
-```bash
-oc new-app https://raw.githubusercontent.com/openshift-labs/workshop-jupyterhub/develop/templates/learning-portal-production.json \
-    --param PROJECT_NAME=workshop \
-    --param APPLICATION_NAME=odo-test \
-    --namespace workshop
 ```
-
-Ensure that the name of the application passed to `APPLICATION_NAME` is different to what was used above.
-
-Run:
-
-```bash
-oc get route/odo-test --namespace workshop
+./.workshop/scripts/delete-spawner.sh
 ```
-to determine the URL for accessing this instance.
 
-Next create a binary build from the repository directory:
+To delete the build configuration for the workshop image, run:
 
-```bash
-oc new-build --name=lab-deploying-to-odo --binary --strategy docker
 ```
-
-Each time you want to build a new version of the image to test, run:
-
-```bash
-oc start-build lab-deploying-to-odo --from-dir . --follow
+./.workshop/scripts/delete-workshop.sh
 ```
 
-Once the image has been built, update the learning portal image stream configuration to use your latest build.
+To delete special resources for CRDs and cluster roles for the Kafka operator, run:
 
-```bash
-oc tag lab-deploying-to-odo:latest odo-test-app:latest
+```
+./.workshop/scripts/delete-resources.sh
 ```
 
-This must be done each time to ensure that `latest` tag of `odo-test-app` is mapped to the image hash for the latest build.
-
-You can then click on the "Restart" button top right of the dashboard view to force any user session to be shutdown and a new started with the new version of the image to test.
+Only delete this last set of resources if the Kafka operator is not being used elsewhere in the cluster. Ideally this workshop environment should only be deployed in an expendable cluster, and not one which is shared for other work.
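Taken together, the updated README reduces the workflow to a short sequence; a sketch using the ``workshops`` project name from the README example above:

```bash
# One-time setup: create a project and deploy the spawner.
oc new-project workshops
./.workshop/scripts/deploy-spawner.sh

# Find the URL for the workshop.
oc get route developing-with-odo

# After editing content locally, rebuild the image into the live deployment.
./.workshop/scripts/build-workshop.sh
```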
