Compare commits: 45ecdc768e ... main (20 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | bcba4f1030 |  |
|  | 64ceee2080 |  |
|  | 8b9906672c |  |
|  | 252999c990 |  |
|  | 2ec413e21e |  |
|  | a203b15639 |  |
|  | 87546e064d |  |
|  | f708520371 |  |
|  | 53da042b20 |  |
|  | 8b86bb6213 |  |
|  | 155630f93a |  |
|  | a421fe69c6 |  |
|  | 391fae1812 |  |
|  | d24ae42ccf |  |
|  | a95f41a323 |  |
|  | 4f18c40d94 |  |
|  | fbf96ab528 |  |
|  | 7347065f91 |  |
|  | 024538ad6f |  |
|  | 9d7edb6ed9 |  |
DockerStack/Dockers/Dashy/dashy-compose.yaml (new file, 28 lines)
@@ -0,0 +1,28 @@
---
version: "3.8"
services:
  dashy:
    # To build from source, replace 'image: lissy93/dashy' with 'build: .'
    # build: .
    image: lissy93/dashy
    container_name: Dashy
    # Pass in your config file below, by specifying the path on your host machine
    # volumes:
    #   - /root/my-config.yml:/app/user-data/conf.yml
    ports:
      - 8080:8080
    # Set any environmental variables
    environment:
      - NODE_ENV=production
      # Specify your user ID and group ID. You can find this by running `id -u` and `id -g`
      # - UID=1000
      # - GID=1000
    # Specify restart policy
    restart: unless-stopped
    # Configure healthchecks
    healthcheck:
      test: ['CMD', 'node', '/app/services/healthcheck']
      interval: 1m30s
      timeout: 10s
      retries: 3
      start_period: 40s
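The commented-out `volumes` lines above describe how a host-side config file is passed into the container. A minimal sketch of that mapping with the comments removed, assuming a hypothetical host path of `/srv/dashy/conf.yml` (the container path `/app/user-data/conf.yml` comes from the file's own comment):

```yaml
# Sketch only: mount a host config file into Dashy.
# /srv/dashy/conf.yml is a placeholder host path, not part of this commit.
services:
  dashy:
    image: lissy93/dashy
    volumes:
      - /srv/dashy/conf.yml:/app/user-data/conf.yml
```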
DockerStack/Dockers/Homepage/homepage-compose.yaml (new file, 31 lines)
@@ -0,0 +1,31 @@
version: '3'

services:

  homepage:
    image: ghcr.io/gethomepage/homepage:latest
    container_name: homepage
    restart: unless-stopped
    ports:
      - 3065:3000
    expose:
      - 3065
    environment:
      - PUID=1000
      - PGID=1000
      - HOMEPAGE_ALLOWED_HOSTS=dockers.localdomain:3065 # add your FQDN here

    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro # (optional) For docker integrations
      - homep_conf:/app/config # Make sure your local config directory exists
      - homep_ico:/app/public/icons

    deploy:
      mode: replicated
      replicas: 3

volumes:
  homep_conf:
    driver: gfs_vol
  homep_ico:
    driver: gfs_vol
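The `HOMEPAGE_ALLOWED_HOSTS` entry carries the hint "add your FQDN here". A hedged sketch of what that could look like with an extra hostname, assuming the variable accepts a comma-separated list of hosts (as the homepage documentation describes); `homepage.example.org` is a placeholder, not a value from this commit:

```yaml
# Sketch: allow the dashboard to be reached via the internal name and a public FQDN.
# Assumes HOMEPAGE_ALLOWED_HOSTS takes a comma-separated list of hosts.
services:
  homepage:
    environment:
      - HOMEPAGE_ALLOWED_HOSTS=dockers.localdomain:3065,homepage.example.org
```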
DockerStack/Dockers/Semaphore/semaphore-compose.yaml (new file, 70 lines)
@@ -0,0 +1,70 @@
version: '3'
services:
  # uncomment this section and comment out the mysql section to use postgres instead of mysql
  #postgres:
  #  restart: unless-stopped
  #  ports:
  #    - 5432:5432
  #  image: postgres:14
  #  hostname: postgres
  #  volumes:
  #    - semaphore-postgres:/var/lib/postgresql/data
  #  environment:
  #    POSTGRES_USER: semaphore
  #    POSTGRES_PASSWORD: semaphore
  #    POSTGRES_DB: semaphore

  # if you wish to use postgres, comment the mysql service section below
  mysql:
    restart: unless-stopped
    ports:
      - 3306:3306
    image: mysql:8.0
    hostname: mysql
    volumes:
      - semaphore-mysql:/var/lib/mysql
    environment:
      MYSQL_RANDOM_ROOT_PASSWORD: 'yes'
      MYSQL_DATABASE: semaphore
      MYSQL_USER: semaphore
      MYSQL_PASSWORD: semaphore
    deploy:
      mode: replicated
      replicas: 1

  semaphore:
    restart: unless-stopped
    ports:
      - 3000:3000
    image: semaphoreui/semaphore:latest
    volumes:
      - semaphore_config:/config
    environment:
      SEMAPHORE_DB_USER: semaphore
      SEMAPHORE_DB_PASS: semaphore
      SEMAPHORE_DB_HOST: mysql # for postgres, change to: postgres
      SEMAPHORE_DB_PORT: 3306 # change to 5432 for postgres
      SEMAPHORE_DB_DIALECT: mysql
      SEMAPHORE_DB: semaphore
      SEMAPHORE_PLAYBOOK_PATH: /tmp/semaphore/
      SEMAPHORE_ADMIN_PASSWORD: cGUtDmqTJCyaYqL2nbJm6qjBuL
      SEMAPHORE_ADMIN_NAME: admin
      SEMAPHORE_ADMIN_EMAIL: aveosysarch@outlook.com
      SEMAPHORE_ADMIN: admin
      SEMAPHORE_ACCESS_KEY_ENCRYPTION: gs72mPntFATGJs9qK0pQ0rKtfidlexiMjYCH9gWKhTU=
      SEMAPHORE_LDAP_ACTIVATED: 'no' # if you wish to use ldap, set to: 'yes'
      SEMAPHORE_LDAP_HOST: dc01.local.example.com
      SEMAPHORE_LDAP_PORT: '636'
      SEMAPHORE_LDAP_NEEDTLS: 'yes'
      SEMAPHORE_LDAP_DN_BIND: 'uid=bind_user,cn=users,cn=accounts,dc=local,dc=shiftsystems,dc=net'
      SEMAPHORE_LDAP_PASSWORD: 'ldap_bind_account_password'
      SEMAPHORE_LDAP_DN_SEARCH: 'dc=local,dc=example,dc=com'
      SEMAPHORE_LDAP_SEARCH_FILTER: "(&(uid=%s)(memberOf=cn=ipausers,cn=groups,cn=accounts,dc=local,dc=example,dc=com))"
    depends_on:
      - mysql # for postgres, change to: postgres
    deploy:
      mode: replicated
      replicas: 1

volumes:
  semaphore-mysql: # to use postgres, switch to: semaphore-postgres
    driver: gfs_vol
  semaphore_config:
    driver: gfs_vol
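The inline comments explain how to switch from MySQL to the commented-out PostgreSQL service. A sketch of the `semaphore` service's database settings after that switch, following those hints; the dialect value `postgres` is taken from Semaphore's documentation rather than from this commit:

```yaml
# Sketch: Semaphore pointed at the commented-out postgres service instead of mysql.
services:
  semaphore:
    environment:
      SEMAPHORE_DB_USER: semaphore
      SEMAPHORE_DB_PASS: semaphore
      SEMAPHORE_DB_HOST: postgres   # was: mysql
      SEMAPHORE_DB_PORT: 5432       # was: 3306
      SEMAPHORE_DB_DIALECT: postgres
      SEMAPHORE_DB: semaphore
    depends_on:
      - postgres                    # was: mysql
```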
DockerStack/Dockers/Shepherd/shepherd-compose.yaml (new file, 35 lines)
@@ -0,0 +1,35 @@
version: "3"
services:
  app:
    image: containrrr/shepherd
    environment:
      TZ: 'Europe/Copenhagen'
      FILTER_SERVICES: ''
      IGNORELIST_SERVICES: ''
      RUN_ONCE_AND_EXIT: "true"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
    deploy:
      replicas: 0
      restart_policy:
        condition: none
      labels:
        - swarm.cronjob.enable=true
        - swarm.cronjob.schedule=0 1 * * *
        - swarm.cronjob.skip-running=true
      placement:
        constraints:
          - node.role == manager

  scheduler:
    image: crazymax/swarm-cronjob:latest
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
    environment:
      - "TZ=Europe/Copenhagen"
      - "LOG_LEVEL=info"
      - "LOG_JSON=false"
    deploy:
      placement:
        constraints:
          - node.role == manager
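`FILTER_SERVICES` and `IGNORELIST_SERVICES` are left empty here, so Shepherd considers every service in the swarm. A hedged sketch of an opt-in setup, assuming (per Shepherd's documentation) that `FILTER_SERVICES` is passed through to `docker service ls --filter`; the label name `shepherd.enable` is an illustrative choice, not defined in this commit:

```yaml
# Sketch: only update services that carry an explicit opt-in label.
services:
  app:
    image: containrrr/shepherd
    environment:
      FILTER_SERVICES: 'label=shepherd.enable=true'
```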
@@ -1,18 +1,33 @@
-version: "3"
 services:
   vaultwarden:
     image: vaultwarden/server:latest-alpine
     container_name: vaultwarden
     restart: unless-stopped
-    environment:
-      DOMAIN: "vw.aveosysarch.org"
-    volumes:
-      - vaultwarden_data:/data/
     ports:
-      - 30080:80
+      - 30080:80 #map any custom port to use (replace 9445 not 80)
+    volumes:
+      - vaultwarden_data:/data:rw
+    environment:
+      # - ROCKET_TLS={certs="/ssl/certs/certs.pem",key="/ssl/private/key.pem"} // Environment variable is specific to the Rocket web server
+      - ADMIN_TOKEN='!X%%R48RhN9g^hWVT^e7h#w@of'
+      - WEBSOCKET_ENABLED=true
+      # - SIGNUPS_ALLOWED=false
+      # - SMTP_HOST=${SMTP_HOST}
+      # - SMTP_FROM=${SMTP_FROM}
+      # - SMTP_PORT=${SMTP_PORT}
+      # - SMTP_SSL=${SMTP_SSL}
+      # - SMTP_USERNAME=${SMTP_USERNAME}
+      # - SMTP_PASSWORD=${SMTP_PASSWORD}
+      - DOMAIN=https://vw.aveosysarch.org
     deploy:
       mode: replicated
       replicas: 1
+    #uncomment below network part if you are using Nginx Proxy Manager, or you can remove the same
+    #networks:
+    #  default:
+    #    external:
+    #      name: nginx-proxy-network
 volumes:
   vaultwarden_data:
     driver: gfs_vol
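The hunk also adds a commented `networks` block for running behind Nginx Proxy Manager. A sketch of that block with the comments removed, reusing the `nginx-proxy-network` name from the diff itself and assuming such an external network already exists:

```yaml
# Sketch: attach the stack's default network to an existing external
# network named nginx-proxy-network (name taken from the commented lines above).
networks:
  default:
    external:
      name: nginx-proxy-network
```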
TicTacToe/TicTacToe.py (new file, 45 lines)
@@ -0,0 +1,45 @@
# Tic Tac Toe game in Python

board = [' ' for _ in range(9)]

def print_board():
    row1 = '| {} | {} | {} |'.format(board[0], board[1], board[2])
    row2 = '| {} | {} | {} |'.format(board[3], board[4], board[5])
    row3 = '| {} | {} | {} |'.format(board[6], board[7], board[8])

    print()
    print(row1)
    print(row2)
    print(row3)
    print()

def check_win():
    win_conditions = [(0, 1, 2), (3, 4, 5), (6, 7, 8), (0, 3, 6), (1, 4, 7), (2, 5, 8), (0, 4, 8), (2, 4, 6)]
    for condition in win_conditions:
        if board[condition[0]] == board[condition[1]] == board[condition[2]] != ' ':
            return board[condition[0]]
    if ' ' not in board:
        return 'Tie'
    return False

def game():
    current_player = 'X'

    while True:
        print_board()
        move = input("Player {}, enter your move (1-9): ".format(current_player))
        if move.isdigit() and 1 <= int(move) <= 9 and board[int(move) - 1] == ' ':
            board[int(move) - 1] = current_player
            result = check_win()
            if result:
                print_board()
                if result == 'Tie':
                    print("It's a tie!")
                else:
                    print("Player {} wins!".format(result))
                break
            current_player = 'O' if current_player == 'X' else 'X'
        else:
            print("Invalid move, try again.")

game()