diff --git a/python/introduction/README.md b/python/introduction/README.md
new file mode 100644
index 0000000..9152346
--- /dev/null
+++ b/python/introduction/README.md
@@ -0,0 +1,16 @@
+## introduction
+```
+variables
+list --> bisous.py programming.py
+input --> bisous.py
+print
+for loop --> bisous.py
+conditional statements if/else/elif --> bisous.py
+break --> bisous.py
+while loop --> missing.py
+dictionary
+enumerate
+return --> missing.py
+random --> programming.py
+function --> missing.py
+```
diff --git a/python/introduction/bisous.py b/python/introduction/bisous.py
new file mode 100644
index 0000000..d04edd0
--- /dev/null
+++ b/python/introduction/bisous.py
@@ -0,0 +1,19 @@
+# Initialize the variables
+queer = "mon amour"
+bisous = ["ma biche", "mon bébé", "mon amour", "mon chéri.e"]
+
+# Ask the user for input
+amoureuxse = input("Entrez le nom de votre bien-aiméx : ")
+
+# Loop through the list and print the matching message
+for bisou in bisous:
+    if bisou == queer:
+        print("bisou pour toi", bisou, amoureuxse)
+
+    elif amoureuxse == "python":
+        print("on dirait un.e geek")
+        break
+
+    else:
+        print(f":* :* {bisou}, {amoureuxse}")
+
diff --git a/python/introduction/missing.py b/python/introduction/missing.py
new file mode 100644
index 0000000..565be8c
--- /dev/null
+++ b/python/introduction/missing.py
@@ -0,0 +1,12 @@
+from time import sleep
+
+love = True
+how = "so"
+
+def missing(so):
+    print(f"I miss you {so} much")
+
+while love:
+    missing(how)
+    how += " so"
+    sleep(0.2)
diff --git a/python/introduction/programming.py b/python/introduction/programming.py
new file mode 100644
index 0000000..dbf4630
--- /dev/null
+++ b/python/introduction/programming.py
@@ -0,0 +1,25 @@
+"""
+Poem converted from bash programming.sh by Winnie Soon, modified from The House of Dust (1967) by Alison Knowles and James Tenney
+"""
+import random
+import time
+
+# Lists for the different elements
+kisses = ["DEAREST", "SWEETHEART", "WORLD", "DARLING", "BABY", "LOVE", "MONKEY", "SUGAR", "LITTLE PRINCE"]
+material = ["SAND", "DUST", "LEAVES", "PAPER", "TIN", "ROOTS", "BRICK", "STONE", "DISCARDED CLOTHING", "GLASS", "STEEL", "PLASTIC", "MUD", "BROKEN DISHES", "WOOD", "STRAW", "WEEDS", "FOREST"]
+location = ["IN A GREEN, MOSSY TERRAIN", "IN AN OVERPOPULATED AREA", "BY THE SEA", "BY AN ABANDONED LAKE", "IN A DESERTED FACTORY", "IN DENSE WOODS", "IN JAPAN", "AMONG SMALL HILLS", "IN SOUTHERN FRANCE", "AMONG HIGH MOUNTAINS", "ON AN ISLAND", "IN A COLD, WINDY CLIMATE", "IN A PLACE WITH BOTH HEAVY RAIN AND BRIGHT SUN", "IN A DESERTED AIRPORT", "IN A HOT CLIMATE", "INSIDE A MOUNTAIN", "ON THE SEA", "IN MICHIGAN", "IN HEAVY JUNGLE UNDERGROWTH", "BY A RIVER", "AMONG OTHER HOUSES", "IN A DESERTED CHURCH", "IN A METROPOLIS", "UNDERWATER", "ON THE SCREEN", "ON THE ROAD"]
+light_source = ["CANDLES", "ALL AVAILABLE LIGHTING", "ELECTRICITY", "NATURAL LIGHT", "LEDS", "MOON LIGHT", "THE SMALL TORCH"]
+inhabitants = ["PEOPLE WHO SLEEP VERY LITTLE", "VEGETARIANS", "HORSES AND BIRDS", "PEOPLE SPEAKING MANY LANGUAGES WEARING LITTLE OR NO CLOTHING", "CHILDREN AND OLD PEOPLE", "VARIOUS BIRDS AND FISH", "LOVERS", "PEOPLE WHO ENJOY EATING TOGETHER", "PEOPLE WHO EAT A GREAT DEAL", "COLLECTORS OF ALL TYPES", "FRIENDS AND ENEMIES", "PEOPLE WHO SLEEP ALMOST ALL THE TIME", "VERY TALL PEOPLE", "AMERICAN INDIANS", "LITTLE BOYS", "PEOPLE FROM MANY WALKS OF LIFE", "FRIENDS", "FRENCH AND GERMAN SPEAKING PEOPLE", "FISHERMEN AND FAMILIES", "PEOPLE WHO LOVE TO READ", "CHEERFUL KIDS", "QUEER LOVERS", "NAUGHTY MONKEYS", "KIDDOS"]
+
+# Infinite loop
+while True:
+    print("HELLO", random.choice(kisses))
+    print(" A TERMINAL OF BLACK", random.choice(material))
+    print(" ", random.choice(location))
+    print(" PROGRAMMING", random.choice(light_source))
+    print(" KISSED BY", random.choice(inhabitants))
+    print(" ")
+
+    # Delay for 3.5 seconds
+    time.sleep(3.5)
+
diff --git a/python/scrape/README.md b/python/scrape/README.md
index 8c5c3b7..edd9e12 100644
--- a/python/scrape/README.md
+++ b/python/scrape/README.md
@@ -1,12 +1,16 @@
-## A script that scrapes images from a given URL
-we need to use the requests and BeautifulSoup libraries to retrieve and parse the HTML content. `os` and `shutil` are helpful for managing files and saving the images. Scraping should be done ethically, following the website's robots.txt rules and terms of service.
+## A script that extracts images from a given URL
+We need to install:
 ```
 pip install requests beautifulsoup4 tldextract
 ```
-Run the script with:
+Execute the script with:
 ```
-python cyberfeminist_images.py
+python get_images.py https://www.freepik.com/images
 ```
+Replace the URL with the link you want to scrape.
+**Note:** Scraping should be done ethically, respecting the site's robots.txt rules and terms of service.
+
+