???? ??? ???? ???? ???? ???? ?? ??? ???? ??? ????? ???? ???? ???? ?????. ???? ????? ?? ??? ???? ???? ?? ??? ??? ??????. ???? AI? ?? ? ? RAG (Retrieval-Augmented Generation) ???? ?? ??? ?? ??? ??? ???? ?? ???? ????? ???? ?? ????? ??? ?????. ? ??? ????? ??? ??? ????? ???? ?? ??, ?? ? ??? ?? ???? ???? ??? ????? ?? ? ????.
?? ??
? JSON ???? ?? ? ???? URL, ?? ? ??? ???? ????. ? ??? ? ???? ?? RAG ???? ??? ?? ? ?? ??? ?? ?? ??? ??? ? ????. ??? ?? ??? ?? ??? ????. ???? ???? ?? ??? ??? ??????
?? ? ?? :
2 ?? : ?? ??? ??? ??
3 ?? : ??? ?? AI ????? ???? ?????
??? ??? ?????. ? ?? ??? ???? ???? ??? ?? ? ? ??? ?? ??? ?????.
?? ? ??? ?? ??? ?????
top_p :
?? AI ??? ???? ???? ??? ? ??? ???? ?????. ???? ??? ???? ???? ?? ?? ??? ???? ???? ???? ?? ?? ??? ????????. ?? ? ?? : ??? ??? ???? ????? ??? ? ??? ??? ?????. ?? ???? ?? ??? ?? ?? ????? ??? ??? ???? ??? ???? ?????. ??? ?? ??? ?? ??? ???, ???? ???? ?? ??? ??? ????????.
5 ?? : ?? ?? ? ??? ??? ??
???? ?? ???? ??? ?? ???? ??? ?? ??? ??? ???? ?? ????. ?? ?? ????? ???? ?? ??? ??? ?? ????? ???? ????????.
?? ? ?? :
??
?? ??? ?? ? ? ?? ? JSON ???? ? ??? ?? ?? ? ???? ?????. Fields PM_Points? ?? ??, ??, ?? ?? ?? ?? ??? ??? ? ??? ?????.
??? ?? ?? ? ?? ??? ?? ??
?????, RAG? ???? ??? ??? ???? ?? ??? ???? ???? ??? ??? ???? ?? ? ? ????. ??? ?? ??? ?? ??? ?? ?? ??? ???? ??, ?? ? ?? ? ??? ???? ???? ???? ? ??? ??? ??? ? ??? ?????. ? ??? ??? ?? ????? ???? ?????? ?? ??? ? ? ???, ??? ?? ??? ????? ?? ??? ?????? ?? ??? ?? ??? ??? ? ??? ? ? ????.
??
???? ??? ?? ???? ????? ?? ?? ????? ?? ??? ???? ???? ?? ?????. RAG (Retrieval-Augmented Generation) ???? ??? ????, ??? ?? ? ??? ??? ???? ?? ???????? ??? ???? ????. RAG systems? ?? ??? ???? ???, ?? ? ??? ???????? ???? ??? ?? ??? ???? ????? ???? ???? ?? ??? ?? ??? ?? ? ??? ?????.
? ??? ??? ?? ??? ??
BFS? ??? ??? ?? ? ???? ?? ?? AI ????
? ?? ??? ???? ??? ?? ?? ?? ?? ??? ?? ??
?? ??? ???? ??? ??? ????
??
import requests
from bs4 import BeautifulSoup
from collections import deque
# Function to extract links using BFS
def bfs_link_extraction(start_url, max_depth=3):
    """Breadth-first crawl starting at *start_url*, collecting absolute links.

    Args:
        start_url: URL of the page the crawl starts from.
        max_depth: maximum crawl depth; URLs queued deeper than this are
            skipped (they remain in the result but are never fetched).

    Returns:
        list[str]: every distinct absolute http(s) URL discovered, in
        breadth-first discovery order.
    """
    visited = set()                  # URLs already queued, to avoid re-visiting
    queue = deque([(start_url, 0)])  # (url, depth) pairs awaiting a fetch
    all_links = []
    while queue:
        url, depth = queue.popleft()
        if depth > max_depth:
            continue  # beyond the crawl horizon; do not fetch this page
        # Fetch the content of the URL
        try:
            # timeout=10 keeps one unresponsive host from hanging the whole
            # crawl — requests.get has NO default timeout
            response = requests.get(url, timeout=10)
            soup = BeautifulSoup(response.content, 'html.parser')
            # Extract all links in the page (anchors that carry an href)
            for link in soup.find_all('a', href=True):
                full_url = link['href']
                # Keep only absolute http(s) URLs not seen before;
                # relative links are intentionally ignored
                if full_url.startswith('http') and full_url not in visited:
                    visited.add(full_url)
                    queue.append((full_url, depth + 1))
                    all_links.append(full_url)
        except requests.exceptions.RequestException as e:
            print(f"Error fetching {url}: {e}")
    return all_links

# Start the BFS from the homepage
start_url = 'https://www.example.com'  # Replace with the actual homepage URL
all_extracted_links = bfs_link_extraction(start_url)
print(f"Extracted {len(all_extracted_links)} links.")
import requests
from bs4 import BeautifulSoup
from collections import deque
# Function to extract links using BFS
def bfs_link_extraction(start_url, max_depth=3):
    """Breadth-first crawl starting at *start_url*, collecting absolute links.

    Args:
        start_url: URL of the page the crawl starts from.
        max_depth: maximum crawl depth; URLs queued deeper than this are
            skipped (they remain in the result but are never fetched).

    Returns:
        list[str]: every distinct absolute http(s) URL discovered, in
        breadth-first discovery order.
    """
    visited = set()                  # URLs already queued, to avoid re-visiting
    queue = deque([(start_url, 0)])  # (url, depth) pairs awaiting a fetch
    all_links = []
    while queue:
        url, depth = queue.popleft()
        if depth > max_depth:
            continue  # beyond the crawl horizon; do not fetch this page
        # Fetch the content of the URL
        try:
            # timeout=10 keeps one unresponsive host from hanging the whole
            # crawl — requests.get has NO default timeout
            response = requests.get(url, timeout=10)
            soup = BeautifulSoup(response.content, 'html.parser')
            # Extract all links in the page (anchors that carry an href)
            for link in soup.find_all('a', href=True):
                full_url = link['href']
                # Keep only absolute http(s) URLs not seen before;
                # relative links are intentionally ignored
                if full_url.startswith('http') and full_url not in visited:
                    visited.add(full_url)
                    queue.append((full_url, depth + 1))
                    all_links.append(full_url)
        except requests.exceptions.RequestException as e:
            print(f"Error fetching {url}: {e}")
    return all_links

# Start the BFS from the homepage
start_url = 'https://www.example.com'  # Replace with the actual homepage URL
all_extracted_links = bfs_link_extraction(start_url)
print(f"Extracted {len(all_extracted_links)} links.")
?? ? ?? :
? ???? input_text ???? ?? ??? ?? AI ??? ?? ?? ??? ???? ???? ????. ? ??? ???? ?? ??? ?? ??? ???? ?? ?????. ???, ??? GROQ? API ??? ???? input_text? ??? ??? ????? ??? ??? ????. ??? ???? ??? ??? ?? AI ??? ???? ??? ???? ?? ? ???? ?????. ??? ??, max_tokens ? top_p ?? ??? ???? ?? ? ??? ????? ??? ?????.
API ?? ?? : <b>
</b>
Extracted 1500 links.
? ??? ???? ?????. ??? ???? ? ??? ? ??? ?? ?? ??, ? ?? ?? ? ?? ???? ????. import json
# Function to scrape and extract data from the URLs
def scrape_data_from_links(links):
    """Fetch each URL and extract its title and main content block.

    Args:
        links: iterable of absolute URLs to scrape.

    Returns:
        list[dict]: one {'url', 'title', 'content'} record per page that was
        fetched and parsed successfully; pages that fail to download or lack
        the expected elements are reported and skipped.
    """
    scraped_data = []
    for link in links:
        try:
            # timeout=10 keeps an unresponsive host from stalling the run —
            # requests.get has NO default timeout
            response = requests.get(link, timeout=10)
            soup = BeautifulSoup(response.content, 'html.parser')
            # Example: Extract 'title' and 'content' (modify according to your needs)
            title_tag = soup.find('title')
            content_tag = soup.find('div', class_='content')  # Adjust selector
            if title_tag is None or content_tag is None:
                # Guard: find() returns None when the element is absent, and
                # .get_text() on None would raise AttributeError (which the
                # RequestException handler below does not catch)
                print(f"Skipping {link}: missing title or content element")
                continue
            # Store the extracted data
            scraped_data.append({
                'url': link,
                'title': title_tag.get_text(),
                'content': content_tag.get_text(),
            })
        except requests.exceptions.RequestException as e:
            print(f"Error scraping {link}: {e}")
    return scraped_data

# Scrape data from the extracted links
scraped_contents = scrape_data_from_links(all_extracted_links)

# Save scraped data to a JSON file
with open('/content/scraped_data.json', 'w') as outfile:
    json.dump(scraped_contents, outfile, indent=4)
print("Data scraping complete.")
4 ?? : ?? ?? ? ?? ?? import requests
from bs4 import BeautifulSoup
from collections import deque
# Function to extract links using BFS
def bfs_link_extraction(start_url, max_depth=3):
    """Breadth-first crawl starting at *start_url*, collecting absolute links.

    Args:
        start_url: URL of the page the crawl starts from.
        max_depth: maximum crawl depth; URLs queued deeper than this are
            skipped (they remain in the result but are never fetched).

    Returns:
        list[str]: every distinct absolute http(s) URL discovered, in
        breadth-first discovery order.
    """
    visited = set()                  # URLs already queued, to avoid re-visiting
    queue = deque([(start_url, 0)])  # (url, depth) pairs awaiting a fetch
    all_links = []
    while queue:
        url, depth = queue.popleft()
        if depth > max_depth:
            continue  # beyond the crawl horizon; do not fetch this page
        # Fetch the content of the URL
        try:
            # timeout=10 keeps one unresponsive host from hanging the whole
            # crawl — requests.get has NO default timeout
            response = requests.get(url, timeout=10)
            soup = BeautifulSoup(response.content, 'html.parser')
            # Extract all links in the page (anchors that carry an href)
            for link in soup.find_all('a', href=True):
                full_url = link['href']
                # Keep only absolute http(s) URLs not seen before;
                # relative links are intentionally ignored
                if full_url.startswith('http') and full_url not in visited:
                    visited.add(full_url)
                    queue.append((full_url, depth + 1))
                    all_links.append(full_url)
        except requests.exceptions.RequestException as e:
            print(f"Error fetching {url}: {e}")
    return all_links

# Start the BFS from the homepage
start_url = 'https://www.example.com'  # Replace with the actual homepage URL
all_extracted_links = bfs_link_extraction(start_url)
print(f"Extracted {len(all_extracted_links)} links.")
?? ? ?? :
? ??? ?? ? ???? ????? ???? ??? ?? ??? ? ? ????. ? ??? ??? ??? ? ???? ???? ?? ? ??? ???? ?????.
? ???? Langchain ?? ?? ??, ??? ? OpenAI ??? ???? ? ?????. JQ? ?? JSON ?????? Langchain? ?? ?? ??? ??? ???? ?? ??? ?? ??????. Langchain-Openai? GPT? ?? OpenAI ??? ??? ?????? Langchain-Chroma? ?? ???? ?????? ??? ?? ?? ???? ?????.
? ????? ??? ?? ? ??? (??? ?? ??, ?? ? ?? ??)? ?? ??? ?????????.
3 ?? : ??? ?? ???? ??
import requests
from bs4 import BeautifulSoup
from collections import deque
# Function to extract links using BFS
def bfs_link_extraction(start_url, max_depth=3):
    """Breadth-first crawl starting at *start_url*, collecting absolute links.

    Args:
        start_url: URL of the page the crawl starts from.
        max_depth: maximum crawl depth; URLs queued deeper than this are
            skipped (they remain in the result but are never fetched).

    Returns:
        list[str]: every distinct absolute http(s) URL discovered, in
        breadth-first discovery order.
    """
    visited = set()                  # URLs already queued, to avoid re-visiting
    queue = deque([(start_url, 0)])  # (url, depth) pairs awaiting a fetch
    all_links = []
    while queue:
        url, depth = queue.popleft()
        if depth > max_depth:
            continue  # beyond the crawl horizon; do not fetch this page
        # Fetch the content of the URL
        try:
            # timeout=10 keeps one unresponsive host from hanging the whole
            # crawl — requests.get has NO default timeout
            response = requests.get(url, timeout=10)
            soup = BeautifulSoup(response.content, 'html.parser')
            # Extract all links in the page (anchors that carry an href)
            for link in soup.find_all('a', href=True):
                full_url = link['href']
                # Keep only absolute http(s) URLs not seen before;
                # relative links are intentionally ignored
                if full_url.startswith('http') and full_url not in visited:
                    visited.add(full_url)
                    queue.append((full_url, depth + 1))
                    all_links.append(full_url)
        except requests.exceptions.RequestException as e:
            print(f"Error fetching {url}: {e}")
    return all_links

# Start the BFS from the homepage
start_url = 'https://www.example.com'  # Replace with the actual homepage URL
all_extracted_links = bfs_link_extraction(start_url)
print(f"Extracted {len(all_extracted_links)} links.")
import requests
from bs4 import BeautifulSoup
from collections import deque
# Function to extract links using BFS
def bfs_link_extraction(start_url, max_depth=3):
    """Breadth-first crawl starting at *start_url*, collecting absolute links.

    Args:
        start_url: URL of the page the crawl starts from.
        max_depth: maximum crawl depth; URLs queued deeper than this are
            skipped (they remain in the result but are never fetched).

    Returns:
        list[str]: every distinct absolute http(s) URL discovered, in
        breadth-first discovery order.
    """
    visited = set()                  # URLs already queued, to avoid re-visiting
    queue = deque([(start_url, 0)])  # (url, depth) pairs awaiting a fetch
    all_links = []
    while queue:
        url, depth = queue.popleft()
        if depth > max_depth:
            continue  # beyond the crawl horizon; do not fetch this page
        # Fetch the content of the URL
        try:
            # timeout=10 keeps one unresponsive host from hanging the whole
            # crawl — requests.get has NO default timeout
            response = requests.get(url, timeout=10)
            soup = BeautifulSoup(response.content, 'html.parser')
            # Extract all links in the page (anchors that carry an href)
            for link in soup.find_all('a', href=True):
                full_url = link['href']
                # Keep only absolute http(s) URLs not seen before;
                # relative links are intentionally ignored
                if full_url.startswith('http') and full_url not in visited:
                    visited.add(full_url)
                    queue.append((full_url, depth + 1))
                    all_links.append(full_url)
        except requests.exceptions.RequestException as e:
            print(f"Error fetching {url}: {e}")
    return all_links

# Start the BFS from the homepage
start_url = 'https://www.example.com'  # Replace with the actual homepage URL
all_extracted_links = bfs_link_extraction(start_url)
print(f"Extracted {len(all_extracted_links)} links.")
Extracted 1500 links.
??? : ???? ?? ? ?? ??? ?? ???? ?????. ? ???? ?? ?? ?? ????? ? 4 ?? ???? ??? ? ????. ??? ??, ?? ? ??? ????????!
? ??? ???? ??? ?? ????? ?? ??? ??? ?? ?????. ??? ??? PHP ??? ????? ?? ?? ??? ?????!

? AI ??

Undress AI Tool
??? ???? ??

Undresser.AI Undress
???? ?? ??? ??? ?? AI ?? ?

AI Clothes Remover
???? ?? ???? ??? AI ?????.

Clothoff.io
AI ? ???

Video Face Swap
??? ??? AI ?? ?? ??? ???? ?? ???? ??? ?? ????!

?? ??

??? ??

???++7.3.1
???? ?? ?? ?? ???

SublimeText3 ??? ??
??? ??, ???? ?? ????.

???? 13.0.1 ???
??? PHP ?? ?? ??

???? CS6
??? ? ?? ??

SublimeText3 Mac ??
? ??? ?? ?? ?????(SublimeText3)

??? ??











Google? Notebooklm? Gemini 2.5? ???? ??? AI ?? ??? ??? ??? ???? ? ?????. ??? ?? ?, ???? ??? ? ?? "??"??? ?? ?? ???? ??? ??? ????.

LLMSORGANATIONS? ?? ??? ??? LLM? ?? ??? ?? ????? ??? 72%? ?? ??? ?? ? ??? ???? ???, ?? ??? ?? ? ??? ???? ????. ?? ?? 40% a

??? ?????? ?????? ???? ????. AI ?? ?? ??? ??? ???? ???? ?? ???? ?? ??? ??????. ??, ?? ?? ???? ??? ?? ? ????? ? ??? PR? ???? ??? ??? ????

?? : ?? ?? ? Tirias Research?? ???? ?? ? IBM, NVIDIA ? ?? ??? ?? ??????. ?? ?? ?? AI ??? ??? ?? ??? ? ???? ??? ??? ??? ? ???????. ?? ?? a

? ??? AI ??? ??? ?????. ?? ??? Kayak ? Edtech ?? ? Chegg? ?? ????? ?? ???? ????? Google? ?? ???? ??? 60%? ???? ??? ???? ?? ??? ???? ????.

???? ??? ??? ?? ??? ??? ??? ?? ? ?? ??? ???? ??? ???? ???? ?? ???? ?? ?????. ??? ????? ??? ?? ???? ?? ?? ??? ???? ????.

??? ?? ??????. ???? AI ??? ??? ??? ??? ??? AI ???? ???? ???? ?? ???? AI? ?? Forbes ? ??? ????? (?? ?? ??). AGI? ??? ????

?? ?? ??? ?? ?? ?? ??? ???? Cisco? ??? ?? ????? ??? ??? ??? ?? ? ? ???? ??? ?? ?????. (?? : Cisco
