notepad 1 month ago
parent
commit
f2914929d4
85 files changed, with 36400 additions and 59 deletions
  1. .gitignore (+15 -59)
  2. common/common_data.py (+21 -0)
  3. common/split_text.py (+44 -0)
  4. config/read_config.py (+17 -0)
  5. core/api_get_article.py (+58 -0)
  6. core/api_get_article2.py (+44 -0)
  7. core/api_get_article3.py (+39 -0)
  8. core/api_get_audio.py (+60 -0)
  9. core/api_get_spoken_language.py (+34 -0)
  10. core/api_get_word.py (+42 -0)
  11. core/api_routes_jwt.py (+116 -0)
  12. core/respone_format.py (+37 -0)
  13. data/all_exchange_words.txt (+28579 -0)
  14. data/get_all_exchange_words.py (+62 -0)
  15. data/get_frequency_script.py (+18 -0)
  16. data/json_word_frequency.json (+0 -0)
  17. data/单词词义表.xlsx (BIN)
  18. data/春笋单词对照变形.xlsx (BIN)
  19. deepseek/ds_api.py (+110 -0)
  20. deepseek/get_article3.py (+271 -0)
  21. gpt/chatgpt.py (+112 -0)
  22. gpt/get_article.py (+570 -0)
  23. gpt/get_article2.py (+271 -0)
  24. gpt/gpt.py (+103 -0)
  25. gpt/gpt_check.py (+343 -0)
  26. gpt/query_oss_file.py (+31 -0)
  27. main.py (+68 -0)
  28. main_9000.py (+68 -0)
  29. make_docx_demo/check_test_table/aaaaaaaaaa.py (+16 -0)
  30. make_docx_demo/check_test_table/baidu_ocr.py (+39 -0)
  31. make_docx_demo/check_test_table/check_table.py (+4 -0)
  32. make_docx_demo/check_test_table/image_preprocess.py (+388 -0)
  33. make_docx_demo/check_test_table/image_preprocess2.py (+363 -0)
  34. make_docx_demo/check_test_table/log.txt (+0 -0)
  35. make_docx_demo/check_test_table/mark_ocr_loca.py (+42 -0)
  36. make_docx_demo/check_test_table/output_with_rectangles.jpg (BIN)
  37. make_docx_demo/check_test_table/sharpen_image.jpg (BIN)
  38. make_docx_demo/check_test_table/template.jpg (BIN)
  39. make_docx_demo/check_test_table/transformed_image.jpg (BIN)
  40. make_docx_demo/data.py (+4 -0)
  41. make_docx_demo/docx_other_func.py (+146 -0)
  42. make_docx_demo/get_standard_data.py (+30 -0)
  43. make_docx_demo/main_word.py (+1166 -0)
  44. make_docx_demo/main_word_applet.py (+1162 -0)
  45. make_docx_demo/new_word2pdf.py (+41 -0)
  46. make_docx_demo/static/2.jpg (BIN)
  47. make_docx_demo/static/baidu_qrcode.png (BIN)
  48. make_docx_demo/static/chart.png (BIN)
  49. make_docx_demo/static/happy_word.jpg (BIN)
  50. make_docx_demo/static/lianxi1.jpg (BIN)
  51. make_docx_demo/static/lianxi2.jpg (BIN)
  52. make_docx_demo/static/lianxi3.jpg (BIN)
  53. make_docx_demo/static/lianxi4.jpg (BIN)
  54. make_docx_demo/static/lianxi5.jpg (BIN)
  55. make_docx_demo/static/lianxi6.jpg (BIN)
  56. make_docx_demo/static/lianxi7.jpg (BIN)
  57. make_docx_demo/static/lianxi8.jpg (BIN)
  58. make_docx_demo/static/line.jpg (BIN)
  59. make_docx_demo/static/line_example.png (BIN)
  60. make_docx_demo/static/logo.png (BIN)
  61. make_docx_demo/static/logo2.png (BIN)
  62. make_docx_demo/static/pen.png (BIN)
  63. make_docx_demo/static/qr_code.jpg (BIN)
  64. make_docx_demo/static/qr_code.png (BIN)
  65. make_docx_demo/static/t1.jpg (BIN)
  66. make_docx_demo/static/首页示意图.jpg (BIN)
  67. make_docx_demo/static/首页示意图2.jpg (BIN)
  68. make_docx_demo/word2pdf.py (+63 -0)
  69. make_docx_demo/word_component/make_rectangle.py (+227 -0)
  70. mock/mock_request.py (+180 -0)
  71. spoken_language/common/__init__.py (+1 -0)
  72. spoken_language/common/credential.py (+6 -0)
  73. spoken_language/common/utils.py (+7 -0)
  74. spoken_language/convert_tts.py (+8 -0)
  75. spoken_language/read_config.py (+20 -0)
  76. spoken_language/soe/__init__.py (+0 -0)
  77. spoken_language/soe/speaking_assessment.py (+275 -0)
  78. spoken_language/soeexample.py (+155 -0)
  79. tools/ali_log.py (+85 -0)
  80. tools/audio.py (+251 -0)
  81. tools/del_expire_file.py (+48 -0)
  82. tools/loglog.py (+105 -0)
  83. tools/new_mysql.py (+180 -0)
  84. tools/sql_format.py (+250 -0)
  85. tools/thread_pool_manager.py (+5 -0)

+ 15 - 59
.gitignore

@@ -1,60 +1,16 @@
-# ---> Python
-# Byte-compiled / optimized / DLL files
+.*
+/*.txt
+test*.py
+/test
+/log
+*.docx
+*.pdf
+log/
+*.yaml
 __pycache__/
-*.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-env/
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-*.egg-info/
-.installed.cfg
-*.egg
-
-# PyInstaller
-#  Usually these files are written by a python script from a template
-#  before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*,cover
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
+*.pyc
+*.pyo
+*.pyd
+*.pyw
+*.pyz
+*.pywz

+ 21 - 0
common/common_data.py

@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+import os
+import json
+
+SECRET_KEY = os.getenv("key")
+
+try:
+    with open("data/json_word_frequency.json","r",encoding="utf-8") as f:
+        word_frequency = json.loads(f.read())
+except FileNotFoundError: 
+    with open(r"C:\Users\pan\Desktop\demo\qback\data\json_word_frequency.json", "r", encoding="utf-8") as f:
+        word_frequency = json.loads(f.read())
+
+
+all_json_words_set = {word for key,word in word_frequency.items()}
+
+
+with open("data/all_exchange_words.txt","r",encoding="utf-8") as f:
+    all_exchange_words = set(f.read().split("\n"))
+
+

+ 44 - 0
common/split_text.py

@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+import re
+
+def split_text_to_word(text:str):
+    words_list = re.findall(r'\b[-\'\w]+\b',text)
+    return words_list
+
+def get_article_words_count(text:str):
+    return len(split_text_to_word(text))
+
+
+def split_text_to_sentences(text:str) -> list:
+    sentences = re.split(r'(?<=[.!?;])', text) 
+    sentences = [i for i in sentences if i.replace(" ", "")]
+    return sentences
+
+
+def split_text_to_word_punctuation(text:str):
+    word_punctuation_list = re.findall(r'\b[-\'\w]+\b|[^\w\s]|\n',text)
+    return word_punctuation_list
+
+def is_word(single_word:str,strict:bool=False):
+    """strict 严格模式,默认不开。严格模式下,每个实体字符必须是字母。全部都是字母才算是单词
+    非严格模式下,有一个字母就算是单词。即使是 op123
+    """
+    single_word = single_word.strip()
+    if strict:
+        r = all([re.search(r'[a-zA-Z]', char_) for char_ in single_word if char_])
+        if r:
+            return True
+        return False
+
+    if re.search(r'[a-zA-Z]', single_word):
+        return True
+    return False
+
+
+if __name__ == '__main__':
+
+    a = "fdh fgdhf fgd-y i'am a student.gfddfgfd dfhgfd ! fdgh,fdgh fght. 3.1415"
+   
+   
+   
+    print(is_word("student34",strict=True))
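A quick sketch of how the helpers in common/split_text.py behave (editorial example; the expected outputs are inferred from the regexes in the diff above, and the import assumes the repository root is on sys.path):

    # Sketch: exercising common/split_text.py (outputs inferred from the regexes above).
    from common.split_text import split_text_to_word, split_text_to_sentences, is_word

    text = "I am a student. It is sunny today! Really?"

    print(split_text_to_word(text))       # word tokens only, punctuation dropped
    print(split_text_to_sentences(text))  # split after . ! ? ; -> three sentences
    print(is_word("op123"))               # True: non-strict mode needs only one letter
    print(is_word("op123", strict=True))  # False: strict mode requires every character to be a letter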

+ 17 - 0
config/read_config.py

@@ -0,0 +1,17 @@
+# -*- coding:utf-8 -*-
+import yaml
+
+
+def read_config():
+   
+    with open("config/env.yaml", "r",encoding="utf-8") as file:
+        config = yaml.safe_load(file)
+        return config
+
+address = "https://dcjxb.yunzhixue.cn" if read_config()['env']=='product' else "http://dcjxbtest.yunzhixue.cn"
+
+
+if __name__ == '__main__':
+    import os
+    os.chdir(r'C:\Users\86131\Desktop\demo\ai_qback')
+    print(read_config()['env'])
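For reference, read_config() loads config/env.yaml relative to the working directory, and the only key the code above actually reads is env (compared against 'product' to pick the address). A minimal sketch of that decision with an assumed file content (env: test is illustrative; the real env.yaml is not part of this commit):

    # Sketch: the decision read_config() feeds. 'env: test' is an assumed config value.
    import yaml

    sample_yaml = "env: test\n"            # assumed content of config/env.yaml
    config = yaml.safe_load(sample_yaml)

    address = ("https://dcjxb.yunzhixue.cn"
               if config["env"] == "product"
               else "http://dcjxbtest.yunzhixue.cn")
    print(address)                         # -> http://dcjxbtest.yunzhixue.cn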

+ 58 - 0
core/api_get_article.py

@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+from fastapi import FastAPI, Form, HTTPException, Request,status,APIRouter,Query,Path
+from tools.loglog import logger
+from gpt.get_article import GetArticle
+from gpt.query_oss_file import query_file_content
+from core.respone_format import *
+
+from pydantic import BaseModel, ValidationError, conint
+from typing import List, Optional
+
+
+router = APIRouter()
+get_article = GetArticle()
+
+
+class ArticleRequest(BaseModel):
+    meaning_ids: List[conint(ge=1)] 
+    callback_url: Optional[str] = None 
+    demo_name: Optional[str] = "无" 
+    student_stage: Optional[int] = 1 
+    vocabulary: Optional[int] = 500 
+    class_id :Optional[int]
+
+@router.post("/article")
+def post_article(json_data:ArticleRequest,request:Request):
+    real_ip = request.headers.get("X-Real-IP","localhost")
+    words_meaning_ids: list = json_data.meaning_ids
+    callback_url = json_data.callback_url
+    demo_name = json_data.demo_name
+    student_stage = json_data.student_stage
+    vocabulary = json_data.vocabulary
+    class_id = json_data.class_id 
+
+    try:
+        if not words_meaning_ids:
+            return resp_404(message="没有词义id")
+
+        r = get_article.submit_task(words_meaning_ids=words_meaning_ids,callback_url=callback_url,
+                                    real_ip=real_ip,demo_name=demo_name,
+                                    student_stage=student_stage,vocabulary=vocabulary,class_id=class_id)
+        return r if not isinstance(r,str) else resp_500(message=r)
+
+    except Exception as e:
+        logger.error(f"{type(e).__name__},{e}")
+        return resp_500(message=f"{type(e).__name__},{e}")
+
+
+@router.post("/query_oss_file")
+def query_oss_file(json_data:dict,request:Request):
+    oss_key = json_data.get("key")
+
+    if not oss_key:
+        return resp_500(message="请提供key值")
+    j = query_file_content(key=oss_key)
+    if j == 0:
+        return resp_500(message="错误:没有这个文件")
+    return JSONResponse(j)
+
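A hedged client-side example for the POST /article endpoint above (the base URL and meaning IDs are placeholders, not values from the repository):

    # Sketch: submitting an article-generation task to POST /article (placeholder values).
    import requests

    payload = {
        "meaning_ids": [101, 102, 103],   # required, each id >= 1
        "callback_url": None,             # optional
        "demo_name": "无",                # optional, defaults to "无"
        "student_stage": 1,
        "vocabulary": 500,
        "class_id": 1,
    }
    resp = requests.post("http://localhost:8000/article", json=payload)
    print(resp.status_code, resp.json())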

+ 44 - 0
core/api_get_article2.py

@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+from fastapi import FastAPI, Form, HTTPException, Request,status,APIRouter,Query,Path
+from tools.loglog import logger,log_err_e
+
+from core.respone_format import *
+from gpt.get_article2 import GetArticle
+from pydantic import BaseModel, ValidationError, conint,Field
+from typing import List, Optional,Literal
+
+
+router = APIRouter()
+get_article = GetArticle()
+
+class Word(BaseModel):
+    meaning_id:int = Field(..., description="单词的词义id")
+    word_id:int = Field(..., description="单词id")
+    spell: str = Field(..., description="单词的拼写")
+    meaning: str = Field(..., description="单词的意思")
+
+
+class ArticleRequest(BaseModel):
+    core_words: List[Word] = Field(..., description="单词列表")
+    extend_words: List[Word] = Field(..., description="单词列表")
+    take_count: int = 2 
+    student_stage: Literal[1, 2, 3] 
+    demo_name: Optional[str] = "无" 
+    reading_level: int = Field(default=-1, description="阅读水平,默认值为-1")
+
+
+@router.post("/article/reading-comprehension")
+def post_article(json_data:ArticleRequest,request:Request):
+    json_data = json_data.dict()
+    real_ip = request.headers.get("X-Real-IP","0.0.0.0")
+    core_words,extend_words,take_count,student_stage,demo_name = json_data["core_words"],json_data["extend_words"],json_data["take_count"],json_data["student_stage"],json_data["demo_name"]
+    reading_level = json_data.get("reading_level",-1) 
+
+    try:
+        r = get_article.submit_task(core_words=core_words,extend_words=extend_words, take_count=take_count,student_stage=student_stage,real_ip=real_ip,demo_name=demo_name)
+        return r if not isinstance(r,str) else resp_500(message=r)
+
+    except Exception as e:
+        log_err_e(e,msg="文章2接口错误/article/reading-comprehension;")
+        return resp_500(message=f"{type(e).__name__},{e}")
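The reading-comprehension variant nests Word objects in its body; a sketch of the expected payload shape (IDs, spellings and the host are illustrative only):

    # Sketch: payload for POST /article/reading-comprehension (illustrative values).
    import requests

    word = {"meaning_id": 1, "word_id": 1, "spell": "apple", "meaning": "苹果"}
    payload = {
        "core_words": [word],
        "extend_words": [],
        "take_count": 2,
        "student_stage": 1,      # must be 1, 2 or 3
        "demo_name": "无",
        "reading_level": -1,
    }
    resp = requests.post("http://localhost:8000/article/reading-comprehension", json=payload)
    print(resp.json())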

+ 39 - 0
core/api_get_article3.py

@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+
+from fastapi import FastAPI, Form, HTTPException, Request,status,APIRouter,Query,Path
+from tools.loglog import logger,log_err_e
+
+from core.respone_format import *
+from deepseek.get_article3 import GetArticle
+from pydantic import BaseModel, ValidationError, conint,Field
+from typing import List, Optional,Literal
+
+
+router = APIRouter(tags=['deepseek接口'])
+get_article = GetArticle()
+
+class Word(BaseModel):
+    spell: str = Field(..., description="单词的拼写")
+    meaning: str = Field(..., description="单词的意思")
+
+
+class ArticleRequest(BaseModel):
+    words: List[Word] = Field(..., description="单词列表")
+    take_count: int = 2 
+    student_stage: Literal[1, 2, 3] 
+    demo_name: Optional[str] = "无" 
+
+
+@router.post("/article/reading-comprehension/deepseek")
+def post_article(json_data:ArticleRequest,request:Request):
+    json_data = json_data.dict()
+    real_ip = request.headers.get("X-Real-IP")
+    words,take_count,student_stage,demo_name = json_data["words"],json_data["take_count"],json_data["student_stage"],json_data["demo_name"]
+
+    try:
+        r = get_article.submit_task(words_meaning_list=words, take_count=take_count,student_stage=student_stage,real_ip=real_ip,demo_name=demo_name)
+        return r if not isinstance(r,str) else resp_500(message=r)
+
+    except Exception as e:
+        log_err_e(e,msg="文章3,ds接口错误/article/reading-comprehension/deepseek;")
+        return resp_500(message=f"{type(e).__name__},{e}")

+ 60 - 0
core/api_get_audio.py

@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+from fastapi import FastAPI, Form, HTTPException, Request,status,APIRouter,Query,Path
+from fastapi.responses import StreamingResponse
+from tools.loglog import logger
+from tools.audio import GetAudio
+from core.respone_format import *
+import traceback
+
+router = APIRouter()
+get_audio = GetAudio()
+
+
+
+"""
+TTS audio generation endpoint. It takes two parameters, word and resp_type; resp_type selects the response form: 0 returns the OSS path, 1 a binary file, 2 a URL.
+"""
+
+@router.get("/tts")
+def get_tts(word:str=Query(None, max_length=300)):
+    try:
+        f = get_audio.submit_task(word_or_phrase=word,resp_type=0) 
+        r = f.result()
+        if r:
+            return resp_200(data=r)
+        return resp_500(message="生成失败")
+
+    except Exception as e:
+        logger.error(f"{type(e).__name__},{e}")
+        return resp_500(message=f"{type(e).__name__},{e}")
+
+
+@router.post("/tts")
+def get_tts(json_data:dict,request:Request):
+   
+
+    word_or_phrase = json_data["text"]
+    resp_type = json_data.get("type")
+
+   
+    if len(word_or_phrase) >= 300:
+        logger.error(f"单词或短语过长")
+        return resp_400(message="单词或短语过长")
+    if resp_type not in [0,1,2]:
+        logger.error(f"type参数不是012")
+        return resp_400(message="type参数不是012")
+
+    try:
+        f = get_audio.submit_task(word_or_phrase=word_or_phrase,resp_type=resp_type)
+        r = f.result()
+        if r and resp_type in [0,2]:
+            return resp_200(data=r)
+        if r and resp_type == 1:
+            return StreamingResponse(content=r, media_type='audio/mpeg')
+        return resp_500(message="生成失败")
+
+    except Exception as e:
+        traceback_str = traceback.format_exc()
+        logger.error(traceback_str)
+        logger.error(f"{type(e).__name__},{e}")
+        return resp_500(message=f"{type(e).__name__},{e}")
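A usage sketch for the two TTS routes above; resp_type selects the response form (0 = OSS path, 1 = binary MP3 stream, 2 = URL), and the host and text are placeholders:

    # Sketch: calling the TTS endpoints (host and text are placeholders).
    import requests

    base = "http://localhost:8000"

    # GET /tts always uses resp_type=0 on the server side and returns the OSS path.
    r = requests.get(f"{base}/tts", params={"word": "hello"})
    print(r.json())

    # POST /tts with type=1 streams the MP3 bytes back.
    r = requests.post(f"{base}/tts", json={"text": "good morning", "type": 1})
    with open("good_morning.mp3", "wb") as f:
        f.write(r.content)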

+ 34 - 0
core/api_get_spoken_language.py

@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+from fastapi import FastAPI, Form, HTTPException, Request,status,APIRouter,Query,Path,UploadFile,File
+from tools.loglog import logger,log_err_e
+from spoken_language.soeexample import spoken_result,make_spoken
+from random import randint
+from core.respone_format import *
+
+
+router = APIRouter()
+
+
+@router.post("/spoken_language")
+async def post_article(request:Request,url=Form(""),file: UploadFile = File(None),text=Form(...)):
+
+    if not url and not file:
+        logger.error("错误:请上传mp3文件url参数或者二进制文件file参数")
+        return resp_404(message="错误:请上传mp3文件url参数或者二进制文件file参数")
+    try:
+        task_id = randint(10000,99999)
+       
+        if file:
+            file_content = await file.read()
+        else:
+            file_content = None
+        data:dict = make_spoken(task_id,url,file_content,text)
+        if data:
+            logger.success(f"完成spoken_language请求:{data}")
+           
+            return data
+    except Exception as e:
+        log_err_e(e,msg="口语评测接口")
+        return resp_500(message=f"{type(e).__name__},{e}")
+
+
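The spoken-language route accepts either an MP3 url form field or an uploaded file, plus the reference text; a sketch with a local recording (file path and host are placeholders):

    # Sketch: POST /spoken_language with a local recording (placeholder path and host).
    import requests

    with open("recording.mp3", "rb") as f:
        resp = requests.post(
            "http://localhost:8000/spoken_language",
            data={"text": "How are you today?", "url": ""},
            files={"file": ("recording.mp3", f, "audio/mpeg")},
        )
    print(resp.json())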

+ 42 - 0
core/api_get_word.py

@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+from fastapi import FastAPI, Form, HTTPException, Request,status,APIRouter,Query,Path
+from fastapi.responses import FileResponse,PlainTextResponse
+from tools.loglog import logger
+from tools.audio import GetAudio
+from core.respone_format import *
+import traceback
+
+from make_docx_demo.main_word_applet import start_make_word as s2
+
+router = APIRouter()
+
+"""
+@router.post("/make_word")
+def make_word(json_data:dict,request:Request,
+            document_format:int=Query(1,description="1:docx;2.pdf"),
+            scanpage_format:int=Query(1,description="1:老版筛查表;2.新版筛查表;3.老版+新版筛查表")):
+    real_ip = request.headers.get("X-Real-IP")
+    logger.info(f"收到生成word请求:客户ip:{real_ip},查询参数:document_format {document_format},scanpage_format {scanpage_format},参数:{json_data}")
+
+   
+    headers = {"Content-Type": "application/octet-stream"}
+    if path := start_make_word(json_data, document_format, scanpage_format):
+        return FileResponse(path=path,headers=headers, media_type='application/octet-stream')
+    else:
+        return PlainTextResponse(status_code=500,content="服务器内部错误")
+"""
+
+
+
+@router.post("/make_word/vocabulary_assault")
+def make_word(json_data:dict,request:Request,
+            document_format:int=Query(1,description="1:docx;2.pdf"),
+            scanpage_format:int=Query(1,description="1:老版筛查表;2.新版筛查表;3.老版+新版筛查表")):
+
+   
+    headers = {"Content-Type": "application/octet-stream"}
+    if path := s2(json_data, document_format, scanpage_format):
+        return FileResponse(path=path,headers=headers, media_type='application/octet-stream')
+    else:
+        return PlainTextResponse(status_code=500,content="服务器内部错误")
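A sketch of downloading the generated document from /make_word/vocabulary_assault; the JSON body is whatever start_make_word in make_docx_demo/main_word_applet.py expects (that schema is not shown in this excerpt), so the payload below is only a placeholder:

    # Sketch: fetching the generated file (the JSON body is a placeholder; its schema
    # depends on make_docx_demo.main_word_applet.start_make_word).
    import requests

    params = {"document_format": 2, "scanpage_format": 1}   # document_format 1: docx, 2: pdf
    resp = requests.post(
        "http://localhost:8000/make_word/vocabulary_assault",
        params=params,
        json={},
    )
    with open("vocabulary_assault.pdf", "wb") as f:
        f.write(resp.content)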

+ 116 - 0
core/api_routes_jwt.py

@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+from fastapi import FastAPI, Form, HTTPException, Request,status,APIRouter
+import jwt
+from jwt.exceptions import ExpiredSignatureError,DecodeError,InvalidAlgorithmError
+from core.respone_format import *
+from tools.sql_format import UserCRUD
+import datetime
+import asyncio
+from tools.loglog import logger
+import traceback
+from common.common_data import SECRET_KEY
+
+router = APIRouter()
+user_crud = UserCRUD()
+
+
+def create_access_token(username: str):
+    payload = {
+        "exp": datetime.datetime.utcnow() + datetime.timedelta(hours=1),
+        "iat": datetime.datetime.utcnow(),
+        "username": username
+    }
+    try:
+        encoded_jwt = jwt.encode(payload, SECRET_KEY, algorithm="HS256")
+        return encoded_jwt
+    except Exception as e:
+        logger.info(f"出错日志:创建token中 {payload}  秘钥{SECRET_KEY}")
+        logger.error(f"{traceback.format_exc()}")
+        logger.error(f"{type(e).__name__}, {e}")
+
+
+
+def verify_token_sync(token: str):
+    if not token:
+        return 1
+    try:
+        decoded_payload = jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
+        if not decoded_payload.get("username"):
+            return 2
+        else:
+            return 0 
+    except ExpiredSignatureError:
+        return 3
+    except (InvalidAlgorithmError,DecodeError):
+        return 4
+
+
+async def verify_token(token: str):
+    loop = asyncio.get_event_loop()
+    return await loop.run_in_executor(None, verify_token_sync, token)
+
+async def verify_token2(token):
+    msg_verify_code = await verify_token(token)
+    if msg_verify_code != 0:
+        if msg_verify_code == 3:
+            return resp_401(message="The token has expired")
+
+        error_msg = {
+            1: "No token provided",
+            2: "Token lacks username",
+            4: "Token decoding error"
+        }.get(msg_verify_code, "Invalid token")
+        return resp_400(message=error_msg)
+    return 0
+
+
+@router.post("/user/login")
+async def get_token(username: str = Form(...), password: str = Form(...)):
+    user_info = user_crud.get_userinfo_by_account(username) 
+    if user_info:
+        userid, account, true_pwd, uname, create_time = user_info
+    else:
+        return resp_400(message="user does not exist")
+
+    if password==true_pwd:
+        access_token = create_access_token(username)
+        return_data = {"access_token": access_token}
+        return resp_200(data=return_data)
+    else:
+        return resp_400(message="Incorrect username or password")
+
+
+@router.get("/user")
+async def get_user(request:Request):
+   
+    token = request.headers.get("Authorization")
+    try:
+        decoded_payload = jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
+        account = decoded_payload.get("username") 
+        user_info = user_crud.get_userinfo_by_account(account=account)
+        userid, account, true_pwd, uname, create_time = user_info
+        data = {"id":userid,"name":uname,"account":account,"create_time":create_time}
+        return resp_200(data=data)
+    except ExpiredSignatureError:
+        return resp_401(message="The token has expired")
+    except (InvalidAlgorithmError,DecodeError):
+        return resp_400(message="Token decoding error")
+    except Exception as e:
+        return resp_400(message=f"Error in get user information.{e}")
+
+
+@router.post("/user/logout")
+async def get_token(request:Request):
+    token = request.headers.get("Authorization")
+    try:
+        decoded_payload = jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
+        account = decoded_payload.get("username") 
+        logger.info(f"账号:{account}注销成功")
+        data = {"result": "注销成功"}
+        return resp_200(data=data)
+    except ExpiredSignatureError:
+        return resp_401(message="The token has expired")
+    except (InvalidAlgorithmError, DecodeError):
+        return resp_400(message="Token decoding error")
+    except Exception as e:
+        return resp_400(message=f"User logout error.{e}")
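The JWT flow above: /user/login exchanges form credentials for an HS256 token signed with SECRET_KEY (one-hour expiry), and /user and /user/logout read that token directly from the Authorization header. A client-side sketch with placeholder credentials and host:

    # Sketch: login, then call an authenticated route (placeholder credentials and host).
    import requests

    base = "http://localhost:8000"

    login = requests.post(f"{base}/user/login",
                          data={"username": "demo", "password": "demo"})
    token = login.json()["data"]["access_token"]

    # The routes decode the raw header value, so the token is sent without a "Bearer " prefix.
    me = requests.get(f"{base}/user", headers={"Authorization": token})
    print(me.json())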

+ 37 - 0
core/respone_format.py

@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+from fastapi import status
+from fastapi.responses import JSONResponse
+from typing import Union
+
+
+def resp_200(*, data: Union[list, dict, str]) -> JSONResponse:
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content={"code": 200, "message": "success", "data": data}
+    )
+
+
+def resp_400(*, message: str = "Bad Request", data: Union[list, dict, str] = None) -> JSONResponse:
+    return JSONResponse(
+        status_code=status.HTTP_400_BAD_REQUEST,
+        content={"code": 400, "message": message, "data": data}
+    )
+
+def resp_401(*, message: str = "The token has expired", data: Union[list, dict, str] = None) -> JSONResponse:
+    return JSONResponse(
+        status_code=status.HTTP_401_UNAUTHORIZED,
+        content={"code": 401, "message": message, "data": data}
+    )
+
+def resp_404(*, message: str = "Not Found", data: Union[list, dict, str] = None) -> JSONResponse:
+    return JSONResponse(
+        status_code=status.HTTP_404_NOT_FOUND,
+        content={"code": 404, "message": message, "data": data}
+    )
+
+
+def resp_500(*, message: str = "Internal Server Error", data: Union[list, dict, str] = None) -> JSONResponse:
+    return JSONResponse(
+        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+        content={"code": 500, "message": message, "data": data}
+    )

+ 28579 - 0
data/all_exchange_words.txt

@@ -0,0 +1,28579 @@
+bigger
+staying
+appalling
+shipments
+blot
+considerations
+chile
+instruct
+durable
+gulfs
+sexiest
+versions
+yanks
+launch
+gees
+psyche
+infantries
+tethers
+sternest
+infused
+patterns
+rejoice
+exaggerated
+graves
+incite
+polymers
+mango
+spices
+leak
+cedar
+portals
+alleviate
+core
+restrictions
+interpretive
+refines
+Number
+assortments
+humors
+possible
+trapped
+poll
+equations
+tumor
+self-determination
+am
+narrows
+swivelling
+inability
+Migrant
+asserts
+antiquities
+retrained
+shampoos
+col
+November
+fasten
+carrots
+introductory
+calmly
+restroom
+nailing
+asylum
+frustrates
+Cod
+starvation
+duties
+situational
+screens
+engineers
+narrower
+armed
+texting
+purpose
+seagulls
+canon
+cosmology
+specialty
+bicycle
+tiredness
+griefs
+stutter
+kip
+hoisting
+alternatives
+takeovers
+denies
+socket
+paradoxes
+trample
+rebellious
+intends
+wheels
+Royal
+stated
+buyers
+mischief
+garnish
+minorities
+combing
+strives
+talking
+ratio
+behaviors
+studio
+timidest
+Chronicles
+flux
+massacring
+confident
+ruining
+comer
+spotlight
+depart
+feel
+weighs
+freaks
+astronaut
+sunday
+consolidate
+positioning
+Shallot
+simply
+warehouse
+react
+backcountries
+abduct
+honored
+illustrator
+torments
+seeming
+exclaim
+swirl
+tours
+extra
+gamble
+vegetation
+print
+more harmful
+Haitian
+spited
+galas
+Masculinity
+equation
+most indifferent
+fetus
+sympathized
+shabby
+splendid
+inhibited
+damaged
+pastries
+repositories
+secular
+vaporize
+decision-making
+Either
+attempted
+greenest
+despairs
+facilitator
+busying
+qualified
+caverns
+companions
+Impressive
+Wizard
+funnelled
+purging
+haircut
+breasts
+infrared
+garnered
+mates
+descend
+blanket
+listed
+Care
+avalanches
+al-qaeda
+shadiest
+evenest
+partners
+relation
+drummers
+asphalted
+lineman
+conscious
+bruise
+fresher
+pieced
+Stranger
+Opposite
+cured
+concentrating
+propensity
+toe
+soaping
+dedications
+comic
+loom
+overdosing
+lengthy
+inflating
+rejoins
+haunting
+literally
+emerges
+interviewing
+acquaint
+stranding
+mistakens
+cleansed
+largely
+mince
+convert
+sane
+extracts
+lands
+ecologists
+shored
+responded
+erosion
+totalled
+conference
+spokesman
+microwaving
+asks
+seeped
+awake
+announcing
+stag
+turfing
+chained
+thousands
+sings
+Conquest
+pivots
+ambition
+stairways
+purged
+satire
+ruffled
+technically
+manufactures
+january
+rids
+lengthiest
+most graceful
+newsrooms
+archbishops
+comprehensive
+snuggest
+teens
+willpower
+memoir
+Proper
+welfare
+bacteria
+letters
+fails
+anxious
+sacraments
+walked
+indigenous
+matured
+journeying
+clutching
+vocal
+hip-hop
+Local
+baiting
+flagship
+doorsteps
+starved
+greased
+electrodes
+week
+avoid
+forest
+volatile
+eggplant
+retell
+faded
+outset
+taxonomies
+collided
+treated
+awfullest
+leased
+miller
+voyage
+attain
+cabbed
+extinguishes
+ninth
+modernizing
+permitting
+whose
+passwords
+regressions
+swerving
+prognosis
+yards
+Innocent
+butlers
+chants
+silky
+woods
+factoring
+Blessed
+droned
+battlefield
+finch
+alleged
+Fatty
+daylighted
+rights
+segmenting
+Flounder
+properties
+barriers
+palpable
+okay
+manager
+mesh
+jarring
+more perpetual
+titanium
+deterioration
+revision
+claw
+cherishing
+Barons
+receptive
+headsets
+truly
+snickers
+shard
+overloaded
+abilities
+retire
+rats
+problems
+draws
+baskets
+distressing
+charm
+sill
+beholding
+cassettes
+decree
+sunned
+stowing
+RADAR
+obvious
+intervening
+ornamenting
+louder
+assignments
+fringed
+beware
+stuttering
+answers
+Homer
+clung
+koshering
+cooking
+settings
+rebellions
+late-night
+Personal
+overlapping
+previous
+outcry
+hopelessly
+frailest
+consummated
+soothing
+electives
+sorted
+cinnamon
+sharp
+milkiest
+Genocide
+cheek
+progressions
+characterizing
+downturns
+blooms
+pastured
+compositions
+degrees
+Fourteen
+bulldozer
+crossings
+more compact
+intense
+hikers
+rationalized
+essences
+patted
+glared
+Crossroads
+commission
+pinch
+mightier
+conform
+peasants
+articles
+deeper
+barber
+transmits
+floor
+pitchers
+freestyled
+flawing
+breaker
+culprits
+four-year
+wildflower
+classifying
+treatise
+grilling
+sole
+accelerates
+guineas
+lucratives
+tamper
+polenta
+more regular
+unloading
+technician
+Distinguished
+sanity
+hardware
+await
+crosstalk
+tailors
+mitigate
+control
+concurrent
+Sneakers
+hoarding
+ecosystem
+epics
+predicate
+keeping
+revolves
+addiction
+emphasis
+adopts
+unfortunately
+grimacing
+drinkers
+strollers
+bacon
+horsemen
+hordes
+most poetic
+neutral
+objected
+lurch
+hum
+pulmonary
+document
+Respected
+elevator
+repairman
+altars
+walling
+yours
+introduction
+recession
+sawing
+breathing
+budded
+thorn
+ardentest
+wither
+headphones
+toxic
+whistling
+stone
+dove
+stealing
+strategic
+Feat
+examined
+graded
+slaps
+languages
+Adolescence
+aerial
+co-op
+slashed
+tuesday
+generalize
+capabilities
+laurel
+gadget
+logistical
+from
+publicist
+pickling
+versing
+resonated
+stalked
+proclaiming
+decks
+endeavors
+mayo
+potato
+kills
+blousing
+backer
+relives
+loyalists
+influenced
+anti-war
+Delicate
+solace
+shopkeeper
+bake
+translucent
+toddler
+Specifically
+intimated
+illegitimate
+utopia
+warranting
+peculiar
+woe
+wrath
+meadow
+tangoed
+villager
+rocky
+stumbled
+yearns
+prominent
+kipping
+clear-cut
+vacanted
+proprietors
+purpler
+warnings
+guardian
+driveway
+links
+speech
+globalizations
+forging
+methodological
+needier
+little
+cage
+minds
+extracted
+profiting
+stood
+molds
+bathings
+a
+bikinis
+rescuers
+shoveling
+infidelity
+outfitted
+Radiation
+sag
+parachute
+blackness
+councilmen
+utmost
+subtitle
+specific
+Vocational
+cockpits
+qualities
+giddier
+shivered
+dresser
+beat
+recruit
+impressed
+jollying
+arrived
+guidance
+Indian
+sir
+convents
+farmed
+horns
+glaciers
+Conditioning
+recognise
+trouts
+lendings
+clear-cuts
+probes
+intervene
+inputting
+Nationalist
+adhered
+chilliest
+vampires
+Curricular
+most poignant
+remarrying
+coronary
+mosses
+official
+annoyances
+cantons
+regards
+schoolboy
+endorse
+poring
+alienating
+amenities
+Arsenic
+agrarian
+farewell
+cuts
+mercy
+moodier
+precisest
+United
+finches
+towered
+Program
+ad
+permanents
+imprisonment
+roundtable
+retailer
+Terra
+defender
+lapses
+soda
+overtakes
+reconciled
+attracts
+false
+throning
+Service
+surrogate
+discard
+converses
+accessory
+furiously
+modest
+ruin
+shifted
+separation
+raisin
+patron
+Immune
+enticed
+blindly
+stuffs
+ripped
+priming
+subpoena
+more noticeable
+dustbin
+operated
+jean
+more colonial
+transplanting
+leases
+traction
+consult
+procurement
+exotic
+observers
+sprout
+rationing
+arriving
+utilized
+wanned
+populace
+games
+enrichment
+faxing
+Horizontal
+snorts
+pull
+uphill
+ash
+range
+nephews
+birdies
+allying
+pried
+ants
+lettuce
+Two
+good-bye
+blog
+radiuses
+conceptualized
+reverence
+civilized
+front-page
+hill
+lives
+would
+biggest
+attract
+murmuring
+pipelines
+devotes
+hospital
+suspicious
+blazers
+clap
+terminating
+been
+maneuvered
+sleeper
+tube
+licensing
+alleviated
+thyroid
+saddling
+marginalizing
+museums
+miring
+virgins
+cloudier
+spirituality
+Encyclopedia
+greediest
+enlarged
+feathering
+crib
+pullouts
+morphing
+ships
+chambers
+equaling
+Amazing
+available
+rarely
+one-way
+pops
+crumple
+oversight
+edgiest
+ignites
+cleric
+cottage
+textile
+lotions
+apron
+helpings
+hedge
+Mist
+arbor
+tentative
+rainforest
+buffing
+monthly
+activates
+fantasies
+quiet
+decide
+seized
+forestry
+quieter
+garage
+Desperate
+farmhouses
+starve
+distraught
+following
+scarves
+evoking
+good
+personalize
+tastier
+promotional
+magnify
+saddle
+temperament
+hampering
+debacles
+torpedo
+lie
+Mundane
+dicing
+surprisingly
+reconciles
+contesting
+morphs
+england
+encourage
+veggies
+Councils
+fondest
+risk
+mayor
+progressing
+woken
+struggle
+fever
+diplomatic
+court
+Nonexistent
+operate
+decorations
+soften
+Statewide
+residence
+completely
+exposures
+abdominal
+consumes
+mahoganies
+man
+most helpful
+Ho
+situating
+pairings
+feeding
+consoling
+jab
+seduce
+correspondence
+windiest
+overthrow
+delved
+stunk
+withdraws
+insights
+stamps
+wield
+remarkably
+most singular
+globes
+woollen
+mode
+racking
+jogging
+alternating
+pecks
+detour
+braving
+designating
+smeared
+accurately
+piling
+trudging
+airliner
+guys
+recur
+grafts
+rang
+tearing
+brochures
+ruffling
+Caucasian
+Cross
+resources
+productivity
+responding
+reactor
+aw
+shafted
+farms
+indonesian
+imaginary
+polling
+catfishing
+dripped
+reef
+charisma
+skate
+frequent
+frowning
+Californian
+clamor
+cookers
+fells
+gestures
+moving
+dwarfed
+Grammy
+teen
+chest
+cellular
+colonists
+mud
+subliming
+drunken
+species
+telephoned
+hinted
+obstructions
+mandating
+Zionist
+shrills
+bloated
+Pervasive
+regimes
+halftimes
+sects
+terminate
+financially
+Reasoning
+gel
+soaring
+wiggling
+invitation
+peoples
+dieting
+bench
+depict
+argument
+lump
+districts
+healings
+tomorrow
+honed
+neglects
+commend
+grains
+greasing
+prevail
+dishwashers
+Over
+distributions
+sizable
+affirmations
+elephant
+fleck
+afflict
+loosen
+buildup
+stunt
+wields
+Subject
+purest
+denting
+more successive
+retards
+learning
+drumming
+rhetorical
+swept
+rediscover
+ranked
+tank
+lofts
+autographs
+interrogated
+encourages
+crowd
+addition
+pursued
+trendy
+entertainer
+fabling
+harries
+victimizing
+roars
+weekday
+granddaughters
+stabilizes
+impacting
+crisp
+DICK
+harmless
+calory
+hotpots
+endowing
+cute
+bordered
+invent
+hogan
+braver
+load
+reflects
+swallow
+snickering
+Ailing
+dysfunctional
+dislike
+wildfire
+pose
+guarding
+most peculiar
+polite
+riddling
+empowered
+more mutual
+shelter
+subsequently
+hit
+bewildering
+wildest
+helpless
+compresses
+storefront
+bilinguals
+recovering
+aided
+tiles
+bees
+zooming
+pieces
+eddying
+newest
+web
+fanny
+medals
+arisen
+speculated
+jackets
+kick
+shrug
+adultery
+catching
+Respect
+weeded
+ripened
+waning
+add
+brothers-in-law
+apartment
+for
+thinkers
+most casual
+front-pages
+stripped
+guiding
+Thou
+notices
+vouchers
+poses
+yacht
+avocado
+firearm
+profoundly
+frescoes
+director
+sodium
+whore
+more mystical
+voltage
+molests
+snailed
+Egypt
+mows
+discriminatory
+specter
+emphatically
+professors
+dynamics
+cartridge
+Resistances
+importing
+dome
+november
+shrimps
+Cracks
+yachted
+Letterbox
+idea
+stripping
+disgraces
+collectively
+occupies
+vetoed
+viciouses
+distinguishing
+cants
+transplanted
+stuns
+retain
+Explicit
+beetles
+largest
+Kurd
+injects
+persuasion
+coasted
+tasted
+negotiate
+wags
+initially
+murals
+erects
+spilling
+contaminant
+cantor
+flick
+Twelfth
+unisons
+slowing
+loudest
+zinc
+scrolling
+obsoleted
+corrections
+most modest
+stage
+fabricates
+verbally
+anguishes
+Ne
+ranking
+Civilized
+misused
+unborn
+simulation
+eighteen
+nobody
+tortured
+university
+Alcohol
+expanded
+bagging
+connect
+eternity
+generalizes
+brutally
+shrilled
+any
+fake
+cadet
+assembles
+ago
+wished
+flare
+simplified
+roaming
+capitalist
+madly
+transcends
+name
+fakes
+shacking
+drains
+elicits
+branding
+Technically
+headmaster
+pets
+cracked
+blatant
+quaked
+culprit
+Aluminium
+leaders
+craftsmen
+obscenity
+manufactured
+splendor
+rooster
+twisting
+Recreations
+politically
+unused
+property
+Shit
+griffins
+agent
+presumptions
+amusements
+Outside
+capable
+taverns
+delayed
+recommends
+necessity
+confesses
+spas
+alumnus
+straining
+ongoing
+junctions
+gorged
+bizarre
+moistest
+retirees
+seam
+galls
+compatible
+rhythmic
+occurs
+presidency
+soloed
+bracketed
+Hearts
+travelled
+nursed
+ailment
+estimation
+teeth
+congresses
+essence
+Miracle
+counterinsurgencies
+Onward
+spokesperson
+rehabs
+sends
+lobby
+terrestrial
+clubs
+replaced
+recruiters
+democratization
+digesting
+dot-com
+airspaces
+terrible
+undergoes
+transcend
+eyes
+seaweed
+caking
+panted
+provoking
+meagerer
+backseat
+undertaken
+ostensibly
+flees
+decorate
+Behind
+pulsing
+exploited
+snail
+orient
+institutionalizing
+sighting
+tech
+exemplify
+blamed
+vapour
+sanitation
+holy
+dispatchers
+cookie
+alternate
+rupture
+cheats
+stemming
+rancher
+foe
+chamberlain
+excess
+beaming
+collier
+puncture
+torqued
+navigate
+browsers
+perpetrating
+decorative
+cardiac
+lavender
+reproduced
+oxford
+kidnappings
+renovates
+rebating
+nutmeg
+EverGreen
+consisted
+clever
+ethanol
+compassionate
+silenced
+spirit
+cared
+wrongs
+Tire
+commanded
+intentions
+guttered
+Short
+most artistic
+swaying
+oiling
+delighting
+admits
+boyfriend
+screws
+recaptures
+madnesses
+spews
+rehabilitated
+genocide
+swirls
+algae
+saturate
+bourgeoisie
+hamstrung
+sponging
+Protestant
+created
+intrinsic
+tenderness
+butterfly
+blunter
+straightens
+homering
+knowledgeable
+schoolwork
+fans
+vote
+Unemployed
+portions
+harnessing
+duel
+nostalgic
+nutrition
+sitcom
+matchups
+clerics
+takes
+Hawaiian
+tourism
+raft
+overstated
+gauge
+listener
+freight
+amplifies
+hide
+kettle
+aliening
+voids
+homosexuals
+laughter
+guerrilla
+exiting
+damaging
+blunts
+scale
+hype
+isles
+brunches
+inflammations
+aim
+grandmother
+pecking
+mutation
+disarms
+cameraman
+statewide
+reject
+petition
+rejoiced
+superintendent
+label
+solaced
+freshly
+revenging
+persecution
+bandage
+unanswered
+lecture
+steams
+swatted
+Minuses
+cloaking
+Mecca
+conceiving
+nor
+ceiling
+convicted
+basin
+chaplains
+pursuits
+laxest
+check
+victories
+cushioning
+fennel
+scribble
+Surreal
+charter
+cemetery
+pots
+endorsements
+bested
+trickling
+stomach
+picnic
+raccoons
+miraculously
+bridge
+elite
+compensates
+handicapping
+sword
+reservoirs
+jumbling
+discounts
+climb
+painful
+associate
+demonstrating
+voice
+entails
+Voluntary
+tidal
+straw
+substitution
+abruptly
+Bug
+given
+Left
+allegations
+undergraduates
+maths
+tail
+recurred
+most timeless
+insulations
+golden
+pertaining
+philosophers
+converted
+paddles
+dwarfs
+perpetual
+strangling
+demolish
+citing
+desiring
+more recurrent
+victimized
+ones
+carpeted
+caged
+gypsies
+jawed
+chili
+locomotives
+Yearly
+lore
+psyched
+bears
+Lean
+lorries
+rake
+cafeteria
+lazier
+mri
+wine
+ownership
+densely
+facade
+austrian
+stacked
+comets
+stretch
+hedged
+leveling
+chat
+stereotypes
+pregnancy
+envied
+perch
+symmetry
+axis
+excepted
+modeling
+vantage
+interpretations
+lapped
+swallowing
+exclude
+breezing
+hallways
+bailed
+palate
+vigil
+statelier
+scholars
+confers
+beckons
+visualized
+scalloped
+blizzard
+stressing
+most thankful
+trumpet
+arrested
+centers
+featured
+finance
+gen
+intruding
+torches
+refinements
+programming
+representative
+preoccupying
+ambulance
+rabbi
+gossiping
+consultants
+solitary
+fine
+goggles
+tan
+minimize
+cash
+baselines
+dramatically
+wildly
+preach
+lenders
+overlaying
+livelihood
+victory
+igniting
+gapes
+proposal
+escalates
+end
+wheat
+napped
+countryside
+Do
+padded
+relinquishes
+contradictions
+detention
+specimen
+whirling
+wounded
+more adventurous
+jays
+residences
+identifying
+dealerships
+Jack
+paperback
+enlightened
+suffer
+burner
+fuelled
+Causal
+midsection
+source
+reprinting
+Culprit
+slimmest
+tacks
+progress
+wined
+bound
+specialized
+starches
+Refuges
+deacon
+processings
+empire
+gels
+banqueted
+divinity
+grunt
+inhabit
+modems
+pioneered
+vain
+informant
+curiosity
+abandons
+saturated
+accessary
+devised
+proton
+session
+modelled
+usages
+quoted
+Resentment
+successfully
+abused
+aboriginal
+slice
+characterized
+Willing
+frees
+sociologist
+guaranteed
+rejects
+rashest
+firefighter
+overdose
+Forest
+bitter
+militia
+nominal
+savagest
+clothe
+mourning
+finales
+concert
+disrespected
+indexed
+converse
+evolve
+Etc
+stolen
+safely
+forbid
+aimlessly
+faulty
+ironic
+glands
+more oppressive
+premiered
+sanctuaries
+deregulation
+year
+brightened
+transportation
+stomped
+laughs
+charms
+pulse
+verifies
+narratives
+socialize
+Graduate
+projection
+distances
+widest
+denomination
+prematurely
+authorizations
+remotely
+anniversary
+um
+illustrations
+cub
+dumplings
+heavily
+salvage
+pollster
+mandates
+muddiest
+schools
+peppered
+dispositions
+rogues
+expired
+reality
+serves
+idioms
+daylighting
+credential
+setbacks
+steaming
+absurd
+Peruvian
+million
+accessible
+arouse
+nipples
+shaves
+distancing
+policymakers
+graduating
+gossips
+propositioned
+tablecloth
+mired
+risking
+abbeys
+brace
+felling
+putts
+legalizes
+description
+sanction
+refusal
+considerate
+minded
+herself
+nod
+creams
+canal
+grapples
+ridicule
+mainland
+panties
+contemplating
+handcuff
+pushed
+value
+bartenders
+Lag
+Welsh
+more receptive
+stubs
+asparagus
+numerical
+Clockwise
+This
+parochial
+granddaughter
+hollow
+toning
+mere
+eligibility
+younger
+clench
+figure
+Pacific
+specializing
+fosters
+towed
+cornerback
+distinctly
+quarterbacked
+asses
+shield
+mystics
+inches
+scouted
+demographics
+supernatural
+man-made
+pluck
+defendant
+cynical
+introduce
+Crude
+sulfured
+plastering
+most restrictive
+intended
+influence
+damned
+cabinet
+gingering
+chatter
+deceiving
+Champagne
+straighten
+salespersons
+monopoly
+stressors
+countings
+pastures
+reinstate
+looting
+limo
+timber
+medication
+responsive
+khan
+increase
+grown-up
+ensued
+one
+pursuing
+bay
+economists
+breakup
+erased
+astronomy
+four
+expression
+Prominent
+Logging
+huffs
+finnish
+riveting
+taunted
+lacing
+wintered
+expands
+stubborn
+frustrating
+disregarded
+checking
+schoolteacher
+cans
+supermen
+sow
+floating
+mistakenly
+yachts
+rebuilding
+applicant
+braiding
+swords
+combatant
+Pictorial
+outcomes
+dominate
+wobble
+fascist
+Living
+tendency
+trailer
+Tangled
+neatly
+timers
+hopeless
+sleds
+roofs
+protects
+generics
+foothills
+trait
+prove
+windfall
+desk
+maintenance
+kitty
+draft
+troops
+terribly
+homogeneous
+fortress
+descriptive
+hemisphere
+Micro
+magnesium
+banker
+genes
+bravest
+reflexes
+smacking
+contemporaries
+Bluffs
+roofing
+mustards
+Berg
+humbling
+outraged
+peeks
+lances
+discharge
+necklaces
+Cream
+feels
+hot
+foxing
+climbers
+persistence
+clouts
+enclosed
+prompts
+anesthesias
+most resilient
+deprive
+vacationing
+redesigned
+affiliations
+doorstop
+cornerstones
+wire
+nail
+comrade
+propaganda
+blackouts
+escalations
+swivelled
+Fuck
+obsession
+slide
+Salient
+lawn
+headaches
+hitting
+diaper
+jumps
+strings
+probed
+slotting
+about
+has
+crouches
+digestive
+politics
+sob
+careered
+cranberry
+surfing
+marred
+inflammation
+reds
+closure
+weeing
+Basic
+pages
+demonstrated
+lime
+yanking
+infiltrated
+mailing
+pawed
+disappoint
+bond
+most productive
+auditioning
+abstractions
+Southwest
+smarter
+kindest
+populated
+crackers
+emergencies
+bounded
+blockades
+wolves
+expressions
+abolish
+fetal
+deadly
+grader
+stir
+climber
+Inside
+disgust
+operative
+creepy
+carriage
+repelled
+bald
+plied
+died
+atmospheric
+suffice
+Rose
+lowing
+Measurable
+perpetuating
+resulting
+hides
+sledding
+Parlor
+piston
+accomplished
+marveling
+schoolboys
+see
+thriller
+coherence
+electromagnetic
+funks
+ulcers
+kohl
+offences
+encounter
+grits
+ballads
+televises
+wrongly
+redefine
+cameras
+pardons
+guidelines
+smuggled
+arguments
+vitamin
+turnaround
+more palpable
+fiddle
+polls
+screeched
+stored
+Thrilling
+worlds
+representation
+cannoning
+inning
+monkeys
+graduate
+sported
+voided
+trafficked
+predicting
+slated
+Drink
+sectioning
+knight
+menopause
+primitives
+submits
+stat
+slaughtering
+question
+commandment
+bewilders
+suited
+flapped
+catastrophes
+nannying
+gasp
+hurled
+quizzing
+acquired
+surfed
+revisits
+ratify
+hostesses
+defines
+accuses
+scrambled
+semifinals
+penfriend
+ensure
+fountain
+cotton
+meats
+loafing
+complies
+ruins
+Oregano
+contains
+reformer
+database
+passports
+stereotyped
+favouring
+cellar
+lacks
+buries
+tallies
+depending
+robes
+founded
+near
+sectioned
+formula
+pitching
+Undercover
+attendant
+navigated
+belt
+vaster
+smoothest
+legitimate
+afternoon
+wrinkling
+reiterating
+masquerades
+crash
+presumes
+Outright
+Fallout
+boroughs
+eminent
+condo
+modifying
+theory
+abides
+Yours
+lobsters
+dignified
+noses
+greyed
+mingling
+Presbyterian
+transparency
+curricular
+controls
+belong
+mahogany
+embeds
+climbed
+brittle
+chanted
+spanning
+technique
+stun
+national
+bang
+latching
+spray
+alarming
+economic
+deficiency
+ambassadresses
+Fiat
+blues
+inference
+title
+authoring
+unaware
+taxpayers
+Madness
+referendum
+ale
+quartered
+labyrinth
+beers
+punctures
+metre
+ferociousness
+announced
+rings
+seafoods
+grimly
+balloting
+lulled
+restarts
+boon
+publicly
+moratoria
+Summer
+waterier
+layoffs
+Geology
+includes
+stigmas
+congressmen
+nightmare
+pc
+teamwork
+filleting
+Chilean
+peer
+exceptions
+mistaking
+conservation
+reign
+emanate
+balloons
+capping
+contacts
+grace
+parenting
+unwillingly
+parmesan
+roves
+pitied
+Hell
+busy
+pawns
+loner
+undertakes
+skyrockets
+beds
+defeated
+solidest
+gasping
+inscribe
+gall
+downplayed
+preservation
+tweed
+faculty
+volunteered
+embarrassing
+inventions
+legislative
+tobacco
+most mutual
+Knobs
+traditionalist
+Preface
+division
+aggregated
+promptly
+already
+rugs
+recoils
+alleging
+quicker
+beaches
+drastics
+miniatures
+Great
+deported
+separate
+terraced
+emigration
+equipping
+tibetan
+restaurants
+narrator
+feminine
+In
+more toxic
+admiring
+stealth
+Mourning
+breeze
+curriculum
+amounted
+most vibrant
+enact
+Nearest
+distraction
+organizational
+asteroid
+maximizes
+rocking
+DJs
+distorted
+forgetful
+commandments
+veto
+Thai
+eave
+solacing
+gilded
+fenced
+patients
+receiver
+Min
+intimidate
+concerning
+Mechanical
+functional
+transforms
+befriended
+illusions
+splashing
+sodding
+scheduling
+cushion
+co-hosted
+ideologies
+patterning
+stores
+clutched
+beets
+landowner
+ballparks
+rainstorms
+recommendations
+pond
+users
+performing
+receive
+shuddering
+UFO
+affluent
+annexing
+cyberspace
+allergic
+paced
+groans
+airmailing
+intel
+splits
+faints
+more favorable
+complements
+skeleton
+radiating
+unraveling
+festive
+reunites
+critical
+comings
+cocks
+meals
+most profitable
+overrules
+feathered
+Net
+drafted
+market
+coldest
+entrust
+between
+lithuanian
+newsletters
+schoolchildren
+southeastern
+handsome
+transcended
+plank
+reinforcing
+narration
+rind
+pouches
+dresses
+funding
+technicians
+hardly
+sufferers
+acres
+approval
+rebirth
+monarchy
+manila
+consistent
+funkier
+head
+orchestrate
+critics
+treaties
+accurate
+posed
+exempt
+prolonged
+matching
+Roe
+motorist
+jointed
+arenas
+suitcase
+engravings
+luminous
+fielder
+orphaning
+detains
+clinching
+security
+Art
+busier
+Infinite
+halo
+tabloid
+gears
+airing
+baited
+waiting-room
+chateau
+stadium
+trailing
+trash
+winces
+landscaped
+reverting
+parameter
+commencing
+Sunrise
+solidifies
+twain
+cayenne
+done
+orders
+cracks
+curve
+amnesty
+fetching
+efficiency
+coyotes
+convoys
+gorgeous
+gushing
+tests
+cares
+monetary
+rummaging
+turnover
+nerving
+speaking
+reptiles
+southwestern
+prevalent
+scoffing
+co-authored
+reconstruction
+orchestrating
+multiplying
+flatly
+extensive
+renewable
+contingency
+enthusiastics
+shallot
+preview
+glares
+nigerian
+italy
+briefcase
+distracts
+sorts
+agreements
+taint
+speediest
+cleaned
+Imperial
+camera
+insulation
+conspicuous
+hepatitis
+blizzards
+uniquely
+remained
+perpetrate
+peeped
+empirical
+Into
+woman
+Icon
+calmed
+jockey
+retirement
+clays
+raids
+relayed
+hiding
+complicate
+paranoia
+kingdom
+miss
+penetrations
+Somali
+alas
+cosmetic
+muster
+voices
+sake
+pe
+retarding
+manifesting
+seals
+route
+displaying
+input
+bread
+junctures
+Holly
+vaccination
+restart
+singapore
+disdains
+regretting
+potter
+extinguish
+dwarf
+refuting
+flings
+weariest
+more turbulent
+stew
+ignite
+earthquakes
+grateful
+celebrations
+feminist
+conformity
+getaway
+hiring
+stillness
+carrot
+vastest
+budge
+dim
+glacier
+policy
+underlined
+heeding
+mobbed
+guttering
+car
+wider
+embodiments
+lessen
+square
+grinning
+fluttering
+bullshitted
+architectural
+geometric
+analysts
+sustains
+intolerances
+boniest
+skew
+initiative
+eyepieces
+fumbles
+signing
+hinge
+amo
+via
+mole
+eyeballed
+path
+front-paging
+lets
+vertically
+Integrals
+settling
+entity
+tragic
+insurer
+compound
+tumors
+blockading
+Plank
+emptying
+convened
+grooming
+slashing
+Second
+colour
+vistas
+chewed
+complexes
+cardinal
+wallet
+mellower
+grandfathers
+Koreans
+told
+suburbs
+envoys
+encountering
+re-entering
+rain
+rested
+accounted
+plates
+envy
+behalf
+disruptive
+self-efficacy
+harbouring
+banished
+suitcases
+institutionalizes
+skeletal
+degenerate
+mexico
+courier
+most rational
+transfer
+ensures
+ask
+angrily
+Basil
+bravely
+dogging
+saturday
+aerobics
+merits
+critically
+prophets
+maneuver
+emotional
+Cylinder
+backers
+commending
+sputtered
+counterparts
+sled
+rapidly
+Awkwardly
+commandoes
+incidents
+observes
+device
+seashell
+exaggerates
+dueled
+folks
+logistics
+obedience
+necessary
+affords
+Ad
+frequency
+footnote
+impair
+fax
+sucking
+appointing
+injecting
+methodologies
+gales
+deeming
+dissidents
+embodies
+curses
+Customs
+please
+slept
+ma'am
+usually
+subpoenaing
+thanksgiving
+withstanding
+emotion
+paramedic
+Nowhere
+percents
+upstream
+indicted
+morally
+tribunals
+defiance
+bitches
+colleague
+Horrific
+preserved
+truer
+school
+crow
+Pastor
+administrators
+lunged
+Quad
+imagination
+heed
+cabbages
+sterile
+link
+kuwaiti
+dotting
+disappeared
+exemplar
+overstating
+wealthier
+tune
+edged
+tempers
+more elegant
+Supernatural
+orthodoxy
+bunkered
+bungalows
+recognizing
+noncompliance
+invasion
+draining
+Supplemental
+literature
+commonly
+battle
+suits
+cuban
+cascaded
+snickered
+Gag
+evil
+panels
+darken
+balloon
+supporters
+classical
+Romantic
+minimal
+lounges
+silently
+key
+clamp
+limbs
+glues
+dedication
+restarted
+Censored
+apprenticed
+segmented
+fumbled
+indexes
+implanted
+betraying
+exercises
+railings
+marshals
+imprinted
+interrogating
+recruitment
+mended
+lizards
+traitor
+teeters
+tads
+diverted
+quoting
+diversion
+Selected
+tidalest
+ushered
+wedging
+linkages
+northern
+adults
+more experienced
+evacuating
+gateway
+rotated
+pedestrians
+bronze
+salient
+orchestrated
+prosthetics
+smears
+Odds
+edge
+disseminate
+bumps
+strained
+insulting
+squats
+abortion
+mistrust
+hardships
+observations
+blossomed
+pueblo
+tills
+Receptive
+cooed
+update
+cooks
+delicacies
+towards
+hubs
+tattooed
+sapped
+fantasized
+most receptive
+airmails
+Knowingly
+fares
+Mormons
+disarmed
+example
+devotion
+Atlantic
+retailing
+puffier
+hosed
+manuals
+waffling
+fatter
+relay
+founds
+grooms
+jailed
+components
+playmate
+most painful
+classing
+resisted
+internship
+paraded
+urges
+persisted
+snapped
+creators
+supervise
+validates
+ovals
+coupons
+bogus
+amendments
+migrating
+enclose
+bulb
+wane
+dilutes
+rifting
+compared
+sampled
+misleading
+admires
+fastball
+motors
+Object
+flinching
+caucuses
+posit
+ejected
+harps
+punches
+optimum
+abroad
+trek
+fearing
+offending
+rehabilitating
+Indonesian
+rack
+landlord
+bustling
+mason
+mimicked
+flowered
+nostril
+heroism
+roster
+seeping
+Capital
+ranks
+girlfriends
+hull
+critiques
+greedier
+infamous
+recoveries
+invading
+retrospect
+sobered
+alive
+sloped
+year-old
+restores
+hindering
+most festive
+commentary
+devours
+predictors
+identities
+embraced
+power
+bathroom
+most prominent
+bodyguards
+incidentally
+Playful
+harshly
+dubs
+armies
+identified
+successor
+yielded
+Cranes
+finer
+quainter
+interview
+mints
+lecturing
+seducing
+difficult
+intricate
+connects
+alarmed
+profitable
+packaged
+contradict
+relied
+mortals
+drones
+site
+adventuring
+coasting
+cookbooks
+most prevalent
+transitions
+sometime
+oiliest
+outlet
+depleting
+factors
+Russia
+mafia
+Pure
+Commons
+papas
+and
+plea
+thumping
+uniform
+associates
+outlines
+underwater
+choose
+refrained
+waives
+succeeding
+probation
+cabs
+hospitalization
+lines
+respect
+averting
+re-elected
+response
+most comparable
+bombs
+obliterate
+hogs
+puppy
+hinged
+savages
+sinned
+manifestation
+disapprove
+placing
+earning
+joked
+School
+deserves
+professes
+precede
+meadows
+riot
+levees
+sweet
+Relevance
+accumulate
+introducing
+romance
+most coastal
+folding
+freshers
+belgium
+disruptions
+crawl
+dismantling
+Objective
+bakes
+women
+reel
+disrespects
+juicy
+indifference
+goodwill
+plumber
+dramas
+snagging
+templates
+Favorable
+forgives
+flickering
+deeds
+hurling
+lad
+tributes
+crumbed
+neglecting
+plaintiffs
+converges
+scurry
+roving
+camouflaging
+tugs
+reaction
+Constitutional
+bursts
+overwhelm
+routed
+presumed
+communions
+hitch
+spoils
+compartments
+preying
+broadcasters
+treating
+hog
+conserves
+cringes
+trafficker
+Emeritus
+anatomy
+franc
+bittersweet
+sushi
+inclines
+seekers
+colors
+bailouts
+centimeters
+Round
+seek
+swamping
+creak
+porter
+chartering
+self-reporting
+whiling
+neighborhood
+rivalled
+canteens
+disturb
+gouging
+presiding
+flints
+justice
+suppressed
+Saw
+swings
+Written
+sausage
+snaking
+basemen
+linebacker
+Solace
+translated
+ultraviolet
+select
+Indigenous
+medalist
+hire
+noised
+morrow
+sun
+breathtaking
+sex
+remaining
+furniture
+abolished
+races
+tendered
+bastard
+streamline
+judiciaries
+bathe
+Runaway
+jacks
+canteen
+oily
+frown
+pings
+uglier
+wildlife
+discrimination
+utterance
+distribute
+dictators
+descendant
+blistered
+rid
+caption
+uneven
+roaches
+basics
+flies
+diversest
+osteoporosis
+if
+Maximum
+partying
+antiqued
+statistically
+rivalries
+kin
+operatives
+mother
+where
+toasts
+jagging
+vesting
+wade
+uncovering
+deter
+Crusade
+fairest
+encouraging
+ethnic
+vacancies
+mansion
+chemotherapy
+wrestling
+cuffing
+halving
+palette
+gush
+transports
+bombings
+volt
+Supposed
+damage
+rainfall
+synagogue
+fervor
+cyclists
+argue
+binoculars
+earmarked
+alluding
+nobilities
+language
+sway
+Disgusted
+elbow
+catchers
+Inlet
+telephone
+mandate
+transited
+germ
+atmosphere
+skinnier
+Islamists
+villas
+handled
+directories
+underestimate
+grabbing
+directory
+plainer
+vector
+bath
+grandest
+attempts
+pregnant
+shrinking
+molecules
+Paradises
+lightweights
+punishes
+lake
+routines
+supper
+defecting
+balcony
+labor
+screwed
+pathogen
+reverse
+passenger
+samples
+optimize
+civilian
+son
+Serbian
+tyranny
+squid
+superb
+stretchers
+fabrications
+puck
+Sales
+pizza
+required
+Aggressive
+powdered
+remark
+banquets
+second-largest
+trends
+juggling
+materialized
+software
+ridicules
+climaxed
+lenses
+cooler
+kayaks
+eaters
+sleekest
+Rubbish
+muted
+acquires
+rely
+basketball
+programmed
+biting
+angles
+complication
+tape
+walnut
+losers
+Aye
+soiling
+niece
+Gi
+manifestations
+Gamma
+molecule
+enrolled
+Camping
+unreliable
+hypothetical
+junked
+blood
+proved
+Mammal
+cosmetics
+roasting
+gulped
+mulch
+antelope
+wrapper
+usefulness
+rally
+rehabilitates
+layer
+commonest
+shattered
+rotate
+privatizations
+most youthful
+verb
+duo
+illustrators
+vanguards
+turquoise
+routing
+occurrence
+debts
+humanities
+lavish
+loves
+definitive
+hospitalized
+mirage
+chamberlains
+tripping
+parachutes
+rugby
+exceed
+most formal
+found
+charming
+Serious
+unbearable
+depend
+audition
+barricaded
+annually
+capped
+demolishing
+dunk
+marketplace
+nursery
+chase
+categories
+nightclub
+orthodoxies
+whispering
+midwives
+necks
+bails
+relievers
+searched
+confessions
+tweeds
+twinning
+determine
+Wiggle
+stereotyping
+deceived
+sensors
+authorizing
+hurdling
+fifths
+surrogating
+abnormalities
+Caution
+Nervously
+disguises
+howls
+squashed
+Eastern
+petals
+stoutest
+grain
+waking
+poking
+subjects
+existential
+evangelist
+Bass
+clatter
+spine
+divided
+cookbook
+interstellar
+tether
+buddy
+tarps
+utilization
+condemnations
+management
+nobles
+unruly
+assessment
+flutters
+Ally
+scarce
+quipped
+yolk
+right-wing
+buttering
+offensive
+psychologists
+Vow
+donations
+lease
+muslim
+goodness
+castles
+certified
+admitting
+snugger
+indirectly
+squads
+incentive
+tangoing
+freak
+Data
+dermatologists
+jokes
+politer
+bolts
+spoonful
+providers
+differently
+lurches
+followed
+terrorize
+congress
+preceding
+vacancy
+phoned
+commentaries
+favour
+cautioning
+proceed
+misconducted
+layoff
+be
+tractor
+employed
+shorn
+clerks
+shedding
+Chill
+subsidizes
+curtailing
+barren
+Careful
+fanciest
+rigs
+pulping
+lamented
+smirk
+pivoted
+coolest
+tar
+flamed
+normal
+mobs
+makeshift
+small-scale
+calf
+moor
+pipe
+powerhouses
+pleaded
+most lucrative
+frenzy
+arrangements
+x-rays
+spiritual
+African-American
+energizes
+slumming
+cuff
+reps
+jeopardy
+clasps
+bear
+mopping
+Republic
+athletics
+journal
+magnifying
+more resilient
+Christ
+priors
+licking
+whores
+vents
+king
+babied
+wednesday
+in
+Englishmen
+insect
+losing
+spooned
+spear
+coercive
+floods
+gatherings
+filibusters
+blights
+pelts
+opportunity
+doughnut
+demanded
+ignorant
+anthem
+more consistent
+exiles
+toying
+inferences
+invited
+should
+vs.
+wastewater
+Equation
+dishwasher
+Unified
+soaks
+most intuitive
+explores
+smear
+Chrome
+violently
+submit
+beau
+anniversaries
+anew
+phoniest
+intertwining
+Tribal
+devoid
+equivalents
+corporations
+emits
+rein
+laments
+announcements
+monologues
+specters
+cylinder
+locale
+feature
+brochure
+mint
+Demographic
+rafter
+chambering
+shouldered
+conjures
+tubing
+quite
+deposit
+novels
+reeled
+reopens
+fleas
+twins
+big
+nerved
+surprise
+parsley
+meets
+harvesting
+madder
+tried
+expressed
+bracing
+adoptions
+most harmful
+capsule
+popularity
+garlanding
+Troll
+quarterbacking
+pillows
+acclaims
+drove
+streamed
+evolving
+stubbing
+semantics
+size
+sorry
+perfect
+dominion
+waded
+outsiders
+crouching
+mounting
+complexions
+Separate
+bulking
+programmings
+mussels
+timely
+customers
+favorite
+fusses
+antiquing
+airier
+skimming
+prescribes
+reached
+motion
+Kurds
+Gosh
+more prosperous
+infancy
+steers
+crease
+diagrammed
+Nineteen
+vinegar
+sleep
+variations
+depletion
+passageways
+runways
+freestyles
+treasured
+according
+rutted
+cultivate
+offering
+issues
+resist
+peppers
+docs
+marvelous
+homered
+hostile
+zero
+nationalist
+more delicious
+ascribed
+squandered
+cruiser
+steroid
+confound
+x-ray
+activism
+newsletter
+One-half
+scientific
+apart
+consultation
+outstanding
+doubles
+mixtures
+arbitration
+weeer
+insures
+experimentation
+gorilla
+fingertip
+politest
+verse
+riddle
+surplus
+dangling
+overtaken
+delineating
+condemns
+eschewed
+bazaar
+boomed
+gently
+hinder
+endeavor
+Glamorous
+Hip-hop
+ends
+novelist
+certainty
+swirled
+Space
+personalizes
+akin
+unskilled
+bones
+involvements
+buttoned
+branches
+circulate
+maneuvers
+lording
+omit
+singulars
+cavalier
+outdoor
+motivate
+excerpts
+fumbling
+restoring
+dirt
+correlates
+cancel
+squashing
+splinter
+spaceships
+shields
+sufferer
+clumped
+pitched
+tickles
+sang
+stemmed
+phoenix
+chucks
+costs
+kidney
+Unbelievably
+responsibility
+Torment
+deriving
+parishioner
+rocket
+sanctuary
+likelihood
+chalks
+rulings
+porcelain
+guilty
+feeble
+Fine
+arabian
+sparing
+prominence
+redefining
+reaffirming
+grounded
+wet
+crucial
+Lipstick
+technology
+its
+contend
+coyote
+inclined
+upper
+heart-breaking
+bra
+throttle
+canton
+captions
+most devastating
+populate
+maximizing
+befriends
+rappers
+nurture
+collaborator
+morrows
+likeness
+lash
+exporter
+expectancies
+lotteries
+culminates
+danced
+neighbours
+secures
+retrospects
+angriest
+averts
+Roman
+dining
+cant
+resetting
+saluting
+bleak
+inaugurates
+western
+Liberal
+prairie
+serious
+hack
+transcribe
+pervasive
+misconduct
+longitudinal
+tools
+toil
+symbolizes
+vegetarians
+PM
+hasten
+intake
+replacing
+arrays
+more flexible
+symbolizing
+cloak
+mediate
+writes
+conned
+asleep
+interceptions
+endorsing
+pleasure
+deepens
+soul
+harsh
+veiled
+spoken
+contention
+measured
+prep
+convoy
+treasurer
+iciest
+filibustered
+something
+captioning
+symbolic
+suit
+planted
+disdain
+exceptional
+suspected
+stonier
+yawn
+stunting
+dale
+most comparative
+Low
+seasoned
+hollowed
+fad
+photographic
+marvels
+scant
+aviation
+monitors
+insensitively
+crumpled
+ocean
+accent
+salesman
+advises
+scene
+spurt
+Adept
+cheese
+breathes
+exclusions
+shortstops
+destroys
+handbags
+fragmenting
+reconstruct
+sacrifices
+vaults
+duos
+underside
+heralds
+basking
+persuading
+socializations
+oddest
+savaging
+viable
+subpoenas
+divining
+aped
+differed
+copper
+communicates
+jelly
+Stress
+most negligible
+gutter
+attaching
+enabling
+posttest
+undressed
+viewed
+desert
+pepper
+coordination
+parachuted
+free-market
+calculator
+Hi
+bystander
+versatility
+monarchs
+prevented
+remembered
+dye
+pairing
+pears
+receding
+glancing
+deadline
+discretion
+kid
+hooted
+blessing
+Orders
+booth
+modernists
+primers
+gummed
+advised
+crazier
+continue
+sneezed
+irritate
+delineated
+dominions
+settles
+yoga
+implements
+craves
+borrow
+hazes
+ambivalence
+basket
+slings
+tided
+idealer
+cassette
+passes
+spontaneously
+counted
+foam
+heroes
+grumbles
+dices
+moneys
+mess
+baby
+princess
+fractions
+froze
+hoses
+premiere
+reinforce
+Suicidal
+kernel
+poetry
+half-dozens
+lowest
+finalists
+customer
+hypes
+concord
+leaned
+roadside
+Sound
+disposed
+continued
+impatience
+Denmark
+audiotape
+parted
+checklists
+closures
+cementing
+patios
+embarrass
+pillar
+cherish
+blotting
+wielded
+scotch
+broaden
+refined
+conservative
+Compound
+Sporting
+namely
+instructed
+liquor
+curtain
+atm
+stockpiling
+narrators
+applying
+corroborating
+tucks
+butte
+conceives
+fraternities
+slug
+skilfully
+playgrounds
+extremely
+carter
+cramps
+more customary
+guess
+artillery
+high
+cheeses
+gentlemen
+commander
+upgrading
+rattled
+impetus
+cartons
+evened
+ties
+macro
+sound
+enforcing
+dryer
+strongly
+chords
+success
+dreamed
+acre
+Canada
+compensate
+longtime
+Choice
+chanced
+teddies
+Chaotic
+stuck
+ascended
+opposed
+dimmed
+curried
+designation
+drastic
+decided
+scrub
+printer
+lots
+pedal
+stamp
+online
+psyches
+playful
+Spirit
+revolutionizes
+campground
+scholastic
+mustangs
+Blood
+exhibiting
+munching
+contrary
+direction
+computing
+similarly
+plaque
+compacting
+approximating
+HA
+Socialists
+requirement
+recognises
+alienated
+reminding
+unlocking
+dreadful
+goals
+Volcanic
+precipitated
+marches
+denser
+cover-ups
+Siren
+documented
+progression
+carts
+candy
+reliefs
+propellers
+clumsiest
+secrecy
+bulky
+excerpted
+homages
+princesses
+soviet
+friction
+activities
+cheating
+institution
+worthiest
+rewriting
+jeans
+conferences
+reliving
+chewing
+observing
+less
+precluding
+reckon
+earns
+retorted
+hormones
+shall
+punctuates
+pedestal
+Infiltrate
+drink
+browsed
+attracting
+backbones
+glossing
+establishes
+sweetness
+creamed
+monopolies
+shrub
+Heterosexual
+dwindling
+spicier
+menace
+amazement
+biblical
+mulches
+toughest
+scotched
+more tasteless
+alto
+tugged
+tariff
+macho
+strangely
+lately
+bracelets
+surrenders
+slims
+livestock
+precluded
+parcels
+accompanies
+Someplace
+chaotic
+bottom
+buildings
+banging
+All-American
+efficacy
+accumulating
+down
+widen
+rates
+flirted
+Otherwise
+surveying
+sowing
+retrieves
+modelings
+Bodily
+cancellation
+pastimes
+doors
+viewing
+denounces
+beacon
+pillars
+redesigns
+positively
+congressman
+anxiously
+By
+Rainbows
+teetering
+small
+grabs
+tensed
+sat
+crayons
+tiers
+postpone
+compromises
+grapes
+fretting
+strewed
+censuses
+trooping
+nineteenth
+brutal
+mow
+harried
+contributing
+diary
+genetic
+proclaimed
+pike
+chillier
+messy
+Top
+oozes
+flaming
+unfits
+Pet
+limestone
+regent
+fossils
+modem
+zebra
+therapies
+harnesses
+piazza
+descending
+large
+substantive
+rip
+thwarting
+almond
+fastest
+EveryDay
+circuit
+misting
+stagger
+mathematics
+passionate
+buffed
+hoarded
+sided
+posting
+surgeon
+reinvents
+processor
+penetration
+psi
+matches
+reciprocal
+salvation
+appareling
+workplace
+obliterating
+daylights
+purchases
+squirm
+showcasing
+met
+healthiest
+letter
+springs
+skateboarded
+manufacturer
+learnt
+accomplish
+beats
+initials
+turnout
+sip
+penises
+cargo
+super
+tool
+exotics
+loaning
+finale
+endeavored
+skyscraper
+accomplishments
+parades
+baggier
+Geographically
+garbage
+province
+channels
+pronouncing
+themselves
+seizure
+planter
+promoting
+either
+synthesized
+waxes
+savvy
+Pope
+tans
+abbey
+birthdays
+fulfil
+every
+supplementing
+decreased
+gases
+compromising
+trees
+geographic
+confrontations
+herons
+comparisons
+walkway
+asian
+clammed
+Evil
+catalogued
+classifies
+attributing
+more plentiful
+most energetic
+grasslands
+funnier
+priciest
+hoots
+brags
+solemnest
+narrating
+lengthens
+Substitution
+constitution
+bays
+resilience
+gardener
+peacekeeping
+stress
+honouring
+Annual
+Poll
+slews
+plunged
+congratulated
+enduring
+express
+three-quarters
+excused
+hushed
+reigning
+excepting
+biographer
+preferable
+marxist
+repel
+biking
+torsos
+improve
+clarified
+Geological
+drizzled
+wisdom
+subscriber
+nighttime
+synthesize
+traverses
+viewpoints
+leveraged
+dagger
+treading
+hours
+granny
+quests
+flaws
+herd
+attorneys
+Immortal
+bankrupts
+bosses
+photographer
+collaborating
+crush
+primary
+harmful
+prisoners
+preferred
+shine
+sheltering
+twirling
+Spot
+niche
+prospected
+Violas
+by
+None
+peacekeeper
+lips
+benchmarks
+savvier
+Same
+extract
+stalest
+welshed
+snowflakes
+watches
+sneaks
+covenant
+unresolved
+smoke
+sins
+manipulation
+murdering
+May
+registry
+spoonfuls
+lobster
+democratic
+widens
+swoops
+slum
+reflections
+washes
+proudest
+naturalist
+heady
+birches
+takeoffs
+restoration
+hailed
+commemorating
+notified
+rumoured
+unintended
+confiscating
+jocks
+plastic
+mythologies
+racket
+discourses
+shuddered
+mortgaging
+garlanded
+precinct
+infidelities
+moderators
+Favour
+sunniest
+verbal
+papered
+behold
+squirt
+looking
+sterns
+stags
+sprinkles
+nonfiction
+duplicating
+begin
+abuse
+telegraphing
+china
+swift
+raking
+nestles
+wonder
+sunset
+stapling
+presidential
+bluntest
+helicopter
+inflicted
+coefficient
+more insecure
+legends
+roe
+Versus
+enlightenment
+acquaintances
+radio
+exerts
+pending
+controlled
+reply
+dice
+decaying
+intercepted
+relating
+upheld
+colon
+pies
+spreading
+emulates
+supported
+homer
+salted
+shinning
+ideally
+diagramming
+trouble
+cascading
+smirking
+wooed
+facilities
+czar
+pressure
+idiom
+resurrected
+moderation
+moodiest
+mattering
+equips
+dopes
+swipe
+enamel
+foraying
+open
+outputting
+delve
+forget
+impeached
+dabbing
+thrashed
+double
+fielding
+bloomed
+preparing
+most memorable
+enveloped
+landslide
+inspiration
+defenses
+placement
+bottles
+sweeping
+appear
+championing
+disrespecting
+overnight
+arrest
+Inevitable
+specials
+popped
+reflection
+bluer
+tattoos
+overhear
+working
+vigilant
+pairs
+thirty
+Anthrax
+boasted
+atlantic
+workman
+collateral
+trickle
+Midnight
+hunting
+loosening
+helped
+Suites
+flicking
+early
+butter
+span
+kindly
+coaxed
+ballroom
+relax
+map
+harden
+more viable
+crowing
+Convict
+gates
+mistrusting
+centre
+Perch
+entourage
+dead
+illustrated
+bedroom
+beckon
+deprives
+discouraging
+stuffing
+summits
+bringing
+caroling
+notorious
+Dominican
+praises
+rasher
+regretfully
+wail
+umpires
+Piedmont
+Incidentals
+bundling
+patched
+squarely
+admired
+cabbing
+seashells
+wards
+values
+detail
+tuft
+hijackers
+left-wing
+schoolmate
+toed
+edit
+furrowing
+more timeless
+aside
+softball
+punched
+depriving
+exempting
+Fundamentalist
+deserved
+gulp
+stroller
+troubles
+Orthodox
+mediterranean
+constituency
+authorization
+bombards
+pianist
+glee
+valve
+imminent
+directors
+infected
+short-term
+fabricated
+phrased
+eight
+friday
+bellow
+quickening
+shallowed
+offsets
+generic
+same
+negligence
+nursings
+wager
+pirates
+advisories
+different
+Housekeeper
+scorning
+channel
+shipping
+heaves
+more gigantic
+most geographic
+apologizes
+paradise
+rookies
+barrelling
+portal
+most perpetual
+trooped
+producer
+Composite
+soon
+pink
+cruelties
+goose
+remembrance
+thermometer
+brutality
+magnetics
+noting
+manual
+tile
+ammunition
+tips
+subscribers
+inhabits
+lead
+sawn
+chemist
+filleted
+staffs
+casualties
+yelp
+donkey
+summarize
+treaty
+alienation
+breakdowns
+mamas
+shaming
+relief
+aesthetics
+sentimental
+consumed
+crusade
+cape
+agency
+Above
+successions
+more normal
+begs
+Substantial
+ironical
+contagious
+sweat
+safeguard
+radium
+goggle
+wallpaper
+awe
+advertisement
+unsettled
+unexpected
+reset
+fierce
+guinea
+islanders
+toasting
+jay
+conjure
+lineage
+institutionalized
+dimensions
+spends
+zesting
+quartet
+ninety
+crumbs
+Mayo
+yelping
+snicker
+masculines
+novelties
+obtained
+relentlessly
+calculated
+mandated
+Lord
+adventured
+royalty
+Savings
+aspiring
+prospers
+Cognitive
+creepiest
+riddled
+Modestly
+butt
+more youthful
+actively
+activate
+Durables
+littler
+seventy
+late
+grad
+goddess
+lies
+showering
+scarcest
+savoring
+rags
+cemented
+lush
+pigeons
+diseases
+interrupts
+pardoned
+storms
+leafed
+musicals
+shrank
+Post
+gagging
+recall
+mosquitoes
+quad
+opting
+replays
+prescription
+impresses
+neural
+commitments
+port
+bins
+meaningful
+international
+answered
+quailing
+mind
+tolerated
+paler
+shore
+trusting
+legion
+antibodies
+more recent
+intrusions
+muck
+hefted
+priest
+tailoring
+submitting
+artist
+unmistakably
+negotiator
+bikini
+stipulated
+sizes
+essays
+curse
+poorer
+Periodic
+graduates
+bamboos
+flavor
+accord
+resonates
+acclaim
+waist
+stickiest
+curbing
+seemed
+cookings
+bunds
+guilting
+infinities
+pane
+implement
+semifinal
+equalled
+overturning
+municipalities
+wavelength
+subsides
+massacres
+coexist
+spores
+wreck
+reprinted
+proficient
+drips
+more frivolous
+pouch
+shark
+bunkers
+childish
+lays
+escape
+messaging
+enhancing
+remotes
+cult
+dichotomies
+lit
+Scene
+advancements
+unleashes
+mummies
+Cheers
+distinction
+bluntly
+characteristic
+tuning
+laser
+richest
+topics
+statements
+behind
+surfs
+delinquencies
+carry
+Corps
+chamber
+sanctioning
+bleakest
+scooted
+whether
+embryonic
+prevails
+suggestive
+lavishes
+orbited
+telecommunication
+religion
+protection
+Mondays
+surfer
+transition
+winging
+topple
+intimidation
+fatigue
+pigged
+accomplishes
+captive
+renewed
+ferry
+defected
+thawed
+hardened
+materializes
+fourteen
+autographed
+defy
+blended
+anticipate
+seep
+mulched
+cube
+wag
+carves
+admirers
+Actual
+checkpoint
+unmarked
+weirdest
+Decent
+sorrow
+toppled
+symbol
+bundle
+sworn
+handsomer
+bugs
+eradicate
+culminated
+whistlers
+allocations
+augments
+cherokee
+saviors
+ratted
+were
+excerpt
+halftime
+patsy
+thirstiest
+bleedings
+picked
+incompetent
+rumor
+clenches
+fried
+roof
+adventurous
+hegemony
+work
+legal
+legitimacy
+stylishly
+lame
+cooker
+touts
+unable
+skates
+similarities
+flirting
+earring
+unimaginable
+surfacing
+brimming
+contenders
+etiquette
+Cubans
+boomers
+golfer
+numericals
+billboards
+simplify
+odors
+sympathy
+desires
+Forensic
+zoom
+anthology
+awakenings
+view
+Slogan
+fightings
+enrich
+spites
+turkish
+automatics
+analyzes
+low-cost
+perked
+re-entered
+building
+proportions
+birthplaces
+readers
+favoring
+migrates
+Abusive
+stabilized
+horrendous
+competence
+treacherous
+Desert
+dismissals
+College
+specification
+nationalities
+mobilizes
+pints
+shear
+revenged
+accustoming
+surest
+manured
+composted
+Nudes
+resolve
+gaming
+mid
+cueing
+stylists
+whistler
+banded
+wish
+accelerate
+chief
+wildfires
+confederating
+Strait
+baldest
+monk
+oversee
+halos
+deputy
+whimpered
+plaguing
+giving
+Humanitarian
+crackled
+Various
+rehab
+girlfriend
+shaving
+galling
+proximity
+bedrooms
+Asian
+billing
+hike
+marched
+Criminal
+union
+cast
+trucked
+modernism
+sacked
+receptors
+warden
+sighing
+Hebrew
+dunked
+finest
+exploding
+unpublished
+subjectivity
+essentials
+variability
+Fencing
+skaters
+Universal
+Zulu
+Harness
+dialogue
+talkative
+intercepting
+billion
+killing
+harping
+fundamentalist
+pooled
+caving
+sanded
+digital
+foraging
+installs
+muffled
+traditions
+throw
+doctorates
+linemen
+Lyrics
+identifications
+subtly
+bypassed
+dreaded
+sweeter
+Scouts
+heartier
+insisted
+sterner
+thumb
+soups
+dampening
+threw
+breads
+External
+mold
+braced
+articulated
+ill
+acid
+sew
+evergreen
+sale
+returned
+paroles
+methane
+thirsts
+lonelier
+melting
+traveled
+slung
+newscast
+extremist
+willed
+stunted
+Epic
+gesturing
+Generating
+influences
+ranch
+drift
+turfs
+loomed
+privilege
+preheated
+vet
+sci-fi
+yummier
+experimental
+neurons
+so
+learners
+kosher
+bumpers
+reforms
+tackles
+superintendents
+liberates
+repository
+dab
+maxima
+dangers
+more telling
+nastiest
+sensory
+barb
+drain
+yummy
+finding
+repelling
+reserved
+satellite
+strategically
+reflexion
+derive
+model
+governing
+ripe
+lends
+jolliest
+hays
+smooth
+hosing
+compass
+mating
+downplaying
+berg
+frosted
+extended
+reared
+elevated
+cleanest
+triumphing
+argues
+homage
+photographed
+Communists
+stiffed
+skilled
+flourishes
+exacerbating
+lamas
+outnumbering
+conditioned
+disagreement
+icons
+tin
+polities
+kangaroo
+breathless
+most plentiful
+zeroed
+senates
+wedged
+envies
+saturates
+sounds
+big-time
+milieux
+Right
+Sic
+premiers
+more versatile
+entitlements
+harassed
+tested
+stench
+get
+foraged
+transitioned
+velocity
+sinology
+larger
+tidying
+phases
+papering
+Hammers
+Imaging
+potatoes
+weakening
+celebrated
+juice
+anecdote
+rejection
+seventy-five
+exploitations
+wheelchairs
+burns
+abs
+babies
+silvered
+founding
+crisscrossing
+alignments
+Humor
+gloom
+pure
+tasting
+stainless
+waxed
+archiving
+bearded
+campaigns
+thrillers
+chunks
+budgeted
+plushest
+shock
+tidy
+curbs
+falters
+revolt
+Smoking
+bakery
+Online
+rouge
+embargoed
+venerable
+acutely
+resemble
+dismaying
+tingle
+clasp
+decking
+utilizing
+personal
+Incremental
+wool
+quake
+margins
+strategies
+vacations
+dulls
+predominantly
+issued
+asserting
+hallmark
+table
+scripture
+underestimating
+censoring
+furnishes
+sunsets
+swum
+attributes
+junk
+sailors
+modernized
+pelting
+special
+recitals
+appointee
+fix
+Victorian
+cultivates
+Is
+dads
+riskiest
+impaired
+rehearse
+Severest
+wedded
+renaming
+more conceivable
+welcome
+protests
+rifle
+Creole
+twirls
+ironed
+handing
+stick
+sadnesses
+sweatshirt
+indispensable
+sting
+emigrates
+musicians
+impression
+usualest
+releases
+speculating
+soar
+nucleus
+more precarious
+traveller
+milks
+Arabs
+permeating
+censuring
+Psi
+dictionary
+enforces
+anglers
+theses
+cubes
+evangelical
+senate
+atlases
+divorces
+pistoling
+lapping
+drakes
+gleaning
+dwell
+most imaginable
+wireless
+accepted
+horrors
+fetch
+tumble
+perverser
+waists
+rewards
+regiment
+grandparents
+savories
+dared
+interim
+stepping
+rewinding
+quietest
+Scare
+stitched
+blankest
+January
+basins
+beaching
+lifts
+accomplishing
+Penchant
+walnuts
+forgets
+Layouts
+joke
+Finite
+church
+cider
+associations
+stand-up
+Highlights
+chroming
+more interesting
+egos
+waitresses
+comparatives
+saga
+fertilizers
+zeroes
+offence
+crediting
+wall
+Birdie
+validations
+sleepy
+crosstalks
+formulating
+gender
+milky
+oversees
+Arithmetic
+british
+poisons
+refine
+limiting
+creations
+bomber
+petitioning
+sequential
+stereos
+fatal
+memorial
+looked
+Diaspora
+rinsing
+kept
+passionately
+translates
+establishments
+Sheer
+botanical
+fighting
+ethos
+summer
+gardeners
+brings
+inheritances
+residencies
+employees
+confession
+eucalyptus
+homers
+vow
+storming
+terrorists
+Steel
+doorbells
+pals
+glossier
+bulletin
+abductions
+attributions
+prison
+crashes
+westward
+cramming
+honing
+defective
+opposite
+witting
+eclectics
+Sci-fi
+passport
+thorough
+cashiers
+wake
+Hunting
+freelances
+nudest
+Garlands
+scorers
+decisions
+bobbed
+pal
+savior
+Earnest
+stabilizing
+dawning
+murderer
+clamping
+progresses
+tow
+sequel
+affinities
+six
+leafiest
+day
+wishing
+intimacies
+pack
+ensured
+profiling
+feebler
+assumption
+outfielder
+dwindled
+schedules
+spa
+improvements
+lounged
+talented
+dynamic
+missionary
+most precarious
+die
+readily
+Terrestrial
+Remaining
+Her
+fist
+surging
+thrill
+liberation
+dodged
+adolescents
+inspectors
+invaded
+interrogates
+devouter
+zipping
+organized
+drafting
+detective
+producing
+benefitted
+fly
+hood
+submitted
+landscapes
+brewing
+forced
+diameter
+merciful
+humbles
+reconsidering
+reveals
+clatters
+impediment
+alternates
+itches
+communion
+disease
+acting
+road
+booked
+terrace
+re-create
+jump
+ethnicity
+flee
+clientele
+decreases
+unveils
+smoky
+panning
+freighted
+lamer
+simmering
+relations
+freelanced
+handful
+toy
+plasters
+visaed
+most fragile
+actress
+sustaining
+basic
+digs
+warships
+abrupt
+skier
+audience
+rear
+regained
+princes
+graders
+tourist
+more obvious
+sens
+cashed
+batch
+brighter
+intertwines
+lettered
+dogged
+quarterly
+ass
+green
+leaving
+repairs
+tricking
+Terms
+stimulations
+maze
+hormone
+nazi
+mopped
+peed
+raced
+evolves
+bayed
+streams
+twinkles
+Skepticism
+trolled
+retired
+propensities
+signified
+archaeology
+marveled
+irony
+movement
+tomb
+mushroom
+vanillas
+plummet
+coastline
+Superior
+flatten
+litres
+binary
+downloads
+brightest
+knew
+congratulates
+limps
+lasting
+panther
+brains
+belle
+partnered
+smudge
+darkening
+lack
+Incidents
+stuttered
+Ceramics
+state-owned
+versed
+appendices
+rocketing
+mundane
+holier
+terrorism
+plumpest
+rue
+vinyl
+auditing
+prediction
+postman
+spew
+distributed
+turbos
+salad
+unbelievable
+Republicans
+trodden
+traverse
+forages
+most sweeping
+lifetime
+prevailed
+thuds
+overused
+bunker
+hoops
+swiftly
+succumb
+metal
+silveriest
+museum
+annoy
+whipping
+revolted
+practices
+closely
+sensuals
+Signs
+mainstreaming
+slim
+repressions
+graft
+chambered
+arsenals
+oval
+inspecting
+knowing
+supportive
+wise
+revolution
+horrific
+bobbing
+pumping
+hoppers
+broadened
+most punctual
+airway
+hauled
+meteor
+obsoleting
+edgier
+conforming
+refuses
+Extensive
+airliners
+Harmony
+sticker
+goodbye
+pressings
+directed
+mutton
+prey
+regime
+laboured
+haven
+increases
+ridges
+reprints
+ark
+Four
+poorest
+fumes
+beaconed
+balls
+actresses
+jig
+fabric
+planters
+imitations
+accentuating
+event
+misguided
+perceives
+breached
+phased
+peripheries
+weird
+limousine
+conveniently
+contents
+sod
+ground
+Decisive
+nine
+Poorly
+inexperienced
+wholesales
+ordinarily
+Metropolitan
+bravo
+easing
+essentially
+handguns
+inflations
+butler
+foulest
+communal
+confusions
+solidarities
+baffles
+headlined
+disturbs
+kind
+decrying
+theatre
+having
+zests
+nonexistent
+coconuts
+generate
+revenges
+vacation
+dispel
+component
+contradicting
+E
+rhyming
+slippers
+ledger
+positions
+vineyard
+airplane
+minted
+unlock
+coast
+way
+errands
+july
+regulators
+moonlighting
+equating
+pollutants
+passions
+fidelity
+butterflies
+verged
+Theatricals
+shallowest
+diabetes
+utopian
+disappoints
+podium
+fills
+more enormous
+dual
+slinging
+diversions
+shuttled
+always
+guiltier
+curfew
+bottling
+affections
+chronicled
+checked
+oats
+radiated
+assistants
+white
+bitch
+filter
+perspective
+patterned
+cruelty
+welded
+revere
+website
+dustier
+inhabitants
+predicating
+alignment
+disbelief
+include
+readier
+valleys
+collaborated
+variety
+drugging
+one-year
+arm
+command
+medical
+occupy
+yarn
+clowned
+ailments
+lurched
+brittler
+filters
+rebuilt
+interferes
+prolonging
+radios
+pocketed
+outbreak
+parts
+churned
+compelled
+lessens
+cadre
+regulations
+smoked
+appearing
+landed
+arthritis
+irregular
+Stakes
+inhibit
+deciphering
+more charitable
+committing
+successors
+residents
+signatures
+rover
+Third
+dumber
+birthday
+announce
+grandmas
+more humorous
+muds
+clarifies
+most ethical
+embedding
+timer
+fostered
+spaded
+generalization
+Mint
+nestled
+conceivable
+flapping
+addresses
+fled
+mediator
+unconditional
+bluffed
+wages
+aggravate
+whiskey
+overlooked
+warning
+reveal
+composts
+systematically
+batting
+slicking
+pew
+peck
+bragged
+notation
+intermediaries
+Sensing
+upholding
+suppress
+pod
+contender
+mythology
+parcelled
+White
+Meticulously
+imperial
+decoration
+cafes
+lettering
+lavished
+sincere
+geopolitical
+queuing
+handicap
+punishing
+furnished
+vista
+most comfortable
+soared
+lifting
+programmer
+peanuts
+cellars
+five
+beading
+refused
+nut
+paras
+wakes
+chokes
+savoriest
+thumbing
+discharging
+matched
+prior
+previously
+enterprises
+unclear
+tells
+enter
+potted
+provocative
+theories
+friends
+blurts
+rib
+huddled
+correction
+ruffles
+final
+rained
+apprentice
+inspires
+amen
+remakes
+santa
+rumbles
+pries
+oldest
+bunked
+renaissance
+snores
+remake
+disagreed
+superpowers
+associated
+umbrellas
+declared
+refugees
+Easter
+renovated
+patch
+devaluates
+like
+two
+improvises
+steadiest
+superstar
+scurrying
+remnants
+Crunch
+reduced
+aids
+insaner
+huffed
+winks
+while
+documenting
+Cooler
+clients
+Dominance
+hint
+ditched
+mm-hmm
+jolly
+judgments
+principally
+romantic
+jihads
+happiest
+latches
+meaningless
+anchor
+culture
+razor
+disguising
+abstraction
+tart's
+interruption
+atop
+pawn
+affidavit
+interplayed
+corrals
+shade
+numbered
+justifies
+resentment
+endless
+most tedious
+dusty
+drugstore
+Stuffing
+Mixed
+filthier
+visaing
+liturgy
+bottle
+assignment
+Unlikely
+huffing
+antenna
+ominous
+Telecom
+hastily
+middle-aged
+inspiring
+hospitalizes
+consistencies
+hindsight
+marshalling
+perched
+sabotage
+guiltiest
+terrorizes
+rustier
+Australia
+pining
+subways
+beggaring
+Naive
+velocities
+controllers
+catcher
+Inferior
+correlating
+goal
+baby-sitters
+disagreements
+Destiny
+inmates
+knocking
+apartheid
+caretakers
+blush
+premise
+slitting
+nudge
+marshal
+mistaken
+log
+brooding
+Northern
+most tireless
+howling
+bluff
+glances
+bashing
+orientations
+letterboxes
+pale
+struggled
+mislead
+jazz
+aspirations
+Trial
+stop
+lightest
+Japanese
+elevating
+keen
+maid
+speedy
+coordinator
+knit
+hiving
+sociologists
+whiffing
+labeling
+blazer
+shrugs
+replying
+berths
+jumper
+Patriarch
+dominant
+condoms
+authenticity
+e-mailing
+stability
+resides
+terminated
+notwithstanding
+frantic
+more volatile
+attackers
+salesgirl
+Quarterlies
+martyring
+spoiling
+nuggets
+relational
+teller
+substances
+relish
+had
+handle
+slicks
+eyewitness
+exteriors
+beauty
+midday
+bounding
+Historically
+warship
+sentencing
+mayonnaise
+impact
+worships
+aligned
+advertisers
+imaginative
+interacting
+says
+quarterbacks
+arrange
+pretend
+rafts
+thinker
+shipment
+river
+Bosnian
+untrue
+wreath
+bluest
+weeps
+doorbell
+formulate
+Apache
+most angular
+most marvelous
+downstream
+europe
+referenced
+humped
+crams
+collectives
+mantle
+pecan
+analyses
+tortilla
+collegiate
+existed
+taker
+undressing
+inflates
+Symposium
+pivot
+fourteenth
+perfuming
+bangs
+creating
+Superpower
+balk
+Israeli
+lingering
+plump
+exporting
+initiation
+purges
+serene
+Nonfiction
+predicates
+manufacturing
+pleasures
+bothered
+prognoses
+gifts
+overview
+uncertain
+wave
+Pediatric
+seizing
+enforced
+removing
+bagged
+tortures
+anxieties
+hails
+assassinate
+bale
+birds
+Dodger
+depleted
+attended
+spicy
+Argentine
+hospitals
+still
+telling
+relaxing
+most responsive
+Electric
+berthing
+nods
+cleans
+consoles
+superior
+submission
+crammed
+ATMs
+Goddamn
+incorporating
+dynasties
+envision
+ignoring
+faucets
+gun
+Glorious
+headline
+Directives
+ballot
+theoretical
+caravan
+analyzed
+squeeze
+ruptures
+constellations
+drugs
+tipped
+venue
+rustled
+freezes
+resemblances
+inconsistencies
+bounce
+stressed
+safeguarding
+batters
+turfed
+maxim
+implemented
+indulged
+centralizes
+most intelligent
+breathe
+humane
+stirring
+coincidence
+calms
+assembling
+shoot
+calendar
+beaded
+sign
+solidarity
+peeping
+fertilizer
+electronics
+february
+coup
+bump
+smooths
+captivities
+stank
+spiting
+debates
+tanked
+corresponding
+referenda
+enthusiast
+unless
+correlated
+mature
+requests
+realities
+inland
+quilted
+censored
+mutes
+lawsuits
+accident
+DNA
+galaxy
+plaza
+prototypes
+passers-by
+surrounds
+shaping
+denotes
+rivet
+themes
+borrower
+Commercial
+resuming
+defined
+more orderly
+conquer
+invaluable
+blinding
+randomly
+din
+discerns
+postsecondary
+highway
+planners
+filmed
+vendors
+discovering
+demands
+tax
+blow
+band
+fundamentalists
+blares
+bucking
+fired
+legacy
+requires
+celebrities
+dumpling
+cubicles
+ministry
+cheap
+coning
+sewage
+subtleties
+collect
+pined
+axed
+roses
+diabetic
+reunification
+kidnaps
+teeter
+garrisoned
+appropriations
+delays
+guitarist
+blanks
+overshadowing
+blissing
+lady
+miner
+popular
+amaze
+evidence
+prairies
+creates
+probabilities
+Bishop
+amateurs
+plating
+lefty
+pigments
+orchid
+dozen
+Similarly
+Absolute
+filtering
+boxer
+Them
+say
+orbits
+bleeding
+murmured
+murderous
+gems
+Moral
+confessing
+here
+shotguns
+buses
+intending
+propels
+allegiance
+quartets
+monies
+grammy
+thudding
+softest
+diving
+fleshes
+ice
+wont
+medicinal
+civilians
+tale
+divides
+twilight
+booklet
+boast
+plumps
+sweets
+gathers
+period
+haircuts
+nice
+minor's
+lottery
+interfacing
+steeled
+carol
+half
+alloying
+cover
+imported
+stakes
+smart
+broncos
+disappearances
+more rational
+tumbling
+lap
+buffeting
+trashes
+gloomiest
+tethering
+pertain
+automaker
+morale
+custody
+trolleybuses
+sufficiently
+all-star
+landers
+quarrelled
+accorded
+proportion
+effectiveness
+misled
+shove
+joyful
+alienates
+roadsides
+more fertile
+sent
+reduces
+newsroom
+hoped
+ridge
+merrier
+trellis
+laying
+plated
+announcement
+ballet
+undercover
+leafs
+detoured
+older
+curved
+inadvertently
+quipping
+blessings
+ingredient
+quantifies
+shuttling
+nipping
+tends
+beneath
+shiite
+nailed
+continental
+dangle
+stouts
+cleaning
+Olive
+weathering
+ecumenical
+greater
+tuxedo
+likenesses
+giants
+flea
+broods
+football
+volcano
+stricken
+salt
+most frivolous
+fastened
+observation
+honestly
+follow
+levels
+timider
+factions
+experimenting
+founder
+freestyle
+pods
+direst
+shielded
+pegged
+strolls
+changeable
+escaping
+textiles
+standing
+pharmaceuticals
+monkeyed
+dons
+she
+hounds
+leek
+wineries
+precedes
+rider
+pilgrimages
+barrels
+liberations
+farmer
+stomp
+prospecting
+intensest
+lifeless
+segregating
+derail
+covered
+coat
+raise
+galleries
+foils
+groom
+unifying
+motor
+pristine
+nope
+punitive
+looping
+retrieved
+aspires
+more imaginable
+embarks
+obligated
+recounted
+chorus
+refunded
+baggy
+tributaries
+in-house
+renewal
+least
+flares
+survivor
+koran
+shearing
+spelled
+turtles
+disposing
+schoolmates
+doved
+stewardship
+cent
+affiliates
+broadening
+doze
+beloved
+listing
+compacts
+hail
+fucked
+dodging
+interrogate
+more informal
+cougar
+dwindles
+excludes
+peep
+sapping
+bounds
+policing
+avoids
+soft
+freezing
+looped
+nationwide
+Fir
+more negligible
+festivities
+whatever
+immediately
+wares
+thanks
+bunched
+dazzled
+carnivals
+chute
+coke
+questing
+canals
+groomed
+independent
+struggling
+smudges
+obscenities
+tokens
+dichotomy
+investigation
+controller
+whoring
+lower
+stacks
+slicing
+plaques
+orange
+helmet
+fooled
+slenderest
+most frequent
+sacrificing
+blocks
+controlling
+legislatures
+laurels
+Stupid
+Center
+Bite
+cooperative
+tract
+mister
+subtracted
+script
+shepherd
+laced
+sprig
+mantels
+generated
+burden
+surpluses
+subtlety
+There
+tire
+cruises
+carver
+disordered
+Media
+repaying
+ideal
+icier
+Callings
+machine
+snowy
+summing
+threads
+perfects
+illiterates
+creature
+onstage
+rejoices
+Explorations
+judged
+dished
+most global
+portion
+application
+Taiwanese
+ascribing
+benches
+charted
+Erotic
+watched
+glimpsing
+spectacular
+wad
+sits
+packets
+reels
+dined
+best
+most familiar
+transcripts
+streamlined
+seaside
+blurt
+ornaments
+symphony
+insets
+most powerful
+microphone
+president-elect
+pulls
+holiday
+hurricane
+humankind
+presidents
+surroundings
+extremer
+biased
+ladens
+ridging
+demeanor
+lick
+conservationist
+shaved
+defiant
+handbag
+Uncommon
+archives
+pouring
+battering
+levelling
+formats
+sexes
+beta
+sugaring
+surveyed
+foreigner
+cam
+cutback
+ford
+plasma
+previewed
+fashioned
+slaves
+contribution
+paralleling
+vicioused
+playoffs
+distracted
+coiled
+dosing
+victorious
+fluent
+apparels
+trolling
+foggy
+constants
+design
+cuter
+fibre
+chestnut
+recycling
+hairier
+gills
+puzzled
+grass
+provisions
+resulted
+gentleman
+Frauds
+Tidal
+hopper
+photos
+hobby
+target
+accompaniments
+liken
+supporter
+swans
+presence
+dysfunction
+hugely
+intertwined
+better
+partnering
+withhold
+aromas
+workstation
+cancellations
+diminishes
+shortly
+Hindu
+falser
+fancied
+overusing
+Comprehension
+Ceremonial
+changes
+propped
+nursing
+metres
+slanting
+figures
+runner
+sheerer
+sharpens
+traces
+covenanting
+flocking
+stockpile
+bucked
+revolver
+equitable
+twist
+loft
+composting
+re-elect
+sky
+patented
+memorizes
+greyer
+other
+face-to-face
+slips
+high-quality
+kitchen
+best-seller
+gritty
+Stall
+motif
+handlings
+comedians
+brokerages
+imperfect
+bleed
+shoves
+paradoxically
+memorabilia
+outline
+relationship
+limestones
+guided
+concealed
+time
+outrageous
+proliferation
+refrigerates
+oust
+rebelled
+cosied
+sawed
+felony
+trench
+Pragmatic
+bindings
+domination
+Syntheses
+Antioxidant
+notably
+centennials
+threatened
+devote
+premier
+kneed
+vibrates
+knack
+half-hour
+queried
+crave
+pen
+silent
+scanning
+trial
+untidiest
+womb
+noticing
+bubbling
+permeated
+beech
+more lucrative
+headache
+respond
+provincial
+digit
+sited
+reorganization
+strands
+out
+Daily
+heard
+blinking
+frets
+loop
+transformation
+stimulating
+installing
+marked
+eyebrow
+clotted
+crap
+offers
+aroused
+interfere
+minus
+commentator
+full
+officials
+myriads
+Rapidly
+willow
+promising
+subway
+protocol
+fueling
+bitterer
+complexity
+adjusting
+insulate
+sitters
+pardon
+waiver
+clutter
+scream
+motivated
+punctuated
+tows
+more repressive
+Options
+fold
+Thanks
+ribs
+swear
+crunch
+grasped
+choices
+dies
+offsetting
+across
+uses
+pleasant
+nights
+conferred
+recurs
+experiences
+sculpts
+Korea
+shadowing
+relived
+fiercely
+narcotic
+rotors
+adaptation
+construed
+law
+minimized
+goods
+decrease
+flicked
+rapid
+designated
+cradle
+meltings
+combat
+fairer
+belonging
+metals
+passers
+shameful
+geeing
+caricatures
+clamors
+greatest
+enclosing
+exclaiming
+attentive
+tons
+researches
+obscure
+Interiors
+aquarium
+rock
+courage
+zippering
+angrier
+tabooing
+economics
+relished
+brook
+unlike
+vivid
+Notably
+excites
+visited
+privatize
+coordinators
+mops
+essential
+wipes
+exerting
+by-product
+continually
+onsets
+parodied
+unfair
+postcodes
+beauties
+airplanes
+imaginations
+dripping
+crawling
+reusing
+forever
+imitate
+doves
+elevations
+gaits
+muddying
+contaminate
+chairman
+hotels
+retailers
+criteria
+disciplining
+Savior
+pettier
+comparing
+vaporizes
+multitude
+workout
+asthma
+choke
+videotaped
+rewound
+profess
+honor
+perils
+dawns
+refrigerators
+saddest
+enforcement
+influential
+shavers
+crayon
+lumbering
+elders
+ticking
+sadness
+Venerable
+display
+cedars
+smashing
+depresses
+large-scale
+modernizes
+mistiest
+swarms
+pesticide
+Crying
+spooking
+expulsions
+coaster
+fogging
+professional
+flyings
+cried
+icing
+packing
+candle
+crack
+meticulous
+monarchies
+butchers
+Who
+these
+careers
+governmental
+Ireland
+sense
+commotion
+quizzes
+darts
+educated
+vomiting
+conclude
+Humanity
+barbers
+jack
+Scarlet
+grabbed
+noisier
+compact
+rinds
+marital
+clout
+plunge
+fence
+veggie
+messaged
+pixels
+infusion
+aha
+assuming
+proprietor
+commissioning
+legions
+hugged
+junkie
+reefs
+Nonproliferation
+interfering
+collaborate
+three-day
+nest
+entourages
+covering
+colonials
+equates
+lounge
+engendering
+songwriters
+dimmer
+estimations
+fattiest
+squanders
+differential
+fingered
+cave
+perhaps
+more frequent
+pathologist
+knights
+more temporary
+rebated
+oppresses
+performers
+forms
+music
+Upright
+duke
+negotiations
+pudding
+insist
+immigrants
+videophones
+speed
+compost
+only
+civilization
+fiddled
+pigging
+hid
+spontaneous
+tulip
+intensities
+backbone
+obscener
+Persian
+messier
+cove
+though
+amateur
+corked
+ecosystems
+pastime
+squeaks
+private
+dashboard
+atoms
+bruised
+Europe
+toweled
+purely
+promotions
+resin
+exhibits
+amendment
+haitian
+foaming
+dimes
+nips
+devoured
+agrees
+strap
+wink
+stabs
+attains
+slender
+gusting
+convicts
+approximately
+Premature
+Antarctica
+unprepare
+Weak
+leukemia
+general
+cinemas
+supplied
+increasings
+players
+inventory
+presides
+Uphill
+chapped
+cognitive
+fractured
+witching
+spearheaded
+turnovers
+savannahs
+nails
+tackled
+bathtubs
+Almond
+setting
+monitor
+groin
+Finland
+Bin
+correspond
+corrupted
+wilder
+benefits
+surfaced
+tapestries
+Patterned
+touting
+pastel
+resembles
+abstract
+facilitates
+codes
+soybean
+enough
+ukrainian
+regulating
+civil
+fuzzier
+zincs
+revolutionized
+awed
+Saint
+scriptures
+psychoanalysis
+Chosen
+bullshits
+strove
+dwellers
+more glamorous
+absents
+most fluent
+curator
+grid
+mineral
+scapegoated
+cruised
+loose
+prestigious
+moderating
+hookup
+individualizes
+Cape
+blunted
+perfected
+jam
+education
+wools
+brother-in-law
+sanctioned
+ex-wives
+vocabularies
+sizzled
+residential
+royalties
+conversation
+subordinating
+Bourbon
+barred
+fatality
+explanations
+ambitious
+perks
+drizzle
+enabled
+lightly
+loyal
+poor
+more punctual
+theological
+arctic
+slugged
+marries
+soling
+destine
+dancers
+bolstered
+piedmont
+saliva
+fairy
+Inspires
+gestured
+backpacks
+intellectually
+ushers
+pouched
+Geographical
+plucks
+sparked
+laundry
+swooped
+more playful
+diffuse
+Chemical
+nudges
+bared
+denote
+contest
+costuming
+rolled
+demand
+more tireless
+frustratings
+vicinity
+uniformly
+parkway
+tremors
+Divisions
+dizziest
+ascertained
+spun
+expositions
+playing
+asphalts
+funneling
+shimmerings
+team
+barrage
+scoffed
+wonders
+pint
+glosses
+thoughts
+speculations
+emission
+ripples
+festival
+wilts
+encoding
+hush
+commercially
+office
+tooled
+testifying
+franchise
+rusty
+pulpit
+honoured
+pessimistic
+glimmering
+Panics
+choked
+piercing
+domino
+resolves
+richness
+mm
+reviews
+carpeting
+poison
+ballistic
+lodges
+compliance
+transaction
+hardworking
+west
+terrifying
+teased
+nongovernmental
+unpaid
+conversions
+Economic
+churns
+hinging
+spiced
+shocks
+Temporal
+corners
+wisest
+reflect
+stinting
+blunder
+Spinal
+unpacks
+sediment
+dispose
+concurring
+lens
+bowman
+amino
+sobbing
+constrain
+measuring
+thinner
+timid
+evolutionary
+beings
+Mating
+circus
+pro-life
+save
+clots
+academic
+votes
+read
+padre
+instruments
+reflective
+into
+course
+flavours
+toppling
+causal
+funniest
+fade
+torque
+fresco
+advances
+unpopular
+frontiers
+traffics
+malignant
+dealers
+theorists
+agony
+vinyls
+identical
+present-day
+gleaming
+portuguese
+pinnacled
+dips
+discusses
+Iranian
+reacted
+manhood
+pathogens
+southern
+corals
+channeled
+mental
+crispest
+cooperated
+irrigation
+ah
+entities
+hacker
+nose
+elites
+garden
+tenants
+footage
+acquainted
+cutouts
+clenched
+salvages
+smuggler
+fanatic
+sultan
+reliable
+crippled
+rifling
+guide
+scour
+longer
+revolts
+Dynamic
+Infrared
+screenplays
+slots
+disturbance
+most temporary
+medium-high
+Highway
+co-founder
+rotten
+sprawling
+today
+wild
+catfishes
+doorstops
+years
+copying
+engagements
+Pimps
+resilient
+revolutionary
+patriots
+Eloquent
+trudged
+taps
+whacking
+shirt
+unite
+garlics
+leftist
+caffeine
+rebirths
+bells
+tag
+certifying
+populist
+purposefully
+rays
+darings
+Invisible
+clutches
+bishops
+isle
+offender
+clear-cutting
+smokes
+tarts
+threat
+ponders
+romantics
+improperly
+commenting
+hinds
+butters
+interacted
+warier
+expelled
+climaxes
+refrains
+mixture
+Vietnamese
+intellect
+conservatism
+obligates
+partner
+exactest
+enjoyed
+intercept
+cabling
+naval
+sensibilities
+anticipation
+wrap
+misunderstood
+bruising
+worming
+confining
+bailout
+editions
+elimination
+congregations
+subsided
+more violent
+digits
+abounding
+concluded
+recalling
+clotting
+casing
+standardized
+plead
+soled
+comes
+currying
+Noon
+Heading
+thereafter
+practise
+raincoats
+majority
+crusaded
+goats
+meanders
+circuits
+treats
+MainLand
+curries
+corresponds
+vacuum
+development
+mustard
+poisoning
+lovings
+arid
+comb
+states
+engine
+perennial
+slums
+robot
+garaging
+multiplied
+worship
+characterizations
+affinity
+steeped
+endurance
+unauthorized
+blaze
+sell
+warrior
+sharpener
+staring
+spare
+politeness
+innate
+distributing
+more desirable
+rustiest
+pottery
+unimportant
+gunfire
+magical
+said
+lifted
+dispensed
+clockwise
+Firsthand
+emigrated
+devotee
+endangering
+nausea
+tubes
+seasonal
+rattling
+knighting
+lateral
+handfuls
+citizenry
+salmon
+going
+masons
+lethal
+e-mails
+more enthusiastically
+rewind
+wrens
+matter
+Jewish
+member
+sporting
+overhears
+beeped
+interactions
+magnolia
+administration
+filtered
+drugstores
+commit
+slave
+educator
+botanies
+robin
+buttered
+participate
+Character
+calves
+autopsy
+structure
+funky
+horribler
+stern
+grounds
+reunifications
+golfs
+considers
+waiters
+parallels
+opt
+stroking
+speculate
+most orderly
+distributors
+association
+Means
+asking
+blanketed
+pupped
+Venetian
+may
+rationalizing
+definitely
+Tennis
+interplays
+dimly
+signalled
+neighbored
+hall
+Dad
+champions
+overwhelms
+yelled
+bury
+distracting
+canoed
+diluting
+unchanged
+inquired
+posterior
+outputs
+grossed
+guitar
+Foil
+glittered
+resorted
+internalizes
+Blue-collar
+sharply
+axing
+carcasses
+slab
+respecting
+Press
+extracting
+rivalry
+realization
+mourn
+sauces
+genetics
+nudged
+function
+membranes
+romances
+caning
+rockiest
+forgiveness
+flopping
+generously
+american
+remedial
+shape
+deem
+cruise
+more rhythmic
+borrowing
+bullets
+embarking
+sandinista
+breakfasts
+more popular
+farmland
+solos
+most tearful
+cardboard
+exaggerating
+headings
+silks
+frivolous
+seven
+managerial
+lengthier
+solidly
+tandem
+showered
+Nobel
+masked
+evaluate
+negatively
+memberships
+excels
+flairs
+chandelier
+Heave
+neglect
+filing
+twenty-first
+behaved
+tendencies
+nasty
+workstations
+hungered
+shootouts
+hotpot
+Provincials
+executives
+harbored
+educational
+Onstage
+gentler
+sealing
+NAFTA
+Whatever
+bristling
+oaths
+notifying
+vials
+tolerance
+growth
+aggression
+fussing
+most passionate
+bookers
+paged
+country
+arcs
+overuses
+stifled
+interpreter
+caked
+attributed
+watching
+perjury
+elects
+breach
+crates
+resorts
+freezers
+granites
+raisins
+swallows
+occupation
+releasing
+slimmed
+developmental
+truck
+permit
+strictly
+anti-abortion
+polymer
+spectacle
+ignore
+overseeing
+dynamiting
+torpedoed
+applied
+Expeditions
+exposing
+callers
+alleviates
+studding
+circumstances
+scalloping
+impatiently
+oregano
+straws
+massage
+streamlining
+campers
+rottener
+consuming
+daddy
+talkers
+haying
+jolted
+enhancements
+selective
+finger
+flank
+bums
+russian
+Jamaicans
+soloing
+temptations
+mumbles
+canvas
+coursing
+Utopia
+fungal
+indexing
+glove
+martial
+eliminate
+column
+dubbing
+assured
+awes
+decorated
+moist
+yeasts
+interims
+hilarious
+recounting
+curfews
+bullshitting
+blossom
+therapeutic
+dignity
+Amish
+licks
+falter
+heftier
+Serenities
+anyone
+interest
+condemnation
+At
+scammed
+Crucial
+graduated
+individuals
+likelier
+partied
+publicists
+mysteriously
+plotted
+Cuba
+cheers
+instructing
+deliberately
+disappearing
+tread
+waterways
+overt
+typhoon
+sprinted
+reputation
+sustainable
+subcommittees
+campfire
+repaid
+must
+avails
+repeats
+eyelids
+helplessly
+columbine
+fell
+tentacles
+part-time
+coursed
+thickening
+Radiant
+watch
+rehearses
+feuds
+caress
+acquit
+heeled
+determines
+foil
+grandsons
+caregivers
+saucing
+contained
+inputs
+recaptured
+Dead
+deducting
+imports
+divines
+Colognes
+rye
+voluntary
+distanced
+snuck
+self-defense
+reverend
+adoptive
+complemented
+preventive
+blur
+weather
+statute
+travelers
+fragmented
+drawn
+legged
+hangs
+repels
+attempting
+Hey
+snagged
+labouring
+lot
+discrete
+leopard
+mildest
+starkest
+benign
+coma
+congestion
+reshaped
+tragedy
+did
+sock
+yelps
+ludicrous
+tanks
+introduces
+cowboy
+goddesses
+diocese
+sponsorships
+joined
+tints
+tore
+ached
+spread
+assholes
+receded
+dolphin
+distressed
+mentions
+drilled
+teed
+narrated
+faceting
+Shiite
+elm
+glass
+preacher
+heroines
+somewhat
+riveted
+worker
+fragrance
+outreached
+specializations
+griped
+evaluations
+churches
+profitability
+shows
+objections
+staircase
+remembering
+packer
+gaping
+ingenuity
+exceedingly
+I
+embarrassed
+Potential
+sergeant
+revised
+emanates
+hysterical
+quantify
+follies
+incoming
+northeast
+linguistics
+interpersonal
+hookers
+unemployed
+pupping
+exaggeration
+invite
+dozed
+resurgence
+excellent
+inventors
+utilize
+banana
+outreach
+outskirt
+precipitate
+liners
+glasses
+weekly
+outsider
+viral
+misconducts
+pavilions
+variances
+fogged
+disconnects
+geological
+December
+Devoid
+dock
+Martini
+sizzling
+vaguely
+betrayals
+pebbles
+crackdowns
+fume
+overstates
+ratings
+intrigue
+pharmacies
+bayous
+customize
+wasted
+unexpectedly
+sicknesses
+applications
+multiplies
+imposes
+liberated
+discerning
+miners
+bark
+inhibiting
+shortage
+daycare
+polity
+consolidation
+shrink
+insecurity
+digging
+susceptible
+As
+plumbing
+dims
+carved
+browse
+cradles
+rodents
+energies
+flair
+contaminating
+antiques
+hiking
+heavens
+levied
+bus
+boomer
+disasters
+autos
+shy
+diagram
+obsessed
+elbowed
+radiate
+inadequate
+confederate
+casts
+darns
+glazes
+admitted
+mothers
+fry
+lone
+remarked
+old
+anyway
+dearer
+ruling
+more tremendous
+discarded
+miles
+Nature
+undecided
+slacker
+Mummy
+Raw
+panics
+caresses
+wading
+evade
+episodes
+idealism
+westerner
+makes
+bullocks
+more gradual
+baseline
+coded
+buttes
+tug
+relying
+offstage
+more formal
+photo
+subordinates
+sneered
+ribbons
+tires
+scratch
+pausing
+fantasize
+after
+fraudulent
+bushes
+downward
+simmered
+cladding
+ejects
+bunching
+wildernesses
+endowments
+specialization
+inauguration
+bureaucrat
+playwrights
+Eleven
+project
+networking
+journals
+His
+salsas
+sweaters
+ventilation
+combo
+prosthetic
+cartilage
+inflict
+spokeswoman
+disciple
+conceptual
+smiles
+incumbent
+squeals
+flattening
+slyer
+struts
+figs
+swat
+stewards
+gravest
+recognized
+Covert
+aching
+chain
+pollute
+okaying
+breaks
+blasts
+draped
+occupant
+stairwell
+replenished
+Confused
+cringing
+township
+blew
+subdued
+lousiest
+spying
+neatest
+stately
+connector
+patent
+inhibits
+managing
+mitigating
+war
+handgun
+Conservatives
+couples
+blocs
+heating
+stride
+disadvantage
+binds
+exist
+Buff
+speculation
+customs
+gator
+affirmatives
+fasts
+bookshop
+chinese
+entitlement
+acing
+trio
+pupils
+willing
+flail
+Hotels
+persuade
+Biblical
+servicemen
+more original
+polled
+rougher
+scenes
+sheared
+certificate
+throttles
+expressing
+roughly
+whoever
+marlin
+series
+jollies
+thinnest
+heartbreaking
+hammer
+Buck
+degrade
+internalizing
+writer
+contended
+sinus
+inconvenienced
+enrollments
+majesty
+especially
+adapts
+amplified
+counters
+grooved
+normally
+Serpent
+congratulate
+acknowledgment
+calm
+hip
+creamy
+climbing
+saddled
+guardians
+Hip
+herbs
+suspending
+signature
+sizing
+jumble
+related
+peaked
+shady
+derrick
+flaps
+traitors
+socked
+girls
+lured
+pruned
+valuation
+turquoises
+in-depth
+ingenious
+ears
+dairy
+served
+dominates
+party
+Why
+habitat
+absences
+July
+centres
+scenarios
+melted
+Spinach
+esteemed
+mites
+mediators
+subsiding
+unity
+awing
+stylish
+inscription
+homes
+garland
+canaries
+erupts
+sophomore
+mysteries
+mommies
+distractions
+attaining
+involve
+Explosives
+parroting
+spit
+signals
+vomit
+towels
+greetings
+cancelling
+lapsing
+insulating
+concentrates
+expiring
+enemies
+slicker
+creatures
+interprets
+ghostly
+stopping
+more passionate
+Schooling
+leanest
+juiciest
+tooth
+Cattle
+domes
+lab
+miniature
+haloes
+commissions
+indulge
+stretching
+memorandum
+recoil
+detours
+reclaims
+certificates
+pup
+coercion
+Salt
+birdied
+prying
+taught
+narrowing
+snaked
+Pigment
+butchering
+accountable
+bony
+drapes
+slides
+evening
+ready
+afloat
+bunking
+declare
+Liquids
+jawing
+overturned
+chews
+pimping
+craft
+thud
+throttling
+whacked
+bubbles
+edits
+writing
+uh
+co-authoring
+pinnacle
+firstly
+originates
+societal
+informants
+mariners
+tweaks
+reverences
+boring
+placebos
+taken
+maiden
+puree
+massing
+birth
+regulator
+over
+booted
+handler
+weary
+queerest
+burst
+musician
+intruder
+rouses
+unison
+citizens
+sickest
+maybe
+expresses
+medium
+impoverished
+hives
+brushes
+profits
+most critical
+magnolias
+aluminums
+excited
+decoyed
+facilitate
+precautions
+crying
+destiny
+chargers
+profession
+rodent
+circling
+predictions
+startles
+Fools
+scandal
+tunnelled
+palestinian
+developed
+standoffs
+disability
+scribbled
+Tarp
+spore
+glare
+more analogous
+organisms
+whooping
+marking
+determined
+rare
+recessions
+jar
+totes
+clubhouse
+locate
+slip
+questionable
+procedural
+conserve
+repetition
+wreak
+swiped
+sobering
+inflicting
+racers
+zippered
+arousal
+alloyed
+media
+pacts
+visions
+merchandises
+buzzing
+images
+hazel
+businessmen
+simulate
+victim
+quickest
+timetable
+overlaps
+conductors
+wearing
+flap
+obey
+mentalities
+tilled
+carnival
+adhering
+magics
+Dopes
+intuition
+pundit
+enticing
+footprints
+additional
+processions
+eclectic
+opts
+stormiest
+Outlaw
+comforted
+mechanical
+drawers
+hayed
+iller
+parents
+Baltic
+quarters
+Music
+blueberry
+more famous
+kilometer
+leaves
+predictive
+afterward
+dysfunctions
+midsections
+suspend
+soccer
+mainstreamed
+explorer
+embarrassments
+crossing
+grandchildren
+vanilla
+cad
+strata
+staffer
+rethinks
+crisper
+wows
+phonies
+planets
+flops
+radiant
+exude
+parody
+denials
+bureaus
+Abyss
+acknowledge
+familial
+tea
+plays
+differentiate
+trainee
+dissatisfy
+canopying
+heaving
+plagues
+Botanical
+overhaul
+sources
+militants
+vocations
+realised
+solves
+outreaching
+gowns
+classed
+undergone
+buffalo
+refuted
+precincts
+hamburgers
+rafted
+elastic
+FAME
+defect
+translating
+holders
+reflex
+more patriotic
+thermos
+vaccinations
+inaugurating
+bankers
+moslem
+rethink
+worry
+stool
+piles
+argentine
+designate
+forks
+rehearsals
+seaming
+upscales
+Biologies
+waitress
+except
+solve
+strict
+silliest
+slightest
+likely
+priding
+confined
+voltages
+harasses
+blink
+frigid
+combating
+snarling
+plateaus
+underlay
+hospice
+arcade
+nosed
+Comfortably
+broadcast
+donates
+frequents
+ticks
+wiggle
+tiptoe
+conserving
+proudly
+Sioux
+horror
+influenza
+budding
+bounced
+blotted
+block
+roofed
+buffered
+guitars
+tortoises
+headnote
+sites
+overruling
+scrambles
+squad
+breast-feeding
+scarlet
+juts
+cap
+footballs
+ray
+confer
+revealing
+emphasizes
+assists
+earliest
+grate
+main
+encounters
+spawned
+constitutional
+mission
+Out
+snap
+coincided
+cease-fires
+pulling
+lending
+surpassed
+deciding
+embarrasses
+Female
+leveled
+fees
+espionage
+cornmeal
+overtake
+homesteaded
+superiors
+more plausible
+differences
+kinda
+sloppiest
+result
+episode
+vase
+quirky
+merciless
+rouged
+instantly
+fining
+antiquity
+collars
+Lifetime
+Arm
+organic
+absorption
+stimulated
+abstinence
+drifting
+waive
+message
+apache
+ancestral
+sleepiest
+written
+bodily
+authorities
+butting
+daunting
+expressive
+blossoms
+erases
+importance
+manners
+Mining
+most artificial
+Suiciding
+leaner
+mustang
+impoverish
+all-purpose
+evoke
+compensating
+grimaces
+lulls
+shopped
+editors
+guesses
+abstracting
+scrap
+settlers
+mustered
+recreational
+seems
+denmark
+reinvent
+contamination
+defaulted
+socialism
+shakes
+shines
+cages
+subjected
+truest
+clues
+waver
+audible
+delivered
+stimulation
+masculinity
+decker
+dipping
+canning
+x-raying
+shops
+wintering
+visas
+medicine
+oat
+prevent
+instants
+scrubbing
+more mortal
+accords
+Literal
+concentrate
+interrupt
+hotdog
+lists
+microwaved
+directive
+testosterone
+skippers
+Adult
+braking
+sobs
+neighbors
+elicited
+strikes
+journalists
+activists
+crickets
+schoolbags
+argued
+aggressive
+hassle
+floors
+contending
+confirmations
+orgasm
+Operator
+hometown
+bandaging
+cubs
+advertised
+hooting
+Extension
+repeal
+manors
+quilt
+rig
+warranty
+engagement
+cozy
+excavates
+somber
+dog
+morals
+review
+syndromes
+flush
+motivates
+partisan
+two-hour
+fragments
+belting
+damn
+rogued
+Perspectives
+smoother
+raging
+drawbacks
+passion
+Baptists
+shimmers
+fluently
+recited
+weeks
+cot
+whirls
+comparatively
+spreads
+crabbed
+crouched
+normative
+burn
+grays
+asset
+den
+pinpointing
+quarried
+deportation
+form
+expected
+Powers
+equipment
+integration
+indicating
+napkin
+transparent
+announcer
+drawback
+spouse
+cupboard
+scotches
+attainment
+acquaintance
+doles
+screenings
+rug
+fault
+occasional
+wills
+more gorgeous
+obscures
+dividend
+option
+piece
+calls
+clouding
+adjusts
+goosing
+most ornate
+corpora
+skips
+portrays
+exec
+softened
+Audibles
+clearing
+bug
+startling
+leasing
+fund-raiser
+overuse
+toad
+most explosive
+low-income
+more dangerous
+serbian
+baseball
+extinction
+Cosmetic
+embassy
+faintly
+veil
+pediatric
+tattered
+mucking
+reason
+galaxies
+keyboard
+scoped
+foams
+broker
+calcium
+sweaty
+syringe
+duller
+appliance
+absenting
+ranges
+plateau
+safari
+correspondents
+eccentric
+pests
+groaning
+idled
+dairies
+payoffs
+scowls
+sputtering
+destroyed
+emotions
+contenting
+assertion
+encircles
+nationality
+jets
+wears
+complied
+stoves
+franchises
+travelling
+detainee
+iraq
+implementations
+painted
+rot
+knobs
+humoured
+diplomas
+recedes
+sharing
+manifests
+felons
+braved
+skewered
+Constituents
+insetting
+winery
+vineyards
+cilantro
+most serious
+followers
+starters
+diverts
+achieving
+flames
+jumped
+woodlands
+termites
+offset
+battalion
+appropriate
+dogs
+buffets
+re-created
+comas
+undoubtedly
+defining
+overtimes
+linebackers
+saluted
+came
+blogging
+grumbled
+tender
+mysterious
+open-ended
+proposition
+bribe
+chad
+smallpox
+appropriation
+awaiting
+discontinuing
+most spacious
+laboratories
+vengeance
+tosses
+francs
+will
+opinion
+Unnatural
+Confucius
+committee
+rumbled
+most trivial
+depressive
+lusher
+casuals
+hunter
+boiler
+sheets
+minting
+ravens
+heir
+grins
+mesa
+firing
+server
+swiss
+distinctest
+advisors
+cautious
+acclaimed
+propulsion
+fucking
+greengrocer
+copyright
+They
+Paramount
+injection
+versatile
+recurrence
+greek
+employment
+exceeded
+introductions
+privates
+showing
+succeeds
+fortune
+First
+peddling
+merged
+delivery
+compressed
+derailed
+insecurities
+troughs
+braces
+defended
+skyrocketed
+sperm
+peeked
+speared
+brittlest
+steering
+Broccoli
+jewellery
+muddier
+buoy
+lighters
+act
+excelled
+divert
+inserts
+gained
+your
+taxonomy
+major
+pearl
+box
+scooping
+hypothesize
+evaded
+author
+physical
+beddings
+combs
+scoured
+dosed
+bonuses
+shivers
+Electronic
+fund
+salty
+rafters
+body
+regions
+ultrasound
+packaging
+Functions
+ace
+visit
+company
+chickens
+asserted
+driving
+girl
+voting
+Santa
+germs
+reveres
+valuations
+cures
+whiskeys
+portable
+ornament
+folk
+suppers
+robustest
+polluted
+costumed
+dominoes
+tray
+forthcoming
+Confident
+hopeful
+hummed
+heeds
+webs
+discoursed
+rudest
+outcome
+tees
+Metal
+clipping
+began
+funguses
+toys
+safe
+trekking
+limper
+guests
+individually
+flights
+vapors
+disdained
+automobile
+brightness
+ghosted
+good-looking
+or
+poured
+pluses
+seizures
+Turkish
+cherries
+slimmer
+scramble
+porn
+configuration
+theorist
+toyed
+renowned
+submerged
+bartender
+solidifying
+overridden
+stomachache
+gens
+outbreaks
+fifty
+Cooperative
+inspirations
+Stigma
+punted
+doing
+falcon
+persistent
+year-round
+evidences
+terraces
+notched
+scorer
+vanity
+providing
+tasteless
+plague
+exhales
+triumphed
+underestimates
+lipsticks
+hurriedly
+operational
+carpenter
+uniting
+refresh
+postage
+employer
+diver
+neutralizes
+obsessive
+night
+shortened
+shrinks
+Dust
+Minimal
+iconic
+needless
+rupturing
+crews
+performs
+conversion
+visa
+expertise
+ethnicities
+most noticeable
+validating
+neutralize
+pollock
+reproduction
+hero
+lobbyist
+joking
+bicycling
+migrate
+lent
+telecommunications
+bipartisan
+limpest
+doorstep
+giraffes
+filmmaker
+flows
+refrigerated
+championships
+gave
+cafe
+brink
+trail
+Alaskan
+low-key
+saucepans
+fuses
+poets
+admit
+Physical
+frontal
+confirm
+accessibility
+graph
+worded
+color
+whispered
+summaries
+geographically
+incomprehensible
+scold
+canister
+Thousand
+gear
+foci
+Egyptian
+denial
+provoke
+zest
+chapter
+exams
+crossroads
+slenderer
+intimidates
+Bipolar
+raid
+grappled
+byes
+formulations
+inquiry
+litre
+destroyer
+lousy
+temperaments
+bugged
+smoothed
+helium
+conspires
+grease
+typical
+more bored
+redwood
+earmark
+Renaissance
+everything
+Angel
+fuel
+villain
+treasures
+fathers
+cougars
+spice
+plumbings
+rations
+coached
+exhaling
+masts
+academy
+puddled
+devises
+emperor
+persist
+Immunity
+nova
+honks
+colombian
+rage
+Recreational
+snowier
+darting
+riskier
+grads
+drillings
+opening
+diners
+redeeming
+planting
+excite
+warped
+housekeepers
+corrects
+thresholds
+toils
+behavioral
+sculpture
+waft
+rum
+punishments
+cornering
+marshaled
+teaches
+facility
+fashionable
+fragrant
+deeply
+america
+roller
+hope
+touched
+becks
+erratics
+concerts
+pence
+danish
+Hearings
+divorce
+more prevalent
+converged
+estate
+stair
+gross
+anonymous
+varieties
+positives
+fellas
+bellied
+sinner
+closes
+scooting
+scrapping
+flavouring
+ecstasy
+most plausible
+combined
+inherits
+Christmas
+conjuring
+refers
+benchmarking
+overrode
+koshered
+scalp
+knife
+spouting
+Accuse
+servants
+agriculture
+scattering
+chopsticks
+desolate
+loan
+Effective
+Recording
+hesitated
+guerrillas
+barraged
+groaned
+foolish
+diversified
+vegetables
+land
+society
+precise
+re-election
+trials
+minibus
+musical
+nervous
+resume
+shamming
+Numerous
+intrudes
+boozing
+Intimacy
+honk
+more critical
+sunrise
+summoning
+multiply
+slain
+carriers
+spotlights
+more fragrant
+cams
+scheme
+funeral
+invades
+snags
+poignant
+against
+fancy
+alternately
+infrastructures
+Delta
+arranging
+tractors
+stewing
+models
+group
+particles
+destruction
+cocaine
+contaminates
+newborn
+archaeological
+Fucking
+sowed
+moments
+copping
+resource
+indications
+oilers
+buy
+stroke
+uttering
+crooking
+Room
+centrist
+wedding
+vitamins
+pumped
+Junior
+adoring
+saudi
+more painful
+interruptions
+mesmerizing
+inmate
+asphalting
+almighty
+humble
+histories
+omitting
+illuminate
+pretested
+leashed
+amplifying
+thundering
+backseats
+Here
+silhouettes
+lofting
+nowhere
+more hopeful
+overhauled
+organ
+grit
+on-site
+former
+leapt
+clean
+kitting
+spooning
+rival
+delivering
+buff
+Hungarian
+discovery
+concur
+ring
+millionaire
+abode
+slugging
+Sure
+rise
+preventing
+inhaling
+illustrating
+caliber
+video
+supervises
+investigates
+siren
+skipper
+tops
+mitigates
+bankrupting
+precipitating
+ravine
+Downtown
+difficulty
+uprising
+materialize
+clinch
+appearance
+busies
+heron
+phase
+Fair
+anguish
+visiting
+deprivations
+Striking
+calculations
+peak
+warmed
+shutdown
+heap
+bloodiest
+grayer
+quips
+backups
+story
+drifted
+eve
+speculative
+more competitive
+sludge
+Amen
+hold
+cosier
+surveillance
+rusts
+huddle
+emphasizing
+arrayed
+pollings
+separating
+parlor
+incorrect
+heaped
+wallpapers
+prunes
+sees
+breeders
+refills
+more monstrous
+kneels
+moths
+friendship
+frightened
+Explosions
+clattered
+sprints
+warhead
+intimates
+oneself
+nightly
+winnings
+devaluating
+necklace
+volatiles
+Queer
+dazes
+airmail
+turbo
+revelation
+approached
+syrups
+courthouses
+Therapists
+buckling
+ration
+catalog
+plainly
+decrees
+deports
+Integral
+disclosures
+invoke
+pistons
+occasion
+apprentices
+almost
+champed
+thrusts
+weekend
+confirms
+dispenses
+reasonable
+wing
+Aztec
+obstructed
+rustic
+syrian
+illicit
+Collaborative
+offenders
+unprotected
+indefinitely
+motive
+incarnations
+system
+MRI
+Cosmopolitan
+Furious
+most hopeful
+hogged
+turbulent
+continent
+Class
+riding
+softens
+albanian
+slurs
+tally
+gridlock
+bayou
+re-creating
+sham
+hardening
+languishing
+arrangement
+decay
+preventions
+sidewalks
+poorly
+renovate
+sellers
+summers
+articulate
+beeps
+podding
+thick
+poppies
+varied
+advocates
+debuted
+combining
+guy
+yawns
+places
+follows
+robbery
+blanketing
+collaring
+faxes
+disabling
+Gray
+Principal
+combated
+dumps
+recreation
+straddle
+bursting
+eggplants
+likening
+strife
+pastoral
+more expansive
+Ace
+procedure
+most adventurous
+merely
+crawled
+ponder
+salts
+buddhism
+findings
+joint
+reprint
+calendars
+scrutiny
+loaded
+waterproof
+looks
+busing
+hated
+gourmet
+licensed
+anymore
+buzzes
+tightens
+bestow
+providence
+murdered
+publisher
+reclaiming
+escapes
+indictments
+relative
+weekdays
+Strategic
+lag
+retells
+apes
+quirkier
+penchant
+General
+rugbies
+serve
+archive
+arabic
+more numerical
+medics
+retrain
+structured
+amazing
+infection
+acrylic
+for-profit
+Interrogate
+tapped
+greyest
+purple
+metallic
+cratering
+promoter
+rookie
+register
+magnets
+multimedia
+appointment
+invasions
+styling
+mediums
+sharpening
+burnout
+safer
+masses
+abstracts
+cleaners
+pitted
+sincerity
+robbing
+legitimating
+depress
+gossip
+jumbles
+medaling
+stockpiled
+exaggerations
+whenever
+buyout
+incorporate
+hurdle
+cheated
+eddied
+diagonal
+have
+rectangle
+discreet
+laid
+examining
+Anthems
+internalize
+Nothing
+pockets
+barrenest
+reservation
+predicted
+telegraphs
+bustled
+beside
+purples
+Peels
+allures
+whoops
+snaring
+addicts
+re-enter
+scrambling
+analyse
+heroin
+microorganisms
+watershed
+stipulate
+surely
+Induction
+antioxidant
+wagering
+swelled
+declined
+creaks
+unknown
+catalysts
+twitch
+sinologies
+experiment
+drive
+precursors
+status
+another
+rubber
+transcribes
+mazes
+bureaucrats
+patrolling
+pornography
+spurted
+scarcer
+rocks
+hollowest
+mountainous
+instincts
+zombies
+Port
+slashes
+Abnormality
+ferried
+low-fat
+cheques
+biologist
+rosy
+rustickest
+district
+spin-offs
+reminiscent
+Dismal
+tempo
+revitalized
+repertoires
+prided
+discriminations
+senators
+regulation
+clumps
+jugs
+deadlier
+targets
+soothed
+charcoal
+photon
+grasps
+eternal
+permeate
+tossed
+burrowing
+kicking
+mediocre
+insensitivity
+dodger
+fern
+nighttimes
+relegating
+vibration
+owner
+quick
+masking
+medications
+roguest
+attach
+fiercest
+gi
+tolerating
+lodge
+popes
+savor
+decoys
+lucrative
+seated
+vignette
+disintegration
+steps
+prominences
+sparrows
+injected
+tiniest
+adorned
+contemplation
+firsthand
+rimmed
+resigning
+featuring
+transmitter
+fro
+abnormal
+lids
+probable
+updating
+Reject
+pigeon
+doled
+murmur
+auburn
+Rum
+molest
+redirected
+towel
+dogmas
+newspapers
+pedalled
+viewpoint
+rinses
+magicians
+observatories
+amongst
+restraints
+dine
+sketches
+pinking
+lighter
+wilting
+wealthiest
+speeds
+heartbroken
+Shah
+amounts
+viewers
+whisper
+offshore
+displacements
+radically
+microbial
+postcard
+watering
+persons
+proportional
+switched
+performer
+conspiracy
+withstood
+hurting
+specks
+camouflages
+seconded
+emerald
+user
+con
+vacanting
+scientists
+dependents
+lock
+metabolism
+forearm
+killers
+Plans
+more singular
+minerals
+licked
+etch
+needling
+others
+receptionist
+picketed
+beeches
+sturdy
+lunges
+mindful
+more seasoned
+more peculiar
+ticked
+occur
+humping
+co-authors
+fashion
+Unique
+lieutenant
+skinning
+expectation
+traffickings
+cleverer
+spiral
+stumped
+wiser
+dreads
+Fundamental
+backwards
+sprawled
+chop
+skins
+toilet
+craziest
+investigative
+fireworks
+deflect
+first
+more thoughtless
+continuously
+grazes
+garrisoning
+combination
+Forearm
+preaches
+poke
+ability
+flags
+glance
+kisses
+artery
+runners-up
+documentation
+psychology
+chemistry
+tackling
+Adamant
+eldest
+spells
+graphic
+champing
+seatings
+thawing
+liking
+aspect
+vaulting
+Pandemics
+ancestors
+crushes
+visualizing
+vision
+protector
+Material
+shorts
+overlap
+shriller
+wilt
+Fortunes
+strokes
+suppose
+dicks
+poisoned
+pureed
+weaves
+named
+refrigerating
+vice-president
+Realtor
+crisscrossed
+comfort
+Italian
+firmer
+skirted
+jarred
+intensifying
+loudly
+spectral
+withers
+cloaks
+Kuwaitis
+cons
+cooling
+deceased
+admirer
+repercussions
+clove
+scapegoats
+troupes
+historical
+seedlings
+violated
+punt
+honors
+generators
+trader
+plentiful
+stiffest
+stoniest
+transmitted
+ex-wife
+beverages
+youngest
+dodges
+bright
+steroids
+altos
+eighth
+annoying
+loftiest
+inclusion
+stronghold
+excitements
+deciphered
+flyer
+conveys
+semester
+stuff
+comprise
+benefitting
+stationary
+subscriptions
+vested
+shorten
+Pakistani
+belief
+roved
+half-hours
+aft
+studies
+mandatory
+mirrors
+claps
+oracle
+Conscious
+hackers
+involuntary
+bidders
+crookeder
+marlins
+meander
+Muses
+aesthetic
+optimist
+sentenced
+promises
+Surprised
+bride
+dissatisfies
+skinny
+raincoat
+plannings
+barrel
+Interestingly
+engineered
+reminder
+Faithful
+gee
+adapted
+dissolves
+candlelight
+exudes
+rubbish
+practical
+consoled
+crumbles
+panel
+Inviting
+all-time
+Crimsons
+racism
+wittier
+inaugural
+lesson
+investigators
+melodies
+propagated
+more notorious
+inequalities
+pickled
+gaited
+warrants
+confrontation
+simultaneous
+artistic
+ensuring
+clogging
+pet
+rims
+shells
+pews
+profoundest
+seventh
+turns
+snake
+aces
+scared
+allegedly
+banishes
+failings
+dictating
+levers
+lowered
+customized
+combats
+switch
+freeways
+wrapped
+betrays
+improvisation
+Success
+Produce
+massive
+plasmas
+irritates
+tremendously
+chi
+tailored
+retrieving
+levy
+Garment
+legalized
+rising
+wrongdoings
+wellness
+metaphor
+ailing
+ireland
+condemned
+rodeo
+fairs
+desperate
+Insurgent
+lied
+fiery
+peeking
+hydrogen
+plumes
+emanating
+real
+competitiveness
+soil
+unhealthy
+stormier
+staggered
+backyards
+parchments
+knows
+Alternately
+unusually
+Thermos
+entertainers
+bow
+timeline
+rail
+glow
+sandy
+flannel
+manifest
+off
+ball
+baton
+Must
+graduation
+clauses
+lesser
+dictate
+chimed
+novas
+moreover
+ample
+endangered
+Disruptive
+skylines
+deterrence
+cause
+traders
+lineup
+colons
+knacks
+slimming
+gardened
+responses
+stoved
+refund
+approving
+underlines
+crumples
+prized
+hardens
+supposedly
+marginal
+sturdiest
+washrooms
+preclude
+widow
+Hangings
+diminish
+most mystical
+purge
+grievance
+object
+aches
+shepherding
+connection
+schooling
+occurring
+stance
+wearier
+Winding
+hotter
+repays
+emeritus
+attends
+adjustment
+repression
+mainly
+ticket
+oblivious
+pneumonia
+degraded
+coins
+resigns
+Major
+counsel
+ox
+languish
+quarrels
+generous
+wove
+soak
+succumbs
+treads
+garnishing
+volume
+lettuces
+spams
+neglected
+fouled
+account
+hesitate
+pointing
+discrepancy
+Outer
+specked
+entree
+coerced
+destroying
+send
+ignores
+peering
+pelt
+most dubious
+gritted
+boarded
+flute
+wondered
+giddiest
+snugs
+evacuate
+bankruptcy
+shrouding
+cerebral
+shocking
+climbs
+confine
+more merciful
+homeowner
+guessed
+lawns
+avalanche
+lags
+soles
+overviewing
+mall
+steeping
+stroll
+vaulted
+ripening
+eerier
+deploys
+Scholastic
+couple
+halves
+adamant
+racings
+corpse
+quiver
+rocked
+librarians
+crimes
+predominant
+pickle
+entrusts
+caution
+thrust
+Intern
+pharmacists
+rippling
+tissues
+most linear
+taxation
+winds
+pastels
+whatsoever
+scholarly
+sunshine
+inspector
+contributors
+Puck
+monte
+queerer
+Platinum
+clearly
+Enlightenment
+relegates
+gripped
+provoked
+unheard
+stretches
+laziest
+useless
+Hazels
+senses
+brutalest
+mechanic
+sandals
+discuss
+applies
+dangles
+Probable
+programme
+sips
+alteration
+celebrate
+issue
+tentacle
+because
+lugging
+materials
+Mexico
+Tens
+overthrows
+adore
+destines
+jigged
+worrisome
+streak
+ordered
+ruder
+descent
+abiding
+sen
+struggles
+fixes
+physiological
+squeak
+Havoc
+sightseeing
+Salmon
+grows
+tuxedos
+sugared
+Marshal
+phantom
+marrows
+milieu
+scenic
+cue
+mutters
+Retarded
+whistle
+wardrobes
+swerve
+championship
+ax
+flying
+chromosomes
+rigging
+tunnelling
+area
+fraying
+whacks
+peel
+translator
+dinners
+auditors
+droning
+incline
+cabinets
+stylist
+miraculous
+prayer
+ramming
+their
+menu
+most moderate
+mantled
+shroud
+pans
+pill
+judiciary
+understand
+liner
+membership
+incenses
+undo
+becomes
+Waking
+dyes
+come
+Bit
+alters
+felled
+jerks
+islands
+imbalance
+fishing
+excuses
+placebo
+malpractices
+bilingual
+lousier
+lunatics
+effectivenesses
+fulfilling
+squeaked
+decades
+chef
+two-timing
+adapting
+scans
+floundering
+basil
+shack
+pisses
+favorites
+Austrian
+reverses
+worst
+Syrian
+cherished
+encouragement
+prom
+prodigies
+investigated
+Idiots
+daytime
+climax
+withering
+graveyards
+overtaking
+mightiest
+literate
+enquiry
+Parasite
+omissions
+Mister
+expectations
+central
+pursuit
+pleas
+Vacant
+made
+novice
+container
+internalized
+authors
+witches
+Roughly
+counterpart
+investigate
+doctors
+bite
+snatched
+Archaeology
+brilliantly
+partisans
+slugs
+rampant
+No
+peaking
+proposing
+rolls
+Underground
+exaggerate
+clutters
+solo
+workforces
+tables
+showcases
+stages
+blockings
+condom
+infertility
+headlight
+winged
+crusading
+concern
+challengers
+solaces
+just
+accidental
+retreat
+prosecutes
+OverAll
+bowing
+eddy
+clearances
+endured
+shrimp
+Aesthetic
+hips
+deer
+ribbed
+proceedings
+Substantive
+derives
+spaghetti
+bellowing
+lectures
+slow
+consortium
+more moderate
+sweating
+arks
+conceptualization
+sending
+disputing
+greases
+negotiation
+nourishing
+salon
+bipolar
+representing
+dogma
+misunderstanding
+hauling
+furthest
+precisely
+brightnesses
+flu
+whom
+toxics
+squeezed
+conceptualize
+more dreadful
+steeps
+pigmenting
+platoons
+anthropology
+middle-class
+tiptoes
+motorcycle
+stipulates
+mothers-in-law
+microbes
+scribbling
+outwards
+captivity
+worn
+adopting
+regulatory
+Negro
+Pastorals
+rust
+talks
+composer
+spaced
+stows
+resolutions
+hair
+ugly
+Setting
+assisting
+squander
+remember
+more
+units
+disguised
+ragging
+itching
+singers
+most consistent
+more linear
+lasted
+dignifying
+rods
+beginnings
+flooded
+piers
+rabbis
+elephants
+inns
+occupational
+rumored
+abortions
+sewing
+Fluorescent
+rigged
+disorders
+Real
+safeguarded
+chew
+commemorated
+averages
+modules
+more mechanical
+taboo
+gentle
+walled
+integrated
+dreams
+manpower
+dental
+disc
+shoulder
+curlier
+forth
+exhibited
+Alphabets
+build
+energizing
+chattered
+loading
+rubbered
+also
+biscuit
+payroll
+censor
+reasons
+landmarks
+planed
+giggle
+deporting
+banishing
+vetting
+pebble
+Circle
+egypt
+diminishing
+speedier
+goat
+most explicit
+plaid
+darning
+workshops
+sleeping
+eliminates
+berry
+slays
+mistress
+ooze
+chromes
+defeating
+damped
+ryes
+Greatest
+maturing
+stresses
+three-year
+readies
+utterly
+notifies
+unhappy
+racially
+imparts
+pilots
+Eighteenth
+splashed
+hand-held
+expedition
+requisite
+equity
+certify
+plainest
+skip
+statuses
+amasses
+dynasty
+Sandinista
+eluding
+Majestic
+moral
+Decorated
+discouraged
+census
+commute
+passives
+rubbering
+humanity
+broom
+softening
+league
+spokespersons
+expire
+movie
+masterpiece
+liberate
+needles
+mummy
+benefited
+accordingly
+misted
+recommendation
+most gradual
+ramped
+headmistresses
+bureaucracy
+additives
+navigation
+most dangerous
+tilted
+fast
+rostered
+corner
+teases
+injections
+slummed
+borne
+baking
+angel
+remarkable
+weigh
+chests
+sails
+contour
+continues
+most mortal
+fires
+occurrences
+augment
+twenty-twos
+hang
+scores
+glimpses
+dents
+rejoined
+Bacteria
+gaped
+fronted
+seniors
+maddening
+crowning
+jordanian
+champion's
+slew
+active
+surrendering
+gallons
+lamb
+zipper
+wren
+fluctuations
+nudes
+tarred
+poetries
+rout
+cribbed
+corresponded
+eleven
+Crate
+slants
+roasted
+shrill
+removal
+shoots
+admire
+swivels
+British
+deliberate
+denominations
+dangerously
+Butt
+subsistence
+blunders
+corroborates
+conditions
+consummate
+discount
+level
+ceremony
+registration
+irish
+captor
+refrigerator
+bitching
+celebrity
+fellowships
+plugged
+subjective
+squirrel
+go
+moment
+engaging
+petite
+comment
+abnormality
+perfumes
+communications
+more comfortable
+outweighed
+kidding
+emitting
+assimilated
+sautes
+goddamn
+Pollock
+grieve
+most obese
+demise
+boldly
+islander
+crotches
+jars
+Past
+Greek
+capability
+judicial
+designs
+hustled
+pleading
+nibble
+traumas
+sediments
+unit
+palmed
+cucumber
+accustomed
+outfits
+widening
+miracles
+taliban
+upscaling
+fore
+creepier
+theatrical
+awakened
+smelled
+pentagon
+parental
+typhoons
+grunted
+weest
+dick
+lanterns
+hulls
+attest
+mantras
+illiterate
+windfalls
+possibility
+cribs
+scam
+Lie
+imprint
+quickened
+sexual
+mothered
+diffusing
+discourages
+disgusts
+hired
+riverside
+solicited
+assign
+registers
+korean
+suspiciously
+flagging
+forgave
+me
+first-time
+congratulation
+crumble
+earmarks
+consultant
+electronic
+discourse
+fingers
+wisely
+consensus
+crutch
+Titan
+penis
+most sophisticated
+slope
+trend
+respected
+sifts
+bee
+doctrine
+flinging
+Animated
+lonely
+Famously
+buckle
+shadowed
+higher
+lest
+facets
+anterior
+lizard
+alligator
+appoint
+Incredible
+arrow
+ham
+totals
+canons
+European
+contraceptives
+maneuvering
+censured
+delay
+stronger
+nudging
+march
+confirming
+glimmers
+spell
+measurable
+acts
+personalized
+bills
+surviving
+captains
+Overnight
+erect
+pony
+gothic
+trod
+readouts
+trending
+bows
+wrong
+Holocaust
+mortgaged
+intermediate
+Classic
+Laterals
+hurried
+teddy
+partitions
+instruction
+Bizarre
+massaged
+more intrusive
+Advice
+Iraqis
+constraint
+disconnecting
+luxuries
+travellers
+foreheads
+frighten
+trumped
+spur
+mounted
+snowing
+crumpling
+squadrons
+started
+nests
+glamour
+Able
+breaded
+healing
+ponytail
+wealthy
+vertical
+ghetto
+shaking
+ping-pong
+barkers
+wildflowers
+storm
+originated
+lander
+pedals
+quaker
+resigned
+deduction
+profile
+surgical
+sentiment
+costumes
+finely
+Puffy
+disgusting
+Pop
+enlightens
+champagne
+whales
+fetuses
+shakier
+paradox
+doling
+budging
+tunneled
+Lightning
+retraining
+bridges
+wariest
+unsettling
+optimists
+nobler
+relegate
+lanced
+contractors
+divinities
+counterterrorism
+vary
+escalating
+foggiest
+modifications
+equaled
+mentality
+tallied
+clams
+disseminated
+healthier
+periphery
+sidelined
+fouler
+rues
+repeatedly
+sexier
+fixtures
+compromise
+illegal
+exploration
+terrain
+hey
+censorship
+numbers
+blonder
+panes
+repercussion
+inconsistent
+Potent
+restaurant
+inflated
+martyrs
+baggiest
+ranger
+predicament
+falsest
+lance
+felon
+onward
+prevalence
+heavyweight
+precursor
+replaces
+dry
+stub
+tablespoons
+sand
+shit
+pumpkins
+Autumn
+matures
+productive
+hatched
+ethic
+growling
+sitting
+north
+indicting
+consisting
+creamier
+imprinting
+most customary
+compelling
+reaffirmed
+emerge
+entered
+streaming
+pancakes
+breeder
+weathered
+reorganizes
+glint
+overshadowed
+arc
+oppose
+cores
+gritting
+bombarding
+cob
+tightening
+fitted
+garner
+replayed
+briefed
+enlighten
+infers
+spacious
+count
+gauged
+weekending
+brokered
+stabilize
+pattern
+scratched
+surrogates
+often
+cozier
+bullshit
+restraint
+locals
+spiraling
+hollies
+confiscated
+swiping
+authorized
+replicates
+pours
+gym
+humility
+longevity
+shovelled
+bliss
+joining
+engulfed
+justified
+doomed
+cultural
+lunging
+intentional
+hooked
+significantly
+verge
+weighed
+glossiest
+hoists
+wickedest
+hits
+nonproliferation
+tinge
+increasing
+Ballet
+verify
+lowlands
+frequenting
+remove
+rescuer
+eyelid
+directs
+songs
+sour
+statistical
+extension
+vessel
+withstand
+spans
+storage
+abstracted
+coarsest
+most predatory
+knighted
+skillets
+tangle
+partnerships
+beached
+ruing
+expense
+markets
+animation
+perishes
+dismays
+reused
+reconciling
+diffused
+patient
+insurgencies
+liaison
+swearing
+bridegrooms
+chicks
+manures
+traced
+uniformed
+obscuring
+volcanoes
+snarled
+wake-up
+France
+unfitting
+overthrew
+chutes
+it
+sizzles
+Filipino
+two-day
+cabins
+pored
+pecans
+Ever
+archaeologist
+juvenile
+hats
+salads
+vehicle
+moored
+availability
+realising
+lengthened
+coffee
+contracting
+Impress
+complaints
+nicknamed
+equivalent
+jumping
+glories
+valves
+zeal
+weakens
+depots
+paid
+crated
+curriculums
+gushed
+yens
+took
+lads
+knowledge
+yogas
+Swiss
+snares
+repeals
+workers
+Cardinal
+renouncing
+straits
+Cradle
+most fortunate
+overviews
+incurs
+fleeing
+rescuing
+knifes
+calculating
+beggars
+sensitivities
+winner
+coos
+nineties
+repeated
+solicit
+tarted
+more marvelous
+scalped
+positing
+ribbing
+sessions
+requesting
+sustain
+losses
+gallery
+humour
+stewarding
+inlet
+margarita
+tides
+gloomier
+more cynical
+minding
+clinches
+planing
+couches
+pharmacy
+valued
+skillful
+implementing
+ways
+vows
+Buddhist
+adorn
+gobbles
+iq
+live
+discontents
+grandmothers
+more intentional
+Imaginary
+Honorable
+donating
+insulated
+transcendent
+prowl
+stow
+commence
+skipped
+minivan
+fabrication
+interact
+tolerates
+queer
+dancer
+hear
+reiterated
+bidder
+braked
+creeks
+hesitating
+lament
+reckoning
+infects
+files
+boxing
+rifled
+ratifying
+Giant
+hearing
+Conventional
+viewer
+elected
+healed
+commentators
+manipulated
+huff
+depletions
+distract
+crushed
+batted
+impressive
+Ethical
+clinched
+treasure
+mats
+skimmed
+more social
+preferably
+blinked
+revisions
+potential
+superpower
+beefing
+methods
+leafier
+zionist
+languishes
+fainted
+stayed
+harm
+gracing
+silenter
+expose
+cavalry
+Pupil
+smuggling
+spooked
+sincerer
+crating
+greys
+daring
+chap
+ceased
+distinct
+jabbing
+hamburger
+silicon
+wings
+reviewer
+stringing
+prosecute
+consolidations
+dole
+unearthed
+most preferable
+distribution
+buddied
+ploy
+mobile
+anti-depressants
+transcript
+shed
+re-examine
+allowed
+folder
+Counsel
+blah
+carvers
+litigation
+elaborated
+spite
+zoomed
+adequater
+intermediating
+optical
+whereabouts
+respects
+accompanying
+surrounding
+Seventh
+twenty-one
+rooks
+pees
+particularly
+quits
+blockaded
+sleeker
+screech
+glorying
+designations
+wowing
+strapped
+conceptualizes
+quaking
+openly
+strangest
+nineteenth-century
+minces
+obsolete
+midst
+OVAL
+recoiled
+exercising
+talent
+engineerings
+Teddy
+Trinities
+explicit
+preoccupies
+apologies
+surprises
+perceived
+unrest
+scowled
+suppressing
+cosy
+Appropriate
+Buddha
+grenades
+grizzly
+sausages
+holler
+frailer
+Ecstasy
+molesting
+ms.
+dropped
+appreciate
+escort
+most feasible
+strewn
+strickens
+tornado
+co-host
+high-end
+sheer
+globe
+spotlighting
+snatching
+champs
+skyrocket
+approximate
+ache
+harassments
+Impatiently
+institutes
+vibe
+customizes
+chalk
+furs
+prehistoric
+painting
+terrifies
+surer
+jesus
+intellectual
+aligns
+Draft
+cooperatives
+goings
+lagoon
+pats
+expectancy
+eyesight
+module
+template
+more righteous
+provision
+misunderstandings
+sluggisher
+lofty
+servers
+unpredictable
+Reserved
+dragging
+memories
+terrorized
+elms
+rhymed
+gis
+infrastructure
+evaporates
+unmarried
+swims
+displayed
+sail
+sleepier
+describe
+nip
+slaughtered
+fitter
+muscle
+Improper
+barbs
+swifter
+heel
+Progressive
+misfortune
+trios
+tripped
+inscribing
+Spell
+dares
+Enforcements
+Clear
+couch
+realistic
+donors
+amber
+autobiographies
+Painfully
+sorrowed
+fairly
+rapidest
+appreciates
+dependency
+nourishes
+meticulously
+contractions
+re-establish
+stellar
+penetrate
+beards
+suppresses
+flinch
+hardship
+tier
+flowering
+tones
+snapshots
+overdue
+fundamental
+riders
+genuine
+raised
+insistent
+cheer
+setback
+Continent
+random
+scattered
+muting
+castled
+stunts
+investments
+damping
+unveil
+excel
+clothed
+tames
+great
+thrown
+summon
+more feasible
+mathematicians
+throng
+jetting
+knives
+orchards
+fugitives
+Rabbi
+animosities
+rebels
+devastatings
+buffeted
+most foolish
+thirstier
+faltered
+stabbed
+slumping
+electric
+Alien
+transits
+same-sex
+pathology
+Software
+referrals
+german
+fools
+faced
+symposia
+bikers
+forty-five
+verification
+primed
+flicks
+progressively
+toothache
+musing
+elaborating
+decays
+twinkle
+Constellations
+hotdogs
+washer
+chronicles
+gossiped
+Nationwide
+more abundant
+Imperfect
+statutory
+Consciousness
+heartland
+realm
+mini
+violinist
+consciously
+beaux
+more eventual
+nicer
+married
+mildly
+Cant
+clothes
+multilateral
+suggests
+sideline
+Specially
+coned
+tavern
+computes
+suspecting
+feuding
+root
+brisk
+grafted
+the
+composes
+nylon
+ignited
+subdivisions
+coordinated
+squared
+landmark
+homicide
+clicks
+bros
+brooded
+insert
+transmit
+leafing
+poising
+geographers
+protections
+chiefly
+artisans
+Crazy
+federal
+wincing
+complement
+beak
+Connected
+roadblocks
+mentor
+protected
+mingles
+subtracting
+slaving
+puff
+barrier
+cataloguing
+paragraphs
+sustainability
+mountains
+junks
+shipped
+intriguing
+unchastity
+confusion
+deserts
+pedestrian
+problem-solving
+bulldogs
+archaeologists
+noticeably
+emphasize
+bookshelf
+setup
+skill
+we
+hospitality
+healthy
+excavate
+various
+nerves
+affiliation
+transfers
+hastens
+favorable
+negro
+processes
+preface
+tongued
+reproducing
+voyaging
+fixing
+degree
+everyone
+colours
+breakdown
+fiances
+stools
+breed
+overeat
+algorithm
+impairments
+integral
+maternal
+Alternatively
+notations
+outsets
+base
+listening
+Of
+assemblies
+stricter
+sudden
+kiting
+ruled
+mustache
+roadway
+downing
+revolting
+amputation
+highlighted
+huge
+industrialization
+gathered
+clam
+bad
+academically
+economical
+tainting
+situations
+sunk
+heads
+Boiling
+severing
+more marginal
+robe
+electronically
+ripest
+flashlights
+position
+delaying
+qualification
+noising
+distresses
+sample
+more tedious
+retiring
+watcher
+bikes
+badges
+tingling
+plans
+owls
+ushering
+trinity
+supporting
+degrading
+inserted
+deed
+juggle
+strickened
+obligation
+Unknown
+normals
+flooding
+gayer
+wilderness
+qualifications
+giggling
+raccoon
+moderated
+squeaking
+majorities
+typing
+most similar
+tenuous
+pick
+vigils
+tradition
+garlands
+talker
+mock
+intervention
+grooving
+hatching
+bailing
+wobbling
+beads
+vibrating
+mangoes
+behaving
+arrogant
+March
+prays
+secret
+alluded
+rinsed
+biceps
+sighted
+income
+Downstairs
+bookstore
+hosts
+caricaturing
+regard
+shrewd
+vies
+projectors
+old-fashioned
+shutting
+order
+grumbling
+panda
+mooned
+complained
+followings
+regretted
+verified
+freights
+bachelors
+disarming
+operating
+even
+discoveries
+smokiest
+zincing
+jockeys
+divergent
+signaled
+under
+organizer
+queers
+Jesus
+derivative
+purports
+faculties
+known
+conglomerates
+loaned
+preschoolers
+maple
+pervaded
+tend
+outright
+convene
+videotape
+specially
+backlashed
+pits
+oils
+swats
+bouncing
+replied
+chapel
+cadres
+competencies
+drown
+canoe
+mined
+Intellectual
+bossed
+carbon
+viability
+agendas
+syllables
+pretest
+psychologically
+fiscal
+preliminary
+glooming
+kilograms
+Off
+provokes
+palates
+close-up
+auditorium
+bin
+harmonies
+elusive
+innovation
+cumin
+trauma
+wherever
+confuse
+Diagonal
+hounding
+ho
+wounding
+contemporary
+appetizers
+oozing
+Depressive
+Backwards
+understood
+Metallic
+biomass
+consideration
+lighted
+shimmer
+condemn
+outraging
+footnotes
+scalps
+Scopes
+courageous
+steadying
+attachments
+crampeder
+iced
+sermon
+marbled
+evolved
+levee
+logic
+rounding
+unconstitutional
+onslaught
+vigorously
+lender
+skyline
+request
+undergoing
+quilts
+fortnights
+pub
+copy
+teenager
+intention
+conducted
+batons
+dissertations
+alcoholic
+balanced
+injures
+allocates
+flecked
+rated
+exploded
+neither
+tour
+smack
+bats
+more tragic
+comedies
+shopper
+eclipsing
+quantitative
+stay
+more ordinary
+universes
+saint
+tags
+siblings
+listened
+Northwest
+wrestle
+subtlest
+flurry
+trudge
+vulgarest
+defeat
+battalions
+gods
+most fashionable
+evident
+passive
+scatter
+glooms
+browned
+rewrote
+huddling
+backcountry
+ardenter
+creole
+without
+revise
+frayed
+horizontal
+porridge
+coarse
+dash
+stole
+chronicling
+retaliation
+suspicion
+pearled
+remedying
+correctly
+tired
+examiner
+waiting-rooms
+thronged
+starker
+barker
+exacter
+efficiently
+postulating
+diagnosed
+homesteads
+griping
+constant
+neighbour
+professor
+demanding
+Web
+juices
+aliened
+rigid
+refutes
+commodity
+commenced
+keywords
+nipple
+blondest
+Cougar
+rub
+River
+handicappeds
+Starks
+improving
+ageing
+sack
+roadblock
+obstruct
+Outdoor
+bask
+tufts
+High
+ducking
+unbelievably
+wetted
+tidies
+bitched
+humps
+commands
+cypress
+tick
+Croatian
+prospering
+disseminates
+Railroad
+teenage
+repairmen
+demonstrators
+diffusion
+hell
+shortcoming
+devastate
+maritime
+bashes
+interpret
+branded
+cringe
+chess
+speaker
+charmed
+awfuller
+price
+dissolved
+bloodshed
+undoes
+fascinate
+barbecuing
+pullout
+dolls
+serviced
+Vicious
+counseling
+Humanities
+greece
+CAN
+Bible
+roommates
+contrast
+betters
+proposed
+sparks
+sieging
+tendons
+liked
+external
+handshakes
+most intrusive
+desirable
+wholesale
+explains
+step
+ideology
+fears
+auditioned
+revolved
+spicing
+outgoings
+fur
+hammered
+Road
+apter
+Lunar
+revenues
+visibility
+mailbox
+abdomen
+scout
+elude
+shoppers
+clump
+garments
+logically
+interpreters
+implicitly
+dirtiest
+sage
+mourns
+Frozen
+Arabic
+suggestion
+damp
+aggregate
+delinquency
+forecasts
+editorials
+That
+formulae
+reversal
+Aerial
+Color
+shrewder
+love
+mobilized
+spectator
+terrific
+caribbean
+flannels
+jury
+ambushes
+drives
+disliked
+chucking
+suspension
+balder
+afflicting
+counting
+saucepan
+literacy
+premium
+clamming
+most economical
+determinant
+golfed
+darling
+rarest
+buyer
+pickups
+hearts
+Active
+outside
+trot
+barge
+downloading
+maker
+stewed
+trick
+endorsement
+rude
+arousals
+protester
+more conspicuous
+garment
+passageway
+contempt
+malaria
+grind
+mammogram
+nurse
+defaulting
+prose
+blighting
+ignored
+varies
+PC
+modes
+captioned
+binge
+brothers
+reportedly
+bribing
+Courageous
+taints
+commanding
+two-timed
+irrationals
+bat
+beef
+pledging
+sleek
+candles
+hungers
+broccoli
+thinks
+attires
+living
+feeds
+energetic
+landlords
+disappearance
+Mission
+popping
+ski
+enrolling
+Cracked
+background
+nibbled
+absentees
+nastier
+Mental
+jugged
+pagan
+Keeper
+cartel
+dive
+glints
+constituting
+belles
+salesgirls
+Sixteen
+pleads
+mammoths
+ordinary
+pawned
+ketchup
+cubing
+adrenaline
+issuer
+readiest
+commuted
+Ruby
+boulevards
+more relevant
+Wounded
+unsettle
+correlation
+toiled
+Original
+move
+eruptions
+masks
+outfielders
+swollen
+admirals
+federalist
+sparkled
+rivets
+prizing
+rediscovering
+tales
+soap
+crackles
+roles
+provides
+survivals
+variance
+department
+skipping
+blueprints
+ragged
+slotted
+kicks
+huts
+picketing
+skiers
+thesis
+destroyers
+restructures
+click
+tutored
+stylishness
+delicates
+rows
+dedicates
+blocked
+funnelling
+asia
+readied
+meshes
+blanker
+espoused
+hangar
+religiously
+profounder
+theirs
+assault
+dominated
+maximized
+substitutes
+tram
+headnotes
+Echo
+more profitable
+requiring
+undergraduate
+combustions
+Improbable
+applauding
+terminals
+wonderful
+widowing
+preventives
+Bloody
+backpacked
+Born
+ghosts
+booming
+YES
+rooftops
+micro
+platoon
+lantern
+leaping
+sprung
+insults
+layering
+arming
+sufficed
+menaced
+stillnesses
+Dirt
+board
+notables
+intimacy
+habit
+lagging
+player
+stiffening
+recruits
+sluggishest
+fillets
+allow
+bash
+consenting
+dream
+usher
+illest
+regimen
+enclave
+boiled
+savors
+evenly
+spiked
+regaining
+caps
+mild
+feasted
+loophole
+simulating
+meltdowns
+contexts
+clothing
+concurred
+abrupter
+rich
+sublimed
+GUT
+Voids
+populists
+assistance
+dedicate
+more energetic
+centerpiece
+riper
+renovating
+sideway
+twinned
+shaman
+prowled
+substituted
+homelessness
+offstages
+honorary
+six-month
+preludes
+severs
+conception
+channeling
+gunshot
+Frenchman
+couture
+burning
+chopper
+natives
+outnumbered
+Unimportant
+perking
+Racial
+perpetuates
+zippers
+Colombian
+deflects
+workmen
+resignations
+proper
+hypothesizes
+cooing
+facts
+alike
+marker
+toweling
+infant
+revived
+first-round
+Sucker
+incidental
+amputations
+relatives
+communicating
+alcohol
+ride
+disturbing
+appetite
+visualizes
+gayest
+downtown
+quietly
+functioning
+Elevated
+soliciting
+resolving
+creaking
+appreciated
+napa
+booklets
+Arab
+unattractive
+trousers
+cite
+garages
+reasoning
+determinants
+revisited
+danger
+baked
+patronage
+flaring
+fittings
+Sub-Saharan
+pinched
+standardize
+skyrocketing
+region
+southeast
+doorway
+wiping
+illustration
+comedian
+triangle
+striping
+toasted
+checkouts
+Forceful
+Saudi
+attested
+secretly
+celebrating
+nonprofit
+schemas
+trumpeting
+most cynical
+floundered
+simulated
+applause
+last
+along
+spading
+kidnapping
+graced
+challenger
+villains
+digests
+vodkas
+explain
+locking
+absence
+unfairly
+gulping
+bumming
+indictment
+august
+tempted
+restorations
+colonel
+puppets
+set
+attached
+bean
+discharged
+born
+parodies
+painfully
+wrongdoing
+deity
+intern
+Posteriors
+adjusted
+echoing
+clouded
+spoilt
+fending
+sometimes
+deepened
+moon
+exemplifying
+yelped
+sightsee
+farewells
+squirmed
+allusion
+privatization
+auras
+existence
+mistrusted
+thumbs
+he
+bunnies
+eastern
+motherly
+worsens
+appoints
+understandable
+Info
+unofficial
+democrat
+servicing
+analyze
+sparsest
+genders
+gang
+appeared
+tellers
+fund-raising
+relaxation
+posited
+front-runner
+dictations
+resumes
+imam
+race
+paved
+clears
+context
+trumpets
+neutrons
+planner
+skeptical
+reservoir
+trigger
+meat
+churn
+tear
+bravery
+skirting
+boats
+creeping
+onto
+radical
+drama
+sniff
+impacted
+deepening
+Porcelain
+millimetres
+effected
+most insecure
+housewives
+palm
+dispatcher
+smallest
+perpetrated
+join
+Brazilians
+ledge
+real-world
+figuring
+reap
+small-town
+barricades
+downsized
+aiming
+maidens
+horrifies
+strolled
+ginned
+bewildered
+analyst
+commons
+gay
+spills
+Radio
+compress
+crabbing
+chilled
+straps
+confidence
+workshop
+glamor
+legitimizing
+corridor
+pig
+stem
+cruel
+pinnacling
+behave
+tenant
+catholic
+inspect
+dealings
+nontraditional
+convergence
+seldom
+seats
+contrasting
+indict
+pretesting
+more explicit
+regular
+razors
+lacrosse
+relocation
+terrorist
+legitimated
+waterfall
+pointer
+emergence
+staunching
+weavers
+extraordinarily
+scheming
+streets
+touchdowns
+emulate
+celery
+proofed
+garlic
+antique
+necked
+center
+peacekeepers
+then
+someone
+pant
+forbidding
+faring
+tabs
+metrics
+baths
+rulers
+farm
+guises
+virus
+cartels
+chattering
+Sky
+hedging
+buttoning
+worthy
+waterproofing
+boulevard
+adulthood
+grassland
+use
+rationals
+extensively
+antennae
+oxide
+fitting
+swirling
+ceasing
+washers
+tuna
+baled
+oak
+rebounds
+apostle
+pharmacist
+Depositions
+heroine
+carpets
+guessing
+haunts
+Utopian
+awaking
+advocate
+blitzed
+pediatricians
+preparations
+Denial
+broadens
+texturing
+foreseeing
+Counter
+ankles
+impeachments
+delta
+ardent
+stinted
+Germany
+muffin
+sweetheart
+Safari
+candies
+briskly
+rustling
+priority
+officer
+tripled
+overlays
+mansions
+endlessly
+rapped
+spotted
+pinged
+chart
+complimenting
+trotting
+outdated
+columns
+excavations
+gag
+gaiting
+e-mail
+grouped
+wishes
+shaped
+gum
+cosmos
+disapproval
+adhere
+offered
+suite
+knocks
+nominates
+illuminated
+doc
+sauced
+doubled
+rediscovered
+trying
+Metabolic
+our
+mantling
+materializing
+griffin
+strive
+rides
+incidence
+supplements
+do
+sparser
+smelling
+sincerest
+raining
+Boon
+stabled
+contracts
+furnishings
+restless
+balance
+shins
+co-worker
+dime
+categorized
+treadmill
+delis
+torques
+suggest
+entertain
+fulfill
+intentionally
+spelling
+Northeast
+clogs
+loaves
+obstructing
+swerves
+greasy
+odd
+entitles
+thyroids
+pounding
+relishes
+shoving
+cases
+fluffiest
+glaze
+bun
+brushed
+transcribed
+secondaries
+achieved
+upgrades
+erupted
+procedures
+interactive
+straddled
+gating
+fluxing
+outweighing
+slot
+bundled
+washed
+most inconvenient
+termite
+spectators
+hungering
+african
+shallower
+supremacy
+tickets
+administrator
+devices
+charters
+generations
+notch
+welshing
+compliment
+discovers
+earmarking
+Reins
+packet
+on
+nervously
+vitalities
+saucer
+hound
+stumble
+boozes
+himself
+auditor
+sloppier
+drained
+hazed
+singing
+browses
+pesticides
+frameworks
+fracturing
+paddle
+camouflaged
+ineffective
+murky
+Plentys
+copies
+Pioneering
+effectively
+attempt
+sponsor
+engender
+reference
+porch
+retails
+most recent
+summons
+communist
+prices
+acute
+compiles
+Aspens
+presses
+thirteen
+lug
+wheel
+freedoms
+oppressing
+imprisons
+bellows
+intruded
+analysis
+aspen
+criticisms
+polished
+pinches
+backing
+deleted
+unseen
+alerts
+complicating
+tolling
+line
+rendezvouses
+abdomens
+ruffle
+fable
+protons
+jeopardized
+tribute
+saved
+mayors
+Manila
+stating
+referee
+lobbies
+kidnap
+South
+feared
+indeed
+stylistic
+triumphant
+ebbed
+derived
+gliding
+steels
+colorful
+applicants
+subsidized
+thug
+America
+innovations
+mighty
+Creatives
+Audio
+precedent
+framework
+qualitative
+seminary
+Insistent
+consortia
+barbecue
+Definitive
+stalling
+accentuates
+intestine
+tofu
+jumbled
+shabbier
+situated
+hive
+clog
+rebate
+tracks
+eradicating
+appointments
+twenty-two
+legs
+lumber
+cord
+sung
+credibility
+kins
+riots
+categorize
+environment
+richly
+kited
+commissioned
+inflation
+Fighting
+rehabilitate
+busted
+inscriptions
+adapt
+augmented
+skillet
+undertake
+flag
+fanned
+snacked
+portraying
+snowflake
+more enjoyable
+Sober
+consents
+displaces
+violates
+reunions
+cosmopolitan
+clustered
+masquerading
+monsters
+citrus
+dales
+menaces
+caller
+corralled
+selected
+reverencing
+ignorance
+Mogul
+hampered
+attend
+whimper
+reprimands
+revealed
+sports
+individual
+straight
+northwest
+pirated
+Pastes
+induce
+Prose
+Collegiate
+foundations
+depiction
+mileage
+medalling
+stock
+barks
+bolsters
+anonymity
+hole
+Alcoholic
+unfolds
+containment
+intersections
+humiliation
+Contentious
+Stone
+nodded
+hopped
+Arctic
+picturing
+Broken
+Racist
+aye
+winning
+wigs
+paradigm
+recordings
+hysteria
+Prince
+fogs
+blocking
+visitation
+Magical
+playoff
+cab
+exert
+buzzed
+corporation
+tensions
+fed
+forceful
+tinkered
+flocked
+pickup
+mallards
+garrison
+spin
+collision
+liability
+most charitable
+Polos
+harp
+overshadow
+provisional
+postmen
+indicates
+slits
+painters
+Noble
+concession
+heeling
+grittier
+britain
+virtual
+stills
+famously
+wronging
+comprised
+rustics
+Bold
+stirred
+there
+spines
+precarious
+eyebrows
+Unlimited
+hug
+flavour
+deviations
+dragon
+marinade
+exhale
+clogged
+scholarships
+neutron
+more sensational
+equality
+hatred
+reopening
+essay
+novelists
+crowding
+bubbled
+compacted
+logging
+showed
+crunching
+dwarves
+relocations
+most proficient
+immature
+torso
+England
+wearables
+fountains
+aping
+grosses
+coo
+insulted
+vaccine
+ambers
+payments
+Memorial
+funnels
+material
+sentence
+fabricating
+sharper
+watermelon
+dad
+insofar
+appropriately
+challenge
+defend
+giddying
+prophet
+misleads
+conceptualizations
+denounce
+grimmest
+alarms
+Another
+deviation
+collide
+proms
+undesirable
+yell
+cleaner
+leave
+wreckage
+scooters
+most assertive
+understands
+personalizing
+angers
+troublesome
+more ironic
+convinced
+latitude
+filibuster
+routs
+crystal
+greatly
+insanest
+Fantasy
+subscribed
+darlings
+imperialism
+shah
+remembers
+album
+clearings
+fulfillments
+taut
+trivials
+sandwich
+jaw
+nurtures
+thailand
+built-in
+humours
+professing
+sucks
+won
+knuckle
+barrener
+watercolor
+vegetarian
+stepped
+pasta
+wanting
+precious
+instances
+dusks
+redemption
+sharpeners
+wadded
+snack
+graces
+parked
+stooping
+outgoing
+So
+neutrinos
+outings
+windier
+confronts
+deters
+conflict
+shoreline
+refute
+dozing
+mistake
+chestnuts
+Deductible
+thing
+allusions
+offer
+coverage
+hobbles
+downsizing
+raspberries
+coupled
+density
+topping
+jut
+options
+Jupiter
+eternities
+plate
+immunity
+delinquents
+glitter
+barging
+hotlines
+hospitalize
+blends
+cleanses
+spanned
+picnicked
+environmentalists
+cram
+devoutest
+renewing
+debuting
+structural
+naming
+overlook
+groping
+policewoman
+ruthless
+maintains
+Parisians
+marvel
+grieves
+reconnaissance
+homosexuality
+substance
+slyest
+bill
+organizations
+demolished
+revering
+comebacks
+systems
+deployments
+subsidizing
+itineraries
+yields
+care
+hairy
+cloudiest
+careless
+afterwards
+upbringing
+allure
+skinned
+reminds
+paging
+flexing
+Tart
+runway
+inconvenient
+pay
+awakes
+insurgent
+objects
+bolder
+stinks
+September
+differentiates
+glistening
+singular
+exiling
+placements
+lugs
+pimps
+as
+bras
+crust
+sells
+prolongs
+gloomed
+talked
+pride
+Euro
+perfecting
+cats
+yen
+developers
+calculate
+elaborate
+parlors
+humiliations
+jetted
+heaviest
+splashes
+Optimum
+solidified
+echo
+who
+Prohibition
+needs
+Motor
+spruces
+piped
+hanks
+volunteering
+lion
+wring
+concussion
+paramilitary
+narrowest
+slaughters
+shrilling
+loops
+OK
+most murderous
+liberalization
+Licensed
+rapist
+sprang
+constituted
+Colonization
+significance
+aims
+contaminants
+this
+sandiest
+drum
+raven
+Detailed
+warehouses
+transplant
+most massive
+spikes
+ironically
+aunt
+grievances
+clumping
+reader
+self
+assertive
+genres
+pins
+freeze
+memorized
+medieval
+tad
+seminar
+kitted
+mad
+reflexions
+arrives
+saucers
+Nominal
+his
+climates
+sixty
+tortoise
+Watershed
+refreshed
+artificially
+transients
+wit
+mecca
+crashed
+topic
+clinic
+revive
+robbed
+more credible
+punishment
+poetic
+most rebellious
+yarning
+likes
+auxiliaries
+accessing
+isolated
+fight
+Alarm
+views
+blackberry
+daycares
+Unfamiliar
+espousing
+gouge
+consultations
+glides
+exposes
+gutters
+patsies
+estimates
+blurred
+abler
+waged
+kidded
+neutrino
+fostering
+torrents
+lawmakers
+infect
+nested
+smashed
+articulations
+prosecutor
+savvies
+whoop
+politician
+enveloping
+most generic
+reforming
+package
+temper
+fundamentalism
+computers
+compare
+awakening
+conducting
+glossy
+refuge
+creed
+terming
+dentists
+waned
+pot
+freestyling
+loneliness
+edges
+hones
+swallowed
+cut-off
+tarp
+euro
+fruits
+respective
+soils
+canopied
+texts
+doll
+eeriest
+daughter
+county
+bum
+muttering
+compounding
+Just
+below
+drug
+Attraction
+recognition
+hefty
+pretense
+teaching
+prisons
+spades
+smiling
+Residual
+persuasive
+adolescent
+spouts
+mast
+chick
+enjoys
+furious
+roundabouts
+syncing
+exclusion
+clinician
+pakistani
+advance
+complying
+smothers
+minivans
+galloping
+suitable
+witness
+motorists
+dictionaries
+neutralizing
+bulked
+gloves
+penetrating
+screenplay
+hottest
+attacking
+gouged
+biopsy
+pointed
+fridges
+roughest
+headlining
+Oxford
+vetoes
+bombing
+relieving
+Romanian
+ion
+saws
+lighten
+new
+distrusted
+quads
+polishes
+graze
+post
+affiliated
+deducts
+topped
+striped
+envoy
+Six
+rubbles
+drilling
+intelligence
+joints
+drake
+considerable
+overload
+bathed
+lusted
+likens
+sincerely
+boundary
+lesioning
+copes
+yahoos
+gardening
+captors
+anecdotes
+aggregates
+phrase
+coating
+discharges
+remembrances
+strongest
+diverser
+realizing
+avoided
+re-creates
+stalking
+army
+moles
+unloaded
+portraits
+hub
+imparted
+correctness
+parishes
+plighting
+holocaust
+exchanged
+accompanied
+members
+housekeeper
+barefoot
+vein
+impossible
+sweats
+asparaguses
+became
+violence
+impositions
+Overweight
+semiconductors
+represents
+mugged
+schoolchild
+throat
+recovery
+Fell
+plop
+awaken
+agreeing
+astonishing
+promotes
+reuse
+booed
+divorced
+tobaccos
+aluminium
+glitters
+rinse
+synthetics
+Educated
+catering
+fifties
+raider
+burrows
+aridest
+exploiting
+cliches
+most conceivable
+classic
+Gypsy
+stats
+eyeballing
+madames
+outpatient
+endure
+rhythm
+tattooing
+dealt
+built
+produce
+explosives
+conjunction
+burrowed
+specialties
+Architecture
+shut
+brewery
+hurt
+shower
+plug
+paling
+lawyers
+winters
+robber
+cubbed
+smudged
+loneliest
+more nervous
+reviewers
+gusts
+venture
+peninsulas
+decoying
+separatists
+troll
+standards
+spaces
+bellowed
+Weekly
+composed
+toting
+genera
+community-based
+couched
+handkerchiefs
+cornerstone
+Mindful
+cricket
+brotherhoods
+thyme
+decent
+tinier
+visualize
+gripes
+clerical
+sculpted
+Nigger
+nineteen
+kernels
+distinguish
+pineapple
+Jesuit
+latent
+meters
+paining
+Maya
+balding
+jolt
+shirts
+fingerprints
+develops
+bred
+relinquish
+spouted
+delegated
+cousins
+chaps
+persian
+buddies
+bathes
+most analogous
+Adorable
+concepts
+braids
+buoyed
+authority
+twice
+puppies
+Disturbed
+workings
+seeding
+sneak
+fluffy
+predicts
+nap
+backfires
+affording
+fiddles
+behaviour
+affirmative
+allowance
+keepers
+nominated
+holistic
+sherry
+scent
+fulfils
+pandemic
+kids
+prostheses
+that
+injured
+generates
+magistrate
+belonged
+slanted
+exemplified
+Devoted
+fronts
+socialized
+invokes
+infantry
+sours
+burrow
+conspiring
+pioneers
+chirping
+architect
+marbling
+mood
+munches
+trashed
+celebrates
+Our
+Upbeat
+ducks
+hacking
+operators
+lumped
+pleasanter
+Corporate
+boulder
+most toxic
+duplicate
+immerse
+shortstop
+cluster
+ducts
+x-rayed
+most ordinary
+imprisoned
+weighing
+collecting
+fisher
+mostly
+participant
+inferior
+Intensive
+yard
+vendor
+beneficiary
+Greed
+surrogated
+robbers
+Cuisine
+unavailable
+lifestyle
+Anglo
+welshes
+saltiest
+Uniform
+faceted
+kips
+monstrous
+dropping
+smoothly
+planetary
+pricking
+sunflower
+pro
+Lebanese
+scariest
+minimizing
+complicated
+golfers
+anti-american
+House
+proclamation
+complaint
+Medically
+upwards
+Mischief
+no
+skills
+Chief
+Intel
+wed
+term
+subjecting
+unlimited
+kickoffs
+preserves
+those
+tacking
+Ornament
+internet
+gradually
+skidded
+cumulative
+scapegoat
+byzantine
+public
+boom
+modality
+roosters
+bridging
+encompassed
+tempering
+prescribed
+pitfall
+queries
+adventure
+stomached
+accumulations
+ascending
+summertime
+flakes
+wavers
+steam
+citations
+mathematical
+growl
+Similar
+slay
+sedans
+spawn
+beans
+democrats
+burgeoning
+delight
+bachelor
+instant
+Vodka
+rounds
+references
+highly
+semantic
+basis
+disrupted
+gladder
+cylinders
+snarl
+cautiously
+Um
+marring
+vegetable
+mosque
+Orients
+confidently
+planked
+polishing
+choirs
+fortunately
+insider
+adversities
+shareholder
+stomps
+wails
+eucalyptuses
+explanatory
+assesses
+pierced
+clouting
+fir
+vast
+reversing
+scrutinies
+compressing
+slapping
+tinged
+comments
+cheerfully
+Hard-core
+proven
+securities
+marine
+knitting
+rediscovers
+Confederates
+bulkier
+Logo
+Mystical
+refereed
+immortal
+dam
+underestimated
+operations
+growled
+counterproductive
+platforms
+borough
+asylums
+shanked
+euros
+missions
+kayaking
+fictions
+Medieval
+inventive
+rift
+mum
+chore
+funnel
+bitterest
+demonstration
+dispatching
+summarized
+upright
+syllable
+suburb
+Allies
+brand-new
+shading
+sync
+digested
+envisioned
+devastates
+explored
+limousines
+rink
+marginalizes
+dances
+budget
+syndicated
+biomedical
+petitioned
+most repressive
+Dialogues
+designates
+nuts
+foreman
+temple
+exceptionally
+fleeting
+courtesy
+penalize
+bettered
+campfires
+clapping
+publishers
+Perception
+deposits
+debt
+harnessed
+births
+tickle
+flushing
+autographing
+foster
+tramples
+pyramids
+plighted
+plantation
+Cycling
+huger
+conceded
+inward
+hawk
+spoke
+buys
+Emotional
+taxied
+For
+dropouts
+legislator
+subsidy
+thundered
+jellies
+sultans
+stunned
+blinded
+basked
+sweatier
+israel
+boiling
+projector
+shielding
+clinging
+intensity
+performed
+enjoy
+shooting
+residual
+exposure
+mural
+rusticker
+banqueting
+tinkers
+refereeing
+creased
+thief
+revival
+ousted
+character
+more poignant
+most oppressive
+venting
+anglican
+charitable
+croatian
+twenty
+more careful
+administering
+frontier
+Serial
+corking
+best-known
+beholds
+tents
+kindergarten
+ankle
+landfill
+single
+curiosities
+hoof
+prisoner
+plenty
+sways
+fleshed
+transformations
+disordering
+Yahoo
+Czech
+californians
+gypsy
+carriages
+clergy
+marbles
+Backstage
+represent
+craning
+approximated
+historians
+Complicated
+confides
+stairway
+rendering
+merchants
+front-paged
+pioneer
+postal
+inventor
+Stepmother
+lace
+crew
+fringe
+keys
+gram
+hind
+shanks
+lilies
+firmest
+simmers
+unfavorable
+cheddar
+rule
+inquiries
+husbands
+retail
+inherit
+subpoenaed
+pertained
+evangelists
+olives
+cereal
+deceives
+molecular
+belted
+ply
+resents
+broke
+Midwest
+reprimanding
+covers
+soprano
+reassuring
+foundation
+blight
+committed
+closings
+electorates
+conspired
+inaccurate
+mountain
+induced
+eggs
+indication
+expo
+most fruitful
+lapels
+carelessly
+presuming
+rewrites
+Knob
+swiftest
+propelled
+lodged
+crosstalked
+torquing
+legislators
+inconsistency
+vomits
+grimaced
+wide
+degradation
+automated
+vietnamese
+mire
+realise
+clenching
+bacterial
+gays
+verticals
+thanked
+traditional
+gotten
+morality
+housework
+cautions
+Thy
+browsing
+driven
+delighted
+ward
+nations
+pry
+sunburned
+thrash
+annoys
+tropical
+deficits
+accuracy
+cut
+Bomb
+insisting
+lovers
+fieriest
+tandems
+unnatural
+forgiving
+parasites
+filed
+ha
+more troublesome
+vie
+strolling
+fuzzy
+swung
+establish
+wrapping
+lovely
+intolerance
+hierarchy
+events
+urban
+shocked
+villagers
+monitoring
+brain
+victor
+merit
+ore
+refugee
+circulars
+dentist
+anthropologist
+Hopeful
+landfills
+lusts
+respondent
+sneakers
+okays
+Wireless
+colourful
+Sewer
+photographers
+loved
+marble
+orphans
+disclosing
+montage
+sparking
+cloning
+petty
+horizon
+sleeves
+partition
+potent
+dazing
+formations
+finances
+kilos
+loses
+boarding
+apprenticing
+nets
+sheds
+deputies
+teacher-librarian
+fan
+buffs
+students
+glad
+hopefully
+pi
+cloves
+gracefully
+simplifying
+medically
+upsetting
+journey
+rewritten
+far
+cholesterol
+supposed
+most prosperous
+cities
+most repetitive
+feasting
+lacked
+baffle
+telephoning
+yugoslav
+saints
+colonization
+greeting
+arising
+indoors
+swifts
+heightened
+entertained
+lowland
+worsening
+surface
+milestones
+Upside
+Me
+masquerade
+cultivated
+patches
+rite
+invitations
+wanning
+itself
+denim
+endow
+wafts
+fives
+tanning
+navy
+corral
+soiled
+dizzy
+squint
+savaged
+lusting
+males
+pie
+endemic
+gladdest
+Snakes
+concluding
+manufacturers
+pardoning
+stitching
+capacity
+tutors
+dinosaurs
+more affordable
+towing
+mortal
+sifting
+conscience
+simulations
+remaking
+tethered
+implicates
+nesting
+hello
+heavier
+primes
+diplomat
+closed
+damns
+specifies
+addressed
+depression
+prods
+chopping
+concede
+wearable
+wallets
+fool
+yearn
+tunes
+buffer
+alerting
+skinniest
+rallied
+smother
+Fatal
+aspirin
+complimented
+dismissing
+cool
+vitality
+Seven
+prospered
+evaluation
+pictured
+zoos
+tidiest
+industrializes
+mutations
+hesitates
+runs
+incites
+partially
+recurrences
+fate
+share
+influencing
+semicircle
+restrain
+casino
+fewer
+methodology
+more proficient
+fathered
+sensation
+seminal
+cuba
+several
+cork
+appreciation
+hatch
+break
+courting
+deterrents
+squirted
+condition
+glimmer
+business
+relativity
+computerized
+dedicated
+trended
+suffering
+creditor
+sighed
+pinks
+discs
+Robins
+tint
+valley
+sheen
+States
+railways
+mitt
+compensation
+acquits
+debut
+coming
+ladder
+visionary
+harvest
+Fantastic
+chromosome
+caressed
+Open
+streaking
+conglomerated
+occupants
+wrench
+reproductive
+disrupt
+Spain
+fridge
+confronted
+pinning
+most flexible
+reeling
+bastards
+sit
+imagining
+Upper
+afford
+blue
+dent
+swapping
+rove
+being
+affect
+subset
+rebuild
+hems
+hissing
+jupiter
+acquainting
+dribbled
+hose
+notifications
+shaken
+arcing
+Hogans
+shot
+clocks
+somebody
+spamming
+Topical
+pounded
+most unofficial
+hands-on
+curtails
+manages
+graveyard
+researched
+broadest
+staffed
+struck
+dipped
+pier
+principal
+Physically
+more sophisticated
+mellow
+brigaded
+voiced
+proceeds
+tragedies
+addressing
+endings
+grassy
+sprint
+uncomfortable
+clashed
+spec
+re-examining
+forgetting
+storytelling
+throats
+transported
+transiting
+cosying
+annex
+best-selling
+rebel
+willows
+tripods
+piloting
+jigging
+labour
+marathon
+Ales
+lunch
+tiding
+sum
+pensioned
+touchdown
+waited
+tenting
+handkerchief
+messiest
+clown
+foyers
+Elastic
+exclusively
+umpire
+seat
+somali
+battered
+grammes
+volts
+highlighting
+indulging
+replace
+tweaked
+skated
+fashioning
+indian
+IQ
+twelve
+ivy
+displacing
+crooks
+lulling
+diapered
+contraceptive
+concerted
+hovered
+praising
+beautiful
+inhaled
+households
+displaced
+asshole
+disputed
+erupting
+objective
+socking
+quit
+idealistic
+mutter
+co-hosting
+attendance
+cubic
+high-rise
+protect
+organization
+graver
+renew
+angering
+uncanny
+thicken
+tended
+flipping
+lost
+outlaw
+victorian
+notion
+biochemistry
+Lily
+string
+analyzing
+Beta
+network
+divide
+Bravely
+swedish
+wowed
+knuckles
+plastics
+folly
+lurching
+worldviews
+testaments
+crosses
+criminal
+gorging
+unequal
+trafficking
+sophistication
+scoop
+good-byes
+Those
+openness
+fastballs
+incredibly
+helps
+honking
+hijacker
+idles
+creasing
+explaining
+victors
+rioting
+actually
+toted
+softly
+shakers
+discredits
+exhaled
+arose
+campsite
+knuckled
+construe
+lowering
+stupid
+parent
+sexually
+dumbest
+slower
+persona
+totally
+Vital
+plutonium
+furrows
+bring
+challenged
+instance
+romanced
+impersonal
+shoulders
+spotting
+pillow
+compensations
+rent
+erecting
+indoor
+dollied
+infiltrates
+moody
+coexistence
+supplemental
+Deed
+ta
+two-times
+pooling
+gated
+tenderer
+untidy
+marginalize
+truces
+franchised
+heartiest
+most cheerful
+examiners
+curious
+Bar
+Modernization
+twitched
+pinching
+managers
+derbies
+fling
+diffuses
+posturing
+dukes
+pedagogical
+sharks
+builds
+acoustic
+veterans
+humbled
+indicts
+hushing
+bitten
+kidneys
+occupancies
+enacts
+thickness
+mentors
+pavilion
+whisked
+challenges
+photography
+banners
+warriors
+mellowest
+queens
+instructor
+token
+telecom
+bounty
+stooped
+interestingly
+counts
+tuberculosis
+anybody
+most obvious
+fantasy
+stewarded
+hugging
+watercolors
+discomfort
+militarily
+illumination
+Juvenile
+pretends
+Metaphor
+consider
+eventual
+destabilizing
+racial
+purport
+study
+microscope
+deteriorated
+humanist
+sociology
+swinging
+sphere
+milked
+lastings
+monkeying
+unsalted
+nostalgia
+indicative
+crank
+triggered
+folded
+slighted
+tolerate
+stark
+searches
+raiding
+meanses
+tenser
+liberals
+striven
+pee
+slivers
+ranching
+most poisonous
+obtain
+screening
+console
+proverbs
+voice-over
+crapping
+personae
+revisit
+resurrect
+reimburse
+barking
+trough
+patrol
+abyss
+lorry
+adequatest
+legislation
+staffings
+grave
+Operational
+inaugurated
+dying
+thaws
+wreaked
+trenched
+examines
+Abundance
+teenagers
+carpet
+sank
+listen
+grassed
+commerce
+penalizes
+Peace
+juncture
+dimmest
+frustrated
+comfortable
+butcher
+cranks
+printed
+criterion
+tails
+leftovers
+picks
+banquet
+oases
+interns
+staining
+behaviours
+access
+Prevailing
+cabin
+destructive
+whoa
+fewest
+disagree
+infusing
+campaigned
+skinner
+gin
+whim
+medic
+benchmarked
+sponsoring
+most expensive
+arises
+crows
+shinings
+prudence
+justifications
+enthusiastic
+deliberation
+Slaves
+meditation
+junior
+edging
+are
+risk-taking
+welcomed
+rebuilds
+jerking
+narrations
+ghost's
+pest
+relevance
+prodding
+affirming
+adjective
+testify
+slogans
+Less
+grow
+charged
+hunt
+limped
+insignificant
+ignitions
+speaks
+suddenest
+proposes
+spatial
+consumer
+marrying
+Sunday
+devaluate
+held
+redeem
+scenery
+grumble
+aiding
+weaver
+casual
+broader
+intervenes
+scoffs
+Dramatic
+identifiable
+encircled
+indicators
+Persuasive
+Blocking
+looms
+mocks
+fireplaces
+basically
+worldwide
+shades
+Bingo
+medians
+discontent
+primates
+publicizes
+foothill
+advancing
+residing
+relays
+rimming
+contraception
+coping
+scholarship
+worms
+intestines
+seeks
+eschew
+yellowest
+sculpt
+loosed
+animosity
+colonialism
+sesames
+torts
+discrepancies
+discussed
+northeastern
+explanation
+airy
+remoted
+propeller
+convenience
+remorsed
+racks
+blitz
+float
+pentagons
+Festivals
+ensuing
+Devil
+circles
+failures
+Fierce
+selects
+reopen
+craved
+since
+show
+jammed
+lunchtime
+collaborative
+sympathize
+potassium
+browning
+Consciously
+Unresolved
+blurring
+vulnerable
+cavaliers
+necessitating
+notoriously
+Nazis
+shadier
+fended
+forges
+harass
+answering
+disadvantaged
+deliberates
+pressed
+scrape
+truths
+lectured
+Suicided
+geed
+chows
+congregation
+wobbles
+delves
+processed
+coroner
+Eve
+dean
+hotline
+south
+contain
+dumbbell
+fragile
+immense
+raged
+pasting
+revising
+Clinically
+obstruction
+discipline
+coastal
+weights
+electrode
+humorous
+primer
+defendants
+yarned
+playboys
+forgive
+Continuing
+specialists
+diploma
+self-concept
+linings
+subdivision
+volcanic
+Hood
+riddles
+tinkering
+standardizing
+variant
+engage
+haunted
+seduced
+burnt
+self-interest
+booking
+crown
+Red
+damages
+mirroring
+pizzas
+simplifies
+esteem
+bustle
+squandering
+seeds
+deploy
+correspondences
+antidote
+employee
+visitor
+gala
+hard-line
+plausible
+awarded
+artifact
+equal
+weekended
+steadily
+onset
+folklore
+juicier
+benefit
+amuses
+alluring
+grown
+beetled
+expert
+tagging
+regimens
+necking
+bud
+loads
+crimson
+ufo
+befriend
+disconnected
+marketing
+muddied
+most
+referees
+smiled
+after-school
+entice
+loving
+stature
+sissies
+pacing
+recent
+disparity
+uniqueness
+spins
+gasoline
+one-half
+molding
+lure
+award
+seal
+Precision
+Twilights
+round
+battled
+imitation
+deductions
+Cantor
+violins
+dose
+ego
+footsteps
+twig
+fastening
+Nigerians
+last-minute
+pained
+distrusting
+undercuts
+resins
+gradual
+rockets
+twinkling
+larvae
+surveys
+unusual
+breezes
+patriarchs
+plots
+fingerprint
+word
+bent
+reshaping
+Aboriginal
+assures
+Thailand
+verging
+represented
+angry
+ranging
+bunks
+sunglasses
+energized
+scenting
+ferns
+playground
+circulates
+norm
+coexists
+African
+competing
+outreaches
+thrashing
+stipulating
+massaging
+technical
+consistently
+ecological
+Mars
+underlining
+implicit
+memorizing
+meriting
+invade
+animating
+combatants
+condoned
+clowning
+wrenching
+Warmings
+claim
+heath
+scarf
+lockers
+most violent
+Accustomed
+oversized
+tangos
+carries
+contracted
+footing
+eludes
+Exquisite
+vernacular
+saves
+more successful
+fungi
+ambiguity
+pagans
+sprucing
+ranged
+hover
+bored
+silences
+abide
+Unilateral
+integrity
+increased
+shoved
+sought
+staggers
+capitalized
+creativity
+ices
+complementing
+sprinkling
+honeymooned
+twirled
+lungs
+more notable
+fellowship
+hanging
+publishes
+initiated
+reinforcements
+locales
+jogs
+negligible
+prestige
+prophecy
+pump
+yogurts
+twentieth
+dancing
+slams
+jades
+wives
+ship
+differentiating
+communism
+opinions
+scamming
+alright
+consistency
+checkpoints
+cancelled
+vividest
+posters
+nightclubs
+pointless
+well-known
+Freak
+trickles
+communicated
+understanding
+Resume
+volleyballs
+plane
+whiffs
+realtor
+mentioned
+conformed
+spring
+more global
+guards
+leathers
+immersed
+doughnuts
+stares
+depicting
+collaboration
+numbs
+minors
+unravels
+implicating
+sticks
+enrollment
+promoted
+index
+earth
+privatizing
+flexes
+flowers
+approximates
+adversity
+snatch
+harming
+thwarts
+sparrow
+spruce
+chimneys
+badly
+revolve
+audited
+plateauing
+prince
+waterfalls
+voucher
+vanguard
+sacrificed
+tulips
+postulated
+silly
+hassles
+dusk
+worth
+preyed
+implementation
+impairing
+Evangelical
+thumps
+zulu
+harmony
+yahoo
+unconscious
+naturalists
+brides
+scraped
+wets
+piecing
+cars
+scrapes
+worshipped
+evaluates
+worked
+guilted
+remainders
+afforded
+startle
+scratches
+restored
+weaving
+whispers
+arithmetic
+conditioner
+taping
+credentials
+insanity
+compliances
+marrow
+motifs
+intercourses
+communicate
+filmmakers
+growing
+resorting
+proxy
+flats
+steward
+constellation
+eradicated
+pleasantly
+Recorder
+You
+assimilates
+vetted
+schoolgirls
+sheep
+ethnics
+robotic
+lengthen
+magician
+suicide
+highlight
+halved
+flirt
+gravitational
+closest
+wales
+compounds
+funkiest
+intrusive
+disco
+self-portrait
+shrines
+sputter
+captain
+syringes
+curry
+back
+seasides
+highland
+utterances
+bustles
+optimizes
+shelters
+error
+educations
+landings
+replay
+recoiling
+craving
+Finnish
+situates
+boys
+footings
+bunches
+faint
+blend
+billionaires
+bully
+corrupts
+suicidal
+drifts
+christian
+redeems
+sweeties
+scanner
+Partial
+pancake
+aftermath
+inwards
+dubious
+exhibit
+flips
+organism
+pushes
+burgers
+prosecuted
+candidacy
+gamblers
+permeates
+marinaded
+Sage
+intimately
+recruited
+potteries
+generating
+originally
+Cautious
+puddle
+personality
+allowances
+tinted
+keeper
+reorganizing
+deserted
+robust
+beacons
+elbows
+dismissed
+babying
+diamonds
+crystals
+crests
+textured
+franticker
+decides
+dizzier
+belongings
+preciser
+reverenced
+chuckle
+accumulation
+incense
+zoning
+slowest
+unilateral
+twenty-firsts
+milking
+Supreme
+drinker
+inclination
+historian
+falls
+stall
+Norwegian
+resign
+prowls
+hemming
+radiations
+thaw
+Africa
+psyching
+dissipates
+shingled
+lineages
+bargains
+intervened
+Lipsticks
+cringed
+yarns
+undone
+bedside
+consist
+breakers
+dials
+trappings
+chime
+powered
+probably
+veiling
+captives
+yearned
+gadgets
+spies
+months
+philosophy
+capital
+freelance
+consolidated
+Protocol
+mystical
+settle
+visible
+gosh
+familiarity
+degenerates
+shaky
+giraffe
+tufted
+resignation
+location
+voyages
+curving
+shutdowns
+manifester
+breath
+faking
+t-shirts
+funded
+navigator
+knotting
+Maxims
+archers
+shortens
+demonstrates
+duplicated
+ebony
+weakened
+instituting
+conceal
+trajectories
+immensely
+ambivalent
+barbing
+impressions
+fights
+resting
+Gothic
+sightseed
+Jet
+canceling
+donna
+acquitted
+validation
+martini
+one-on-one
+positive
+diapering
+instead
+biopsying
+habitats
+sculptor
+synced
+hollowing
+quail
+create
+prohibition
+arch
+cameramen
+Colorful
+quarreled
+defies
+happening
+peaceful
+forces
+angling
+attacker
+bridegroom
+sleeps
+hearty
+stakeholders
+prettiest
+crisscrosses
+tinges
+ORE
+sunscreen
+gig
+clinically
+inception
+nephew
+flexibility
+stringent
+Different
+black-and-white
+poppy
+picnics
+participation
+haze
+poles
+ally
+entries
+supered
+Amass
+convoyed
+nuances
+scope
+safest
+noodle
+dieted
+watchers
+ransomed
+wheelchair
+realises
+narrowly
+spoon
+Refuse
+galley
+additionally
+chilly
+diverting
+most informal
+colony
+mutual
+daggers
+blistering
+misuses
+slows
+participates
+bias
+Mean
+generational
+ordinance
+passer-by
+sick
+costume
+sleeve
+gravels
+conflicts
+whips
+catalyst
+analog
+Blackberry
+precipitates
+fireplace
+fabulous
+pm
+avalanched
+predator
+Parmesan
+puddling
+befriending
+odours
+foliage
+coloring
+glue
+inherently
+editing
+mommas
+pleasing
+contends
+carters
+backpacking
+oysters
+allocated
+squirrels
+tongue
+teapots
+aura
+negotiated
+disregarding
+bowed
+penetrated
+baby-sitter
+textures
+woo
+fields
+etches
+rung
+although
+contentions
+with
+Idle
+demos
+most fragrant
+psychologist
+lengths
+channelled
+theatres
+subcommittee
+federation
+syndicating
+Lament
+graduations
+trademarks
+elder
+ploys
+leaps
+withholds
+avenue
+barricade
+pureeing
+industrial
+strewing
+uncertainty
+sexuality
+Commonly
+colleagues
+enhanced
+widespread
+geographer
+videos
+hacks
+uninterrupted
+warm
+concerned
+emptiness
+dig
+desolated
+thematic
+Appalling
+sorting
+hinting
+performance
+drizzles
+She
+encode
+leaning
+more memorable
+sobers
+edgy
+sweatshirts
+firming
+wondering
+craps
+exacerbates
+greets
+swatting
+chemists
+Quaker
+soup
+luring
+compete
+detached
+camouflage
+decked
+tightened
+binaries
+ministering
+winced
+cropping
+blackout
+hijacks
+conceding
+costliest
+identifies
+pianos
+protectors
+janitor
+remedied
+Democratic
+movers
+bloodier
+cleanup
+included
+tennis
+federations
+nickels
+expended
+eagle
+humiliating
+frankest
+trumping
+fearful
+satin
+rehabilitation
+thankful
+exchange
+implants
+polio
+ranchers
+corporal
+penguin
+knees
+nitrates
+Farewell
+items
+rationalize
+repair
+Italy
+inhabitant
+raping
+halfway
+feeling
+more seductive
+wounds
+smokers
+Martian
+more brilliant
+blonde
+hooves
+palms
+myth
+hamstrings
+home
+downloaded
+mortaring
+intersect
+ghosting
+melancholy
+interested
+convictions
+fragmentation
+centimeter
+hurls
+hem
+junkies
+pause
+recognizable
+reach
+gem
+peacocks
+extreme
+political
+meantime
+powders
+lebanese
+whiffed
+photoed
+bonding
+construction
+spoil
+youngster
+reckless
+located
+complexion
+rhetoric
+growers
+rouging
+swellings
+testing
+infecting
+ablest
+contract
+conviction
+balances
+most credible
+enjoyable
+task
+knock
+two-way
+stoving
+earnings
+restraining
+hop
+textbook
+escorts
+weaponry
+jacket
+purchasing
+unlocks
+eyeball
+Buddhism
+weight
+mottos
+more prominent
+strand
+millimetre
+crossed
+utilizes
+plowing
+faming
+whirled
+haring
+headlights
+tenderest
+restructured
+Jeep
+most social
+leafy
+liberal
+hearths
+paradigms
+navigates
+accomplishment
+owing
+subsets
+wherein
+inscribes
+lingers
+reformed
+solving
+avert
+declaring
+towering
+affective
+cavalries
+packers
+plateaued
+signs
+shifts
+arching
+paralyzing
+Signing
+more negative
+cramp
+potters
+engines
+reviewed
+stints
+dump
+engulf
+backfired
+re-established
+Committees
+yearning
+invested
+evens
+boils
+rebounded
+nonviolent
+sneers
+Psychoanalysis
+endowed
+residue
+most original
+sleepers
+envying
+warranted
+hideous
+bargain
+availed
+Japan
+enriched
+Dole
+cluttered
+chuckling
+zested
+eases
+doubtful
+widened
+demonstrate
+purse
+metro
+overboard
+isolates
+arrests
+Kurdish
+throws
+scrutinized
+merge
+Iron
+sight
+unsuitable
+republic
+envelopes
+assume
+survival
+tactical
+dismantled
+grimmer
+devastated
+culturally
+most various
+casings
+skeptic
+consists
+mind-set
+motions
+java
+surpass
+lord
+fourth
+bargaining
+systematic
+enlarges
+prefaced
+empirically
+gracious
+Poles
+satisfaction
+induces
+skewers
+twenty-six
+relocates
+succumbed
+more reasonable
+mythical
+salesperson
+Bearded
+spain
+cosies
+drip
+stickers
+cloth
+adversaries
+incentives
+continua
+redefines
+rave
+lumbered
+horse
+inhabiting
+verdict
+filings
+icebergs
+comprehend
+outpost
+Newborn
+espouses
+interaction
+Suicides
+bike
+discussions
+male
+windows
+accused
+slating
+commuting
+meltdown
+easier
+resistant
+silvers
+transactions
+investor
+ant
+cynicism
+rangers
+type
+estranged
+clock
+chases
+invader
+shook
+dashing
+Midway
+stare
+harry
+firms
+refill
+robberies
+report
+united
+trout
+centred
+awareness
+substantial
+pledge
+herded
+religions
+machos
+more festive
+baptism
+Grunt
+luncheons
+more sweeping
+apologized
+brokerage
+Violently
+punctuating
+astronomers
+cranked
+gambles
+obtaining
+legitimize
+veers
+assimilate
+binged
+short
+numbing
+buddings
+polar
+most delicious
+stumbles
+opera
+destroy
+pop
+pertains
+game
+assess
+oxygen
+continuing
+pasture
+patrols
+explorers
+flaked
+bypasses
+strapping
+bashed
+sic
+dizzied
+three-dimensional
+tilting
+opposition
+default
+amenity
+cheque
+necessitate
+unconventional
+foregrounds
+jock
+wrinkled
+feat
+Swan
+prettier
+grinned
+attitudes
+probe
+berth
+stigmata
+stream
+prime
+sneezing
+inhabited
+refrigerate
+tracking
+yawned
+harped
+supernova
+pension
+bogged
+bandit
+stack
+curves
+flattering
+grew
+Species
+refunding
+sugar
+siting
+installment
+spook
+deny
+privacy
+sanding
+signifies
+whiles
+whipped
+graphs
+boycotted
+revenge
+synthesizes
+accommodating
+augmenting
+applauded
+Well
+lambs
+ours
+distincter
+beetle
+prosperous
+sponsored
+certainly
+lanes
+grill
+stilled
+prints
+recites
+culinary
+forcefully
+latch
+spearheads
+Belgian
+begun
+chalking
+Project
+book
+curls
+slur
+signaling
+tasks
+bush
+hires
+panned
+cracking
+gingerly
+expends
+relive
+rituals
+action
+restarting
+few
+tasty
+courtesies
+asked
+apartments
+bishop
+pang
+Whereby
+us
+multicultural
+foreseen
+prune
+mires
+sipped
+rawer
+zoned
+Black
+nerve
+definitions
+networks
+contributions
+suppression
+dwelling
+cannoned
+tastes
+employs
+screen
+pageant
+Heaven
+tormented
+laden
+sealed
+tyrant
+stimulates
+modification
+panic
+clarity
+grating
+accrue
+moustache
+waivers
+sands
+till
+Male
+lip
+esteeming
+ditching
+Friday
+hippies
+deities
+He
+breeds
+petal
+hump
+fellow
+thrones
+auctioned
+soured
+sprawl
+incorporation
+disparities
+unfortunate
+rob
+fruitful
+excelling
+shift
+tedious
+Conveniently
+awards
+factual
+basks
+mound
+Graffiti
+Don
+regret
+cultivations
+rod
+professionalism
+support
+expel
+groupings
+racketing
+spelt
+reserve
+breakfast
+marriage
+paying
+lotion
+treat
+advocating
+consumption
+comics
+meal
+mistrusts
+piety
+spiralled
+beer
+bullpen
+tellings
+enroll
+groped
+wagon
+cramped
+amazed
+relaxes
+role
+terriblest
+violating
+constantly
+ebb
+most normal
+relieved
+evading
+nuclear
+undermining
+ban
+happen
+saying
+deliveries
+entrepreneur
+deposition
+rearranging
+unjust
+fallen
+Impediment
+analysed
+apprehend
+obliges
+promote
+rejoin
+aired
+phenomenon
+patriarchal
+lancing
+equipped
+ordinaries
+injunction
+bazaars
+carrier
+runaway
+loafed
+observed
+ironing
+continuums
+blogs
+taiwanese
+overlay
+unleash
+rushes
+campgrounds
+bulletins
+homesick
+attainments
+replicated
+letterbox
+unload
+kidnapped
+badge
+Competition
+sliver
+skilful
+displace
+bone
+devils
+pigment
+stouter
+bingo
+insetted
+batter
+mormon
+purses
+confusing
+hared
+studying
+among
+dissipated
+recognised
+flailed
+Summit
+captures
+deans
+entirer
+boos
+preoccupation
+talkings
+cold
+rears
+discriminate
+dissolution
+exceeding
+reviewing
+trimmed
+Upstream
+sinuses
+more similar
+excerpting
+sheltered
+slacked
+scored
+dispatch
+distortions
+redefined
+dredge
+pities
+penetrates
+woke
+frowns
+engineer
+foremost
+coining
+low-level
+whistles
+simplicity
+maturest
+bumping
+doses
+nominees
+examples
+monitorings
+finds
+poster
+frozen
+mount
+direr
+interpretation
+muse
+impartial
+warns
+find
+blurted
+animate
+boycotts
+Interests
+soaked
+obliging
+ended
+tutor
+sketched
+altitude
+pleased
+frequently
+neared
+heaters
+process
+nationalism
+barbecued
+rites
+baker
+fared
+creases
+inspects
+terminations
+turning
+fully
+calming
+border
+slid
+dunes
+urgently
+officers
+motherland
+capturing
+payoff
+deteriorate
+self-confidence
+staunched
+howl
+French
+grove
+wooden
+frames
+massacre
+Midwestern
+jails
+manifested
+bearing
+divorcing
+jockeyed
+cutting
+tanking
+exposed
+hand
+graphing
+feeders
+paving
+dissatisfying
+charges
+stiffen
+disagrees
+independence
+persistents
+shallow
+smacks
+opponents
+invest
+claimed
+quanta
+compartment
+slogan
+ages
+quarrying
+tempos
+peacock
+cranberries
+distinguished
+decries
+snowiest
+chooses
+more special
+finishes
+permitted
+dustiest
+linguistic
+tomatoes
+atheist
+favours
+solicits
+anarchy
+avail
+Adequately
+necessitates
+coexisting
+Up
+most elegant
+Luck
+takeoff
+Breasts
+cup
+community
+bullet
+configurations
+nicking
+rbi
+beige
+conforms
+dense
+financing
+spasms
+tiger
+stressor
+method
+client
+Marxist
+parking
+embraces
+libertarian
+elastics
+heck
+defaults
+poet
+rearview
+dribble
+park
+mice
+sanest
+cakes
+internal
+We
+sly
+administrations
+deposited
+twigs
+eagerly
+cone
+historic
+mortar
+studios
+obscured
+flown
+philosophical
+handcuffed
+tweak
+mocking
+manageable
+intercepts
+hugs
+tyre
+thumbed
+excavation
+fixture
+flecks
+Breast
+mile
+stung
+descended
+scoring
+stumping
+outnumbers
+secretaries-general
+terrors
+adjustable
+voyaged
+Blonde
+underscores
+devise
+knuckling
+ramification
+Equivalent
+trendiest
+encircle
+limos
+secrets
+oasis
+philosophies
+outnumber
+triples
+disliking
+relentless
+pastorals
+researching
+dubbed
+qualifying
+lovelier
+restrains
+debated
+contestants
+Expansive
+geologist
+grills
+wiring
+solider
+trapping
+flowed
+courtyard
+sniffing
+territories
+breedings
+tyres
+relishing
+protruded
+expenditures
+soaped
+downturn
+crackling
+majored
+citizenship
+faiths
+diversifies
+slacks
+blurs
+denying
+teammate
+doctrines
+more comparable
+mask
+overcoats
+fury
+sprigs
+showroom
+forgiven
+individualized
+hectares
+asphalt
+kayak
+countertop
+readying
+most prudent
+shuns
+compounded
+madness
+mazed
+chatting
+compiling
+nuclei
+headiest
+coincide
+suspects
+exhibitions
+savvied
+bouts
+secretaries
+dickens
+most experienced
+foreclosure
+blown
+effects
+frog
+stimuli
+Coutures
+comparable
+famed
+genesis
+within
+flex
+hijacked
+trap
+axe
+gelling
+subtracts
+golf
+colonial
+pools
+brute
+gone
+boat
+thrived
+witted
+crowded
+enacted
+koshers
+bed
+occupations
+circulating
+runoffs
+skidding
+skiing
+Unpublished
+screened
+begged
+instructive
+upbeat
+lighthouses
+jams
+disciplined
+heaven
+amazes
+declaration
+hoard
+two-year
+forming
+festivity
+pronounce
+detachments
+comfortably
+roared
+grooves
+subtler
+snipers
+Bottom
+interviewer
+bodyguard
+peruvian
+taxing
+beggar
+signifying
+lying
+vibrated
+barely
+East
+analysing
+imams
+replies
+energize
+initial
+amuse
+bribed
+cure
+tiny
+chips
+Tribune
+logged
+bolted
+labours
+attics
+cache
+strayed
+waitressing
+socials
+God
+reveling
+Britain
+smashes
+style
+necessarily
+most genuine
+instrument
+nominee
+cashier
+tanned
+extends
+fangs
+establishment
+Gourmets
+exploits
+thighs
+genome
+Blitz
+heartbeats
+licenses
+inferred
+coasters
+halls
+third
+search
+enthusiastically
+platters
+trimming
+threading
+landscaping
+mercies
+gravel
+owned
+awfully
+re-electing
+self-esteem
+retained
+delinquent
+dwellings
+grading
+backfiring
+prostituted
+racked
+URLs
+redeemed
+intimidated
+sequencing
+bitterness
+mouthing
+re
+interrupting
+Amazon
+bloc
+pretty
+mistier
+Unsuitable
+blueberries
+breakups
+doctorate
+scaling
+distinguishes
+shootout
+ramping
+mown
+ascend
+patenting
+thickens
+momentum
+Ye
+dominican
+centuries
+tunas
+gal
+Worldwide
+uncovers
+resisting
+kilo
+ufos
+experience
+idol
+Fin
+eccentrics
+messed
+silentest
+FATE
+dominating
+contrasts
+liver
+most tolerant
+brim
+humiliate
+marijuana
+contractor
+Lead
+dyed
+catch
+coils
+gagged
+grammar
+mechanics
+siege
+galactic
+fingernails
+merriest
+professionally
+Distinctive
+canisters
+points
+giggles
+Dual
+shapes
+wearies
+fashions
+abound
+Intense
+shuttle
+constructs
+relevant
+most gorgeous
+bookings
+pelted
+carols
+commends
+bewares
+battling
+dedicating
+cranking
+clasped
+doping
+broths
+modified
+numbering
+closets
+prostitutes
+parting
+risen
+heavenly
+redirect
+dynamites
+appealed
+earthquake
+paled
+thronging
+linger
+Oceania
+scoot
+platinum
+artworks
+tremble
+witch
+output
+premises
+most regular
+wasting
+twisted
+constitutions
+hurries
+category
+infiltrate
+rumour
+brown
+deterring
+reshapes
+uneasy
+overshadows
+scorn
+grope
+entrance
+impeach
+dinosaur
+Dancer
+emerging
+chiming
+gallon
+honesty
+sneeze
+beckoned
+prudent
+tallest
+queue
+nanny
+valentines
+areas
+honoring
+concreted
+eagerer
+collides
+varsity
+journeys
+Serenity
+pondered
+wearied
+blades
+coziest
+impacts
+craters
+civic
+shredding
+bakeries
+accessed
+avant-garde
+lagoons
+spendings
+eclipses
+weaker
+zeros
+ledgers
+restructuring
+serviceman
+shrugged
+tugging
+recipient
+detaching
+flash
+solitude
+generalized
+scars
+dioceses
+spotlighted
+provocatives
+crocodiles
+most selfish
+prosecutions
+Nope
+truthful
+scoops
+bulging
+textual
+consortiums
+Import
+tensing
+mellows
+Database
+somehow
+wizards
+decline
+typewriter
+most accurate
+dragged
+pumpkin
+somberest
+conveyed
+subject
+Flashbacks
+taming
+ooh
+entrusting
+charities
+undid
+crowed
+presumption
+Early
+correctest
+delegating
+hillside
+catalogue
+mattress
+feather
+contacting
+pantry
+sturdier
+sparkle
+wilted
+accented
+paroling
+derails
+rather
+contented
+Internet
+skateboards
+oatmeal
+dampens
+mead
+crater
+pastors
+canadian
+revels
+feet
+fee
+whooped
+button
+measure
+Moon
+aware
+amassed
+averaging
+muttered
+thickest
+colonist
+dumbbells
+cough
+harness
+pageants
+most enormous
+demon
+most intentional
+camping
+Fourteenth
+confidential
+cutter
+greengrocers
+mist
+transient
+Identity
+rainstorm
+channelling
+irresponsible
+enameling
+hybrids
+seminaries
+cached
+completed
+Sub
+satins
+bullied
+robed
+widowed
+marketers
+Ginger
+vetoing
+brag
+transmitting
+bronco
+bronzed
+immigrations
+privatizes
+coarsely
+gleans
+immune
+tricked
+satisfactory
+turkeys
+commissioners
+uphold
+trustees
+bombard
+deceptive
+carried
+optimistic
+presumably
+exacerbated
+correctional
+watchdogs
+right-wings
+follow-up
+stirrings
+exporters
+hierarchical
+frosting
+ascertains
+abandon
+assigns
+isolation
+entail
+apprehends
+loser
+june
+yeses
+explore
+catastrophic
+wafting
+toss
+forensics
+arouses
+Legislator
+heaps
+maturer
+wretched
+dot
+misdemeanor
+interface
+blade
+Tastings
+emissions
+vicious
+faster
+fleet
+orbit
+nestling
+attributable
+inner-city
+institutional
+belly
+listings
+cucumbers
+steals
+soothes
+disperses
+settled
+mining
+backyard
+evacuations
+biopsied
+interplay
+average
+paraphrase
+merchant
+emanated
+Flashlight
+pierces
+sister
+perfectly
+copped
+interdisciplinary
+taco
+imprison
+exports
+jaguar
+calibers
+accelerator
+condominium
+marvelling
+cords
+evacuation
+freshest
+pervades
+livers
+ashore
+securing
+crashing
+illusion
+slewed
+boxes
+hijack
+yep
+syndrome
+compensated
+clarification
+sopranos
+easter
+rebelling
+protesting
+reinvented
+spitting
+desserts
+criticized
+expels
+famine
+yelling
+hamlet
+incensing
+intervals
+peek
+etched
+candid
+rosters
+more repetitive
+categorizing
+mixer
+doughs
+Trillion
+orphaned
+quirkiest
+Sweden
+tee
+inspired
+putting
+spend
+yes
+career
+clipped
+more ingenious
+eschewing
+naturally
+islamic
+expulsion
+commonplace
+collared
+indirect
+arsenal
+braining
+sofas
+pricks
+airline
+fraternity
+Attach
+perk
+whale
+temp
+drowning
+protruding
+deference
+crisscross
+collects
+Double
+Mediterranean
+prescribing
+penalized
+commissioner
+exhaustion
+ethoi
+juggles
+rewinds
+buck
+storyteller
+biosolids
+french
+diets
+freedom
+discredited
+enclaves
+ceremony
+rowing
+robs
+smartest
+flutes
+finalist
+provided
+interpreted
+grief
+trembles
+powering
+myths
+thriving
+shingle
+more angular
+clinics
+skewed
+more skillful
+transmitters
+chopped
+competitive
+vignettes
+lagged
+gymnastics
+awesome
+chorusing
+ideological
+hounded
+humor
+numerous
+nourish
+breathed
+Consolation
+cavity
+philosopher
+knits
+Anything
+casually
+modern-day
+meager
+Fore
+dreamer
+lefties
+dread
+forecasting
+starts
+spared
+smothering
+kangaroos
+Home
+fiance
+avoidance
+rubbed
+partly
+occasions
+altered
+bladder
+undermine
+native
+dangerous
+afraid
+sneaked
+coward
+impeaches
+peels
+raving
+complain
+lords
+matrix
+activity
+continuous
+educates
+nutrients
+hiked
+frame
+liberty
+strategist
+westerners
+debate
+eluded
+chill
+collected
+glimpse
+rhyme
+anomalies
+plastered
+trembled
+dwells
+implicate
+crisis
+tight
+chapels
+April
+bled
+brewed
+resemblance
+smothered
+diagnose
+tango
+reappear
+disagreeing
+spent
+majors
+idols
+theorize
+turnings
+estranging
+epidemic
+stiff
+madam
+hemispheres
+flourished
+seams
+hamstringing
+Conjunction
+smudging
+contribute
+tense
+info
+docks
+subscription
+parchment
+harvested
+unnoticed
+Rice
+sewn
+helmets
+embark
+moved
+grappling
+bourbon
+pinging
+ferrying
+sofa
+dates
+corrective
+spellings
+snugging
+mortgages
+rationality
+energy
+gobbled
+flutter
+most seductive
+fuels
+everywhere
+warranties
+due
+pat
+imprints
+most merciful
+insult
+teachers
+Ices
+relate
+breast
+detouring
+sections
+swore
+constituencies
+touches
+antibiotic
+expects
+prevention
+historically
+diplomats
+most passive
+Insurer
+homecomings
+removes
+monumental
+campus
+updated
+delete
+taper
+based
+awkwardly
+generator
+dissipating
+suspended
+rifted
+bombarded
+maps
+questionnaires
+circulation
+astronomer
+torn
+Virtue
+wand
+fuming
+briefing
+lions
+bland
+jamaican
+rewrite
+Doctor
+vibrant
+strickening
+courses
+Nationalism
+delightful
+theme
+consummating
+relatively
+horizons
+piping
+alarm
+gins
+preoccupied
+oddly
+witty
+classmate
+intermediated
+correcter
+magnifies
+reigns
+critic
+most marginal
+shrugging
+installed
+household
+dismantle
+budgeting
+studs
+Palestinian
+hitters
+overcome
+posts
+simpler
+backs
+outrages
+tying
+flake
+yearly
+earthly
+verifying
+aforementioned
+Tempting
+neurological
+combine
+analytic
+exhaust
+grip
+socializing
+glazed
+explosion
+most exciting
+most viable
+On
+owners
+liquid
+larva
+picturesque
+airmailed
+adept
+sponges
+Solar
+clued
+prowess
+Adverse
+Content
+Yes
+showdowns
+Handmade
+september
+tribune
+thinking
+enriching
+integrating
+piazzas
+booster
+plunges
+baron
+hogging
+chaos
+Reverend
+adorable
+treasury
+appraisal
+messing
+flesh
+affirmation
+regulate
+time-consuming
+Favorably
+launches
+rugged
+baling
+cigars
+uncommon
+parliament
+adverse
+firemen
+bail
+vice
+lifespan
+Proclamation
+duels
+twenty-four
+mexican
+fish
+sightsees
+guilts
+campaign
+blunting
+Mexicans
+leap
+Not
+Passing
+wavering
+harboured
+internationally
+most stressful
+Sparkling
+disposal
+metropolis
+rural
+hovering
+silhouette
+roads
+fella
+entertainment
+long-term
+closet
+crapped
+snatches
+rationed
+submerging
+re-enters
+seriously
+sprouts
+faults
+described
+monastery
+grunts
+resonating
+rational
+photographing
+June
+perimeter
+row
+Burning
+grin
+middle
+wanes
+sunscreens
+recorder
+computed
+questioning
+housing
+Ray
+whisks
+masters
+inefficient
+radicals
+splash
+vibrate
+mimicking
+wept
+thankfully
+profanity
+traded
+Car
+consequence
+astronauts
+delineate
+stepmother
+believes
+acquaints
+existing
+bullying
+upheaval
+wormed
+unnamed
+instrumentation
+maples
+implies
+German
+assassination
+blushing
+underlies
+worsened
+upcoming
+giant
+squirms
+practicing
+hitching
+torch
+trip
+fend
+reporters
+thoughtfully
+eye
+traumatic
+schedule
+caricatured
+advertise
+Outdoors
+boning
+sporadic
+rehearsal
+lemon
+rush
+corporals
+modernist
+locus
+urgency
+howled
+secretary
+foxes
+decorating
+Indicative
+construing
+factored
+expenses
+obliged
+acutest
+segregation
+foundered
+estrogen
+criticizes
+bales
+fungus
+desolates
+hooking
+crises
+converge
+premature
+write
+Dear
+east
+whichever
+upgraded
+waived
+shoe
+caves
+coughed
+subordinated
+specimens
+evades
+net
+rebound
+enclosures
+health
+populates
+tramming
+rented
+evaluated
+ranches
+salsa
+jollier
+Coordination
+nuisance
+infiltrating
+sorrowing
+grassing
+termed
+subsequent
+blue-collar
+priests
+reclaimed
+thorns
+dunking
+groundworks
+evener
+accept
+billed
+Asia
+teammates
+Effort
+keyboards
+centigrade
+clicking
+testament
+pressures
+most infectious
+brooking
+universally
+identification
+standpoints
+foamed
+initiations
+high-tech
+coupling
+paralyzes
+residues
+guides
+snort
+planned
+familiar
+mucked
+depressing
+fin
+detach
+nicked
+Sunsets
+clawing
+dozes
+molded
+more economical
+farmhouse
+savorier
+Antarctic
+wired
+Hispanic
+nebulae
+nonetheless
+dirtier
+chuckled
+premieres
+wrestler
+stations
+humiliates
+capacities
+enhances
+scanned
+mentioning
+cater
+receptions
+mincing
+presbyterian
+denoted
+distorts
+grab
+minor
+faintest
+squatting
+permanent
+eaves
+touch
+trains
+panthers
+ransoming
+backlashes
+reacts
+Refreshing
+Seasoned
+dissent
+furnace
+depicted
+odder
+Statistical
+sequences
+enriches
+unanimous
+eddies
+interracial
+Inland
+petted
+platform
+graying
+aroma
+soy
+castle
+iraqi
+most successive
+Planes
+motioned
+install
+words
+axes
+endowment
+spiders
+accessaries
+jagged
+bet
+Coherent
+aced
+When
+Spiritual
+cobs
+American
+progressed
+hosting
+principals
+lurking
+url
+curling
+automotive
+smells
+cooperation
+shrouded
+pronounces
+meanwhile
+plumper
+schoolbag
+vintage
+plush
+lawsuit
+thrilled
+Ideological
+correspondent
+crackdown
+cosiest
+upon
+inclusive
+torture
+waved
+theologian
+locates
+notebooks
+invented
+scandals
+logical
+preliminaries
+op
+flopped
+nearby
+gulps
+errors
+circumstance
+plumped
+demolition
+luckiest
+measurements
+frequencies
+laps
+council
+prototype
+Desktops
+packs
+para
+eased
+split
+connections
+make
+moans
+glisten
+disconnect
+reunite
+hate
+concreting
+recommended
+scientist
+loafs
+exterior
+endows
+blazing
+paintings
+decidedly
+auditory
+bob
+stake
+requirements
+roes
+echoed
+tidied
+enquiries
+interesting
+conning
+invests
+spurting
+duplicates
+processing
+candidate
+Western
+funny
+mend
+warehousing
+joy
+streaks
+influentials
+mists
+syndicates
+exchanges
+flashier
+cooperates
+slopes
+crowned
+righteous
+carolled
+creaming
+staking
+pineapples
+gulf
+swooping
+admirable
+dreaming
+deep
+actor
+rumoring
+embryo
+mangos
+kilogram
+errand
+industries
+Ta
+October
+Sceptical
+illuminating
+oppression
+receivers
+peered
+pistols
+borrowers
+persuades
+waits
+framing
+fund-raisers
+proving
+spacecrafts
+affirm
+protrude
+indicate
+colouring
+paused
+flavoured
+reclaim
+bans
+coaching
+statistic
+assassinations
+ballots
+editor
+understandably
+bronzes
+awoke
+books
+emphases
+embargoing
+pruning
+deletes
+ridiculous
+retrospective
+hoards
+discussing
+rainforests
+more selfish
+willingness
+bearings
+coach
+feeder
+overlooking
+overflowed
+grittiest
+paramilitaries
+foods
+fabrics
+investigations
+Goodwill
+shredded
+pens
+rushed
+disaster
+metaphysical
+caused
+bro
+fit
+height
+coughs
+twigging
+hook
+blurting
+criticizing
+pledges
+accelerated
+spewed
+overturns
+obliterates
+blitzing
+frenchman
+punk
+ladening
+Bow
+grids
+coin
+worthless
+yesterday
+thoughtless
+sportings
+parcelling
+toxin
+disgrace
+freely
+jewel
+auction
+bundles
+surprised
+airtight
+individualizing
+reminders
+acknowledges
+baroque
+memory
+run
+newcomers
+playwright
+nicks
+treasurers
+Macs
+ceremonial
+mutually
+counselled
+birthplace
+foreign
+thrive
+unfolding
+proposals
+conflicted
+braid
+helm
+schoolgirl
+YOU
+diagnostic
+thousand
+junking
+grates
+devastation
+draw
+most troublesome
+flint
+reserves
+aptest
+arced
+moonlights
+arrogance
+contests
+Constructive
+stops
+basketballs
+bodies
+shreds
+Intact
+molested
+forearms
+juries
+cushions
+finish
+happiness
+condos
+studded
+outweigh
+porters
+informing
+sadder
+Olympic
+defeats
+Hallmark
+stewardesses
+Autism
+thanking
+vibed
+hospitalizing
+strength
+pokes
+shyest
+wittiest
+excitedly
+twos
+passengers
+targeted
+Arch
+facial
+more golden
+reconciliation
+disrespect
+defied
+decreasing
+centralized
+seizes
+curly
+fables
+puddings
+kaiser
+dictatorship
+inject
+nearer
+stomachs
+skirts
+flattens
+nickel
+skater
+labors
+Us
+extremists
+addictive
+jungles
+spurs
+flame
+most radiant
+transform
+roundest
+Seas
+envisions
+flinches
+herbal
+terminal
+bass
+immaturest
+Spring
+sneer
+manuscripts
+Tear
+swimmers
+shout
+risky
+objection
+bummed
+shudder
+sizzle
+cuisines
+surge
+vented
+gladly
+more fragile
+seeps
+troubling
+most sensational
+preceded
+starring
+ceremonies
+husky
+jewish
+empathy
+bearer
+ornamented
+at-risk
+fieldwork
+apprehending
+starter
+coordinates
+verges
+hard-working
+far-reaching
+bonds
+match
+outburst
+crooked
+diverse
+routinely
+rivals
+trailers
+treks
+kissing
+visionaries
+wow
+fresh
+intelligent
+meandering
+erasers
+lambed
+avid
+desks
+elsewhere
+thunders
+freshmen
+most gracious
+registered
+prosperity
+awkward
+prodigy
+entering
+retreats
+amending
+sexist
+upscaled
+These
+inconveniences
+underlain
+grey
+dismiss
+armchairs
+conquered
+transmissions
+fossil
+convicting
+paychecks
+worldlier
+earned
+formulated
+lofted
+hurdled
+inequity
+commutes
+bolting
+library
+Perfect
+obviously
+commuters
+accents
+efficient
+soberest
+troubled
+formed
+anglo
+cupboards
+barbed
+moderator
+handier
+excusing
+wires
+resolution
+Once
+plums
+illegally
+succeeded
+autopsies
+gluing
+thrusting
+offended
+consequences
+designer
+disintegrates
+drill
+unfriendly
+dampened
+crowns
+tattoo
+stadiums
+roguer
+chilean
+breakthroughs
+shrunk
+expires
+grass-roots
+more predictable
+pinnacles
+deducted
+amount
+consciousness
+churning
+goggled
+observatory
+Varsity
+teeing
+gators
+guns
+mistresses
+documentaries
+stepfather
+sheet
+most skillful
+bar
+experiencing
+apt
+scaled
+femme
+postpones
+herald
+dispute
+astronomical
+concussions
+theaters
+Switzerland
+admiral
+Practically
+drag
+occasionally
+despise
+sags
+forgot
+overthrown
+tasking
+most recurrent
+outcries
+ladders
+brewers
+injunctions
+texted
+averted
+boilers
+hefting
+most substantial
+lid
+resident
+nominate
+glaring
+rundowns
+advancement
+spiciest
+ministered
+cripples
+cease
+arches
+relinquished
+pasturing
+wholly
+leader
+exclaimed
+Oriental
+quarrelling
+rivers
+making
+shown
+workouts
+governments
+disciplinary
+ministries
+nick
+hops
+incorporations
+backdrop
+caring
+mater
+skim
+overate
+fare
+seeing
+rehearsing
+agencies
+scales
+collection
+lashing
+Organic
+Postulate
+astonished
+radii
+garrisons
+immediate
+satellites
+casket
+nosing
+picture
+filling
+unstable
+bisexual
+sweetest
+jerk
+pleasantest
+delicate
+passersby
+shave
+relieves
+stiffens
+attired
+undercut
+crept
+journalistic
+lengthening
+crested
+hour
+sweater
+anti-depressant
+assortment
+constituent
+disappointments
+Respectable
+charting
+floats
+assassins
+rollers
+isolating
+corrected
+practically
+sharpened
+nurturing
+learn
+twenty-three
+controversies
+mallard
+crabs
+trainers
+launched
+chilling
+faith
+continents
+paddling
+radius
+tacos
+packings
+prowling
+finally
+rivaling
+qualify
+rosemary
+disturbed
+re-examines
+venturing
+Light
+segregated
+holes
+sputters
+plumbers
+RIG
+cults
+Cavalier
+nicely
+memos
+strike
+more accurate
+besting
+Brazilian
+regulars
+brick
+screams
+Greece
+sufferings
+my
+tribe
+Trigger
+tributary
+participants
+algorithms
+Brazil
+pandas
+all-stars
+desolating
+furies
+drastically
+bowls
+individualities
+revered
+approves
+melt
+ancestries
+ripen
+drinking
+cokes
+wander
+motels
+daylight
+reluctant
+Wares
+multiculturalism
+multinational
+inn
+grapple
+snare
+most seasoned
+sludges
+bets
+ripens
+arranges
+physicist
+protesters
+economy
+hazarded
+diced
+drowns
+hard
+knelt
+verses
+recurring
+updates
+breasted
+rosemaries
+brief
+ferocious
+revives
+encodes
+drank
+steel
+barbershop
+allegation
+striving
+sling
+balded
+honorable
+quarts
+cutters
+sheriffs
+waterproofs
+collector
+drags
+industrialized
+assurances
+transporting
+drop
+succeed
+kitties
+butchered
+netted
+steaks
+spat
+rafting
+hymns
+weakness
+manipulate
+shiniest
+Addict
+frankly
+lamest
+bulks
+during
+antarctica
+seeking
+fetches
+paces
+ghostlier
+fairway
+gambler
+pushing
+pricey
+organizers
+infinity
+backward
+printing
+violin
+priorities
+practising
+vomited
+what
+petitions
+vanities
+vital
+angler
+sprawls
+donning
+Neutral
+erupt
+heat
+stranded
+functions
+dudes
+tiptoeing
+chartered
+captured
+scar
+severities
+North
+sounding
+lows
+aid
+kingdoms
+affiliating
+diplomacies
+catfished
+laws
+vibrations
+mediation
+patriotism
+peasant
+assertions
+informed
+chilis
+defers
+adds
+page
+embed
+waging
+initiating
+broadcasting
+comeback
+lobbying
+interception
+convertibles
+coax
+Demon
+predict
+Olympics
+stand-ups
+attire
+brasses
+pollution
+bag
+appropriating
+stove
+phenomena
+surpasses
+mesmerizes
+buffers
+welds
+filthy
+arcades
+dots
+heeded
+pencil
+why
+rewarded
+father
+government
+eerie
+cardinals
+offsetted
+Yep
+patching
+shelf
+trolls
+survive
+broadband
+strawberry
+swim
+harsher
+rebates
+photoing
+feasts
+focused
+eat
+grilled
+brokering
+Practical
+adjust
+archived
+crafted
+slit
+raises
+validity
+inquires
+Marathons
+finished
+fevers
+nicknames
+flecking
+mastery
+alpha
+services
+tigers
+horseback
+metros
+wits
+Cherokee
+itched
+permits
+most sensible
+congressional
+lobe
+spade
+fabricate
+whiff
+cocked
+predicated
+mudded
+attribute
+soundtrack
+understandings
+prompted
+bewared
+distributor
+deliberated
+entrepreneurial
+coldness
+convince
+aquatic
+oilier
+owe
+attention
+rawest
+conferring
+billings
+strangled
+detentions
+frantickest
+barracks
+Concord
+scooter
+conceptions
+onion
+referencing
+bottled
+dark
+gums
+salesmen
+corruptions
+fragrances
+playmates
+disclose
+discards
+docking
+invaders
+sneaking
+tarting
+streamlines
+stimulate
+tarring
+worm
+fundamentally
+panelist
+administer
+directing
+termination
+reverts
+evokes
+Word
+beneficiaries
+cook
+tombs
+dives
+Depressions
+patience
+brigading
+Taliban
+likeliest
+doped
+accordance
+differentials
+puffing
+Polar
+battery
+pitying
+dropout
+norwegians
+Concerning
+postmodern
+unpacking
+strangle
+deems
+sequels
+descendants
+aligning
+mould
+spilled
+wraps
+northwestern
+atomic
+noble
+complex
+threshold
+blank
+inadequacies
+mapped
+perceive
+cruder
+speeding
+coalition
+framed
+inheriting
+proverb
+most valid
+leaking
+meshed
+most neural
+lose
+unites
+directly
+debris
+windings
+midsummer
+chores
+loans
+ejecting
+burdens
+projections
+received
+Overseas
+pangs
+justify
+yells
+protecting
+scoots
+beep
+lung
+supplemented
+robing
+enforce
+working-class
+Apparent
+flexible
+concrete
+bubble
+opposes
+denoting
+strait
+Crescents
+violent
+flourishing
+satisfy
+grandpa
+denouncing
+pitcher
+flushes
+occupying
+grieving
+both
+commoner
+conglomerating
+rundown
+eagles
+rains
+furnish
+third-party
+harvests
+pledged
+merited
+practitioners
+chromed
+spectacles
+Doctoral
+detailing
+deteriorating
+export
+exempted
+attending
+length
+bogs
+infuse
+wooded
+rummages
+classrooms
+vacuuming
+elf
+blackboards
+massed
+appealing
+profanities
+Executive
+hurricanes
+disposable
+hypocrisy
+erratic
+aging
+eyeballs
+censures
+mosques
+trim
+roamed
+degrades
+tabooed
+bragging
+pearling
+bleaker
+imposing
+manipulating
+realistically
+rodeos
+uprights
+shortening
+earl
+syndicate
+portrait
+highlands
+cottages
+extend
+showers
+underground
+temperature
+Franks
+most colourful
+honeymoon
+pursue
+repetitions
+tapestry
+sorest
+rook
+Action
+balancing
+cots
+renounces
+mums
+worrying
+behavior
+appearances
+duct
+registries
+tomato
+intenser
+strengthened
+Science
+lamp
+casserole
+compromised
+sailed
+tenure
+modestly
+Candid
+cubicle
+cane
+Key
+dialects
+stiffness
+denied
+endures
+pundits
+wreaking
+Underlying
+accountability
+whored
+wiggles
+elk
+protest
+sustained
+dissolving
+emigrate
+actions
+sunlight
+geometry
+more scenic
+specificity
+Migrations
+autonomy
+hollered
+deprivation
+innocence
+previews
+oxen
+threaten
+greenhouse
+appareled
+unified
+encloses
+declines
+Petroleum
+plugs
+grimace
+refusing
+savage
+Airwaves
+spoiled
+creative
+matchings
+Amazons
+advisor
+confided
+fray
+pamphlets
+intensely
+jazzes
+gauges
+representations
+peddled
+Father's
+physiology
+talk
+resonate
+Justice
+squirting
+spinning
+prayers
+antlers
+pursing
+chuting
+intermediary
+radiation
+amused
+biographers
+slighter
+jaguars
+judgement
+democracy
+grants
+incapable
+crusades
+buffalos
+mentally
+disappointedly
+Shorts
+lashes
+gripping
+breadth
+planks
+steadied
+protracted
+All
+discoursing
+Macho
+most attractive
+case
+Plate
+procession
+homestead
+ourselves
+anthropologists
+node
+sprinting
+hostage
+yank
+daisy
+urging
+inputted
+comers
+thy
+brownie
+tampers
+scented
+purporting
+forked
+Subjectivity
+propositioning
+wagged
+mine
+first-year
+neater
+more seminal
+quiz
+Mercury
+impairs
+traditionalists
+powerless
+intact
+elections
+swarm
+acids
+chairs
+remind
+postoperative
+skull
+arena
+martinis
+perishing
+dementia
+thrashes
+fulfillment
+organizes
+fetched
+aged
+him
+rode
+Protein
+electrons
+Exotic
+liabilities
+rental
+wrist
+scants
+racketed
+websites
+infuses
+fingernail
+vacant
+dire
+delights
+common
+custom
+mammals
+wars
+throughout
+metropolitan
+Yoga
+moonlight
+chucked
+trims
+remoting
+elective
+displacement
+midterms
+possess
+terrify
+sweden
+buffet
+plaids
+iron
+railway
+seduces
+most inventive
+August
+signify
+horseman
+boycott
+exclusive
+flashlight
+mortality
+choppers
+algerian
+duet
+drafts
+sunburnt
+beet
+supers
+contextual
+infinite
+policy-making
+lookout
+more trivial
+halloween
+filibustering
+profited
+graffiti
+tub
+buffering
+posted
+achieve
+tailing
+quickly
+bucks
+exported
+banner
+invoking
+turf
+bulldozers
+electrical
+pouching
+rinks
+reciting
+caging
+Bill
+cattle
+blisters
+proctors
+hank
+october
+favorably
+courthouse
+limit
+strengthen
+Stock
+institutionalize
+graphite
+hesitations
+guard
+designing
+cost-effective
+flawless
+negotiates
+Boxing
+avenues
+interference
+limes
+myself
+ratifies
+tainted
+chin
+flanks
+jerseys
+stones
+pains
+more passive
+topical
+newt
+coercing
+college
+collaboratives
+played
+episcopal
+brims
+justifying
+rottenest
+boosters
+coatings
+accounting
+socks
+preys
+acceptance
+motionless
+trashing
+hams
+claiming
+salary
+pioneering
+Chinese
+motel
+Flavor
+creme
+ditches
+rep
+re-establishes
+sums
+extensions
+spasm
+staple
+murkiest
+prospective
+conventional
+Extent
+portrayals
+pixel
+anthropological
+coordinating
+pharmaceutical
+circulated
+shallots
+insulin
+formative
+peril
+rusted
+fort
+spraying
+informative
+investors
+richer
+pueblos
+unearths
+most regal
+taxed
+more colourful
+solely
+oohs
+taxiing
+Secular
+economies
+musters
+audits
+more muscular
+irresistible
+predictably
+Corner
+motored
+more communal
+undermines
+meteorites
+bothering
+emotionally
+fascination
+retribution
+provinces
+flurried
+mitigation
+remorses
+pope
+Mobile
+scheduled
+fords
+hoping
+soaps
+alligators
+Capitol
+bulldog
+citizen
+doctoral
+linkage
+Jasmine
+phrases
+pitches
+suck
+watermelons
+smog
+manuring
+overrides
+emeralds
+fatty
+disappointing
+scooped
+hooping
+before
+most affordable
+amassing
+Uptown
+crunches
+electoral
+umpired
+slate
+consolation
+adding
+glows
+Catholic
+monologue
+Collateral
+competitors
+noteworthy
+bloom
+downstairs
+village
+Sundays
+paperwork
+leaped
+regal
+failure
+inspire
+babes
+lax
+sceptical
+prepares
+tonguing
+Conflicts
+most playful
+dabs
+taste
+politely
+padding
+singer
+modernization
+devastating
+hoisted
+Danish
+shun
+neon
+okayed
+dormitory
+credible
+strengthening
+engaged
+criticize
+marketed
+altar
+illuminates
+play
+screaming
+pollutant
+harboring
+rim
+innocent
+mushrooms
+phones
+coloured
+punting
+postulate
+improbable
+Contaminate
+sovereign
+reappearing
+easily
+measurement
+tented
+herds
+magnitude
+trade-off
+billionaire
+intrusion
+rentals
+nebula
+brandy
+more exciting
+beginners
+pegs
+Petitions
+reunited
+heels
+notions
+preheating
+milling
+Feminine
+scents
+gate
+totality
+mobility
+sparkling
+routes
+landscape
+most volatile
+germany
+fought
+convict
+sideways
+gulling
+Oranges
+paragraph
+stapled
+columnists
+cell
+reins
+smaller
+Conduct
+trustee
+presentation
+personas
+dashed
+millennia
+bidding
+sieged
+meagerest
+statues
+modalities
+more visible
+exercise
+pesto
+overviewed
+fortnight
+handcuffs
+democracies
+tunneling
+inserting
+bucketed
+think
+reimbursing
+sympathizes
+grand
+abducted
+camper
+morphed
+Caribbean
+liars
+Particular
+coarser
+mowed
+wage
+reconsidered
+executive
+cream
+disappointed
+most competitive
+redirects
+adaptations
+Heroic
+whirl
+guilds
+Décor
+pretenses
+weathers
+latter
+drier
+zipped
+impeaching
+otherwise
+Apostles
+anchors
+smacked
+grandfather
+annual
+reduce
+departures
+one-time
+documents
+Fun
+cashing
+perceptions
+armor
+markers
+admittedly
+caterpillars
+two-thirds
+silvering
+adjuncts
+docked
+train
+reiterate
+nipped
+practice
+recorded
+mare
+stirs
+Mouth
+hived
+implying
+policemen
+distrusts
+strictest
+rendition
+disintegrated
+jutted
+Efficient
+frost
+mouthed
+flanked
+military
+testified
+bonus
+sidebar
+pasted
+Ha
+mash
+diaspora
+straddling
+jazzed
+biker
+overriding
+enlists
+amended
+recesses
+unpack
+mills
+gifted
+torched
+daddies
+fading
+dynamited
+heighten
+propagate
+determining
+artisan
+thicker
+mixers
+pollen
+needled
+tribal
+fraction
+bladders
+thirty-five
+conspire
+involves
+mules
+narrate
+shouted
+granting
+paramedics
+conflicting
+ought
+overhauls
+sourced
+devil
+obligations
+Wrath
+short-lived
+Notices
+neediest
+overlooks
+rattle
+animates
+defending
+inched
+Dutch
+policeman
+flip
+halted
+automatic
+dispersing
+mergers
+inlets
+canned
+ascertaining
+memoranda
+Mighty
+ambient
+homeless
+wheeling
+momentary
+ethical
+myriad
+sages
+wartime
+moister
+most charming
+sermons
+unfolded
+jaded
+charity
+forged
+Compassionate
+european
+rustle
+eagerness
+organises
+preschool
+concedes
+censure
+vogues
+convenes
+went
+alert
+outskirts
+dreamt
+filthiest
+rectangles
+milkmen
+flickered
+oh
+agree
+hairs
+friendly
+mercury
+severe
+budgets
+outlined
+interrogations
+clerk
+bypass
+ridged
+futile
+taking
+frightens
+pity
+shabbiest
+rallying
+alleys
+schizophrenia
+silk
+inscribed
+waters
+savager
+merest
+peacefully
+tighten
+english
+rescue
+more casual
+narrowed
+compulsory
+gospels
+flourish
+muzzles
+cheapest
+most pointless
+charcoals
+trolleys
+incumbents
+steward's
+flaw
+noodles
+tangible
+conceive
+corps
+shotgun
+stickier
+decipher
+stiffs
+torching
+adequately
+more restrictive
+scolds
+recapture
+Ornate
+industry
+encompassing
+subsidiaries
+expansions
+bicycles
+tries
+interpreting
+africa
+television
+slam
+raped
+rearing
+australian
+plaintiff
+bluffs
+awoken
+scientifically
+symbolize
+socialist
+virtue
+Warrior
+dutch
+baptist
+engulfs
+strangles
+deserting
+theft
+prick
+agenda
+peeled
+mind-sets
+incorporated
+Thee
+solvents
+swiveling
+nervousness
+caste
+insomnia
+decor
+angled
+mused
+theology
+buoys
+improved
+institute
+cultures
+geared
+stinking
+connecting
+refunds
+Forth
+reinstates
+jabbed
+Steroid
+pour
+showcased
+recessing
+rifles
+flowing
+enzyme
+immigrant
+sever
+cards
+unsafe
+diplomacy
+accentuated
+subscale
+chapters
+reveled
+pyramid
+notification
+swam
+slammed
+crumbing
+jesuit
+siding
+inductions
+divisive
+dioxide
+ramps
+cuffs
+transparencies
+questions
+sorrows
+gust
+downsize
+jointly
+Upstairs
+van
+overcoat
+puzzling
+commented
+felonies
+sniffed
+grieved
+emulated
+quivered
+everlasting
+door
+rusting
+whine
+discourage
+advertises
+fog
+mooning
+adult
+outweighs
+evaluating
+municipality
+ten
+kite
+loosing
+tooling
+mending
+sweetie
+sicker
+aluminum
+virtually
+more rebellious
+creaked
+betrayed
+shadow
+depth
+fastens
+wrappers
+reaps
+tough
+minimum
+sticking
+features
+knee
+palest
+remnant
+hustle
+analogs
+mechanism
+fairways
+variable
+airport
+incredible
+detectors
+suburban
+spreadsheet
+knotted
+ecologist
+Related
+exuded
+proctored
+compliments
+armchair
+undermined
+more fortunate
+neighborhoods
+absolute
+bless
+curb
+urge
+slightly
+joys
+beverage
+inked
+internships
+sergeants
+Grace
+lapel
+Immediate
+classroom
+fiercer
+winners
+pacific
+accumulates
+fusion
+vowed
+an
+shuffles
+meet
+Doctrine
+nourished
+confirmation
+rounded
+widely
+collaborations
+seller
+bonier
+courtroom
+Planning
+ascribes
+earn
+motivations
+databases
+achieves
+pavement
+zombie
+recalled
+detained
+scallop
+such
+Gifted
+crop
+squinted
+foul
+airwaves
+signal
+slumps
+angular
+boxers
+tsunami
+spam
+simultaneously
+flared
+undocumented
+secured
+laughing
+trails
+trophy
+rank
+careful
+trembling
+podded
+obliterated
+change
+supernovae
+hazier
+bulbs
+cratered
+personnel
+formulas
+choral
+waterway
+lemonade
+expanding
+labored
+possesses
+untouched
+limitless
+brigades
+geographical
+date
+pre-service
+squealing
+grannies
+affected
+nugget
+Almighty
+scorns
+therefore
+construct
+memo
+commended
+solid
+using
+ratios
+ambassador
+wandered
+imitating
+crossover
+re-elects
+Case
+bombed
+castes
+pegging
+reliever
+magistrates
+roused
+dizzies
+squatted
+fumed
+cooperate
+gulled
+littered
+spearhead
+phone
+liter
+practiced
+weeping
+amish
+jurors
+lessening
+copings
+yogurt
+accountant
+beach
+firs
+retiree
+surged
+verdicts
+someplace
+flicker
+slamming
+apply
+math
+pores
+grinds
+noticeable
+motoring
+enables
+airiest
+reunion
+audiotapes
+Continentals
+quotes
+sinning
+marginalized
+blond
+handcuffing
+steelhead
+chased
+archer
+motherhood
+West
+unfinished
+cellphones
+outfit
+Former
+sad
+flinched
+par
+rehearsed
+self-help
+spiritually
+convention
+oaks
+insulates
+tremor
+telescope
+bleach
+sold
+skies
+gardens
+bloused
+hefts
+judging
+circular
+maids
+sublimes
+Hearing
+scolding
+counsels
+enlisting
+selection
+shouldering
+gleaned
+czech
+artwork
+imparting
+accelerations
+stiffing
+innings
+serenest
+President
+correlate
+accenting
+railing
+sophistications
+stocks
+Jeans
+deficit
+most tragic
+competes
+Vocals
+plucking
+darkest
+periodically
+cleansing
+discontinued
+savory
+specifying
+incompetence
+symbols
+eliminating
+unraveled
+engineering
+runoff
+thirsted
+adheres
+pennies
+iranian
+trams
+scrubs
+sinisters
+censors
+groundwork
+chats
+sector
+downside
+multivariate
+approaching
+jersey
+bids
+entailing
+viola
+auroras
+holly
+Mostly
+dissatisfied
+jihad
+singles
+capitalizing
+converging
+assemble
+repressive
+sneering
+boredom
+supervisor
+wiped
+rejections
+obscenest
+subgroups
+preemption
+deliberating
+documentations
+scotching
+customizing
+groups
+warping
+lipstick
+Rabbit
+wreaths
+needing
+distributes
+renovation
+wads
+slickest
+perceptual
+flood
+orthodox
+aircraft
+cavern
+more traditional
+stenches
+most enjoyable
+more poetic
+means
+pictures
+most mechanical
+incidences
+revelled
+foiled
+contaminated
+flow
+evidently
+hiss
+physician
+interstate
+tricky
+unions
+cost
+ancient
+usual
+comprehending
+Gay
+more spacious
+thought
+laboring
+believers
+residuals
+dusts
+up-to-date
+most bored
+threaded
+Likelihood
+connectors
+surgeries
+borders
+Possible
+consented
+tiring
+sophisticated
+works
+ashtray
+pastor
+somewhere
+craned
+spilt
+skewer
+paw
+Ops
+littlest
+joins
+pellets
+Territorial
+questioned
+carefully
+strips
+electing
+min
+monarch
+drops
+thou
+most depressing
+burials
+birdieing
+trooper
+lightning
+Imposing
+monuments
+banning
+bluffing
+steamings
+galled
+aztec
+hunch
+orbiting
+expansion
+trolley
+racer
+ranched
+fishes
+punished
+sobbed
+pollens
+direct
+villa
+granted
+perching
+butts
+optimal
+sheens
+Psychics
+waiter
+insurgency
+royals
+Cosmic
+sandwiching
+barring
+characters
+shadowy
+perpetrator
+po
+aprons
+darker
+angels
+tab
+tracts
+putt
+shortest
+momentarily
+champion
+recommending
+retrospected
+Pines
+easels
+housed
+pierce
+overtime
+deployment
+tripod
+loud
+postulates
+conduit
+stomping
+skis
+swamps
+China
+selves
+muzzle
+budged
+dullest
+Free-market
+mushrooming
+more responsive
+fairness
+pivoting
+confines
+gifting
+portfolios
+quantities
+hoist
+mob
+luggage
+enable
+zoo
+slowdown
+cake
+sensational
+reflected
+techniques
+shaker
+staffing
+cluttering
+spears
+bout
+strengthens
+gasped
+hassled
+reluctance
+finite
+warmth
+alienate
+tackle
+thoughtful
+polices
+history
+careering
+Guinea
+Terrifying
+initiate
+algebra
+disorder
+tireless
+issuing
+trenching
+unloads
+Virgin
+gallop
+accruing
+parsleys
+paste
+scribbles
+artichoke
+vests
+transmission
+interior
+barn
+spill
+vampire
+appetizer
+reimbursement
+ex-husband
+cements
+uninsured
+impractical
+rockers
+integrate
+stud
+ignition
+knifed
+accompany
+gull
+screeching
+spearing
+breakthrough
+lamenting
+upholds
+undercutting
+Peg
+jasmine
+compose
+knocked
+more respectable
+suns
+fluffier
+trellises
+more distant
+more tenuous
+jerked
+soothe
+binges
+hurry
+milled
+article
+seafood
+grocers
+projects
+bulged
+sped
+eats
+besides
+bending
+accidents
+durations
+postseasons
+shell
+confinement
+Thrifts
+gas
+sourest
+fishers
+sore
+briefings
+advice
+junction
+sheerest
+more sensible
+chancellor
+Generally
+levelled
+Lavish
+backlash
+more impressive
+religious
+Defence
+sacrifice
+brazilian
+staff
+prone
+wrings
+twentieth-century
+belongs
+pays
+destinations
+statutes
+pilot
+publicity
+nicotine
+devising
+boil
+buying
+Small
+corrupt
+apparel
+polluting
+meanings
+quitting
+abusive
+Ammunition
+libraries
+contradictory
+devouring
+duty
+yourself
+rued
+connected
+colliding
+most visible
+weeding
+Scoff
+scripts
+threatening
+Aerospace
+alloys
+debuts
+magnitudes
+sensitive
+Accidentals
+hurl
+skews
+colts
+luncheon
+gearing
+arrival
+postured
+more convenient
+anova
+Bund
+edited
+obesity
+saps
+Usable
+canary
+levies
+freaking
+canoes
+ping
+pedagogy
+boycotting
+sugars
+evidenced
+Temp
+scrutinizes
+supremacies
+participated
+Congress
+probability
+steadies
+acculturation
+snapping
+fielders
+Bye-bye
+grossing
+enthusiasts
+beliefs
+they
+hauls
+holiest
+Record
+realist
+lively
+anticipating
+formulation
+owns
+handling
+flounder
+blasting
+giddy
+waffles
+wound
+steelheads
+billboard
+underway
+most traditional
+barked
+horribles
+bank
+proves
+bonded
+dooms
+accommodations
+toned
+swath
+curtaining
+prosecutors
+Flashback
+technologies
+record
+eschews
+quest
+abandoned
+porches
+reopened
+bucket
+exciting
+Millers
+chairmen
+systemic
+propose
+caught
+revitalizes
+cat
+greasier
+lakes
+backstage
+scarring
+symposium
+gamed
+dollar
+February
+radioactive
+endangers
+trolleyed
+queued
+tampered
+quarry
+toes
+interacts
+twenty-five
+mule
+fielded
+bleaching
+funk
+localities
+stiffened
+belgian
+most rugged
+shears
+conquers
+coaxing
+proponent
+grandma
+citruses
+might
+wax
+instructional
+pickets
+Yugoslav
+absolutely
+improper
+mute
+fluorescent
+handout
+feeblest
+numb
+confounded
+carting
+renditions
+Hilarious
+dummy
+fainter
+Published
+flatter
+applauds
+revolutionizing
+password
+lighting
+buster
+peas
+sea
+debacle
+Detour
+effort
+island
+buzz
+niches
+delegations
+warned
+selections
+ramifications
+diagnosis
+enclosure
+sketching
+Soviet
+quip
+swamp
+psychiatrist
+preaching
+Nuclear
+lurk
+pettiest
+torpedoing
+conscientious
+savannah
+couriers
+released
+wobbled
+builders
+Galactic
+looses
+grander
+worldview
+harbor
+effecting
+hidden
+skims
+hawked
+unlikely
+long
+rooted
+entirest
+omitted
+filled
+unsatisfied
+satisfies
+finals
+begins
+despises
+photograph
+synthetic
+pain
+fished
+winter
+dampen
+juggled
+sniffs
+beforehand
+gigantic
+induction
+possibly
+chins
+excursions
+dispatched
+flickers
+opted
+executes
+recorders
+stashing
+discussion
+inflammatory
+headmasters
+pronounced
+brooks
+lining
+crocodile
+willingly
+mulching
+fleshing
+mailboxes
+worried
+conductor
+manner
+auctions
+memorable
+slippery
+uncle
+managed
+wagging
+electron
+Emotion
+classmates
+shaver
+inching
+blouses
+abounded
+of
+feast
+lust
+peach
+pinpoint
+fig
+squat
+pirating
+silkiest
+melon
+caucus
+accreditation
+names
+alone
+more positive
+administers
+imposition
+environmentalist
+nobility
+need
+enters
+posing
+adopt
+mammal
+coalitions
+televisions
+confused
+solar
+romancing
+improvement
+attaches
+coals
+wonderfully
+pile
+keyword
+planet
+readout
+gain
+deployed
+enhance
+dotted
+conducive
+affirms
+pathetic
+circled
+caveat
+sacks
+leisure
+mom
+extravagant
+relaxed
+beneficial
+would-be
+madame
+visibly
+believing
+Jazz
+transforming
+dough
+feelings
+flexed
+climate
+nightmares
+construes
+ascends
+outlining
+evolution
+outlawed
+anthologies
+more intelligent
+Forearms
+disguise
+cry
+animal
+Minor
+peace
+weds
+taped
+faulted
+dins
+tucked
+cultivation
+reasoned
+fantastic
+English
+delusions
+corruption
+Armenian
+publishing
+psychosocial
+stamping
+petting
+verbs
+Satires
+garnished
+trickled
+hypotheses
+debilitating
+incurred
+Jade
+situation
+Elder
+query
+guarantees
+motives
+therapy
+hebrew
+unify
+questionnaire
+overhauling
+shuffled
+biotech
+taxi
+sulfur
+twenty-eight
+smokier
+Aft
+rebellion
+Velvets
+at
+rioted
+secure
+serpent
+Symbolic
+novelty
+toothpaste
+invisible
+Goods
+hen
+ye
+giddies
+destabilizes
+operation
+remote
+imperatives
+railed
+dialogues
+royal
+citation
+inquiring
+oceania
+shamed
+dawn
+dweller
+prepare
+replacement
+someday
+olympics
+mortared
+quivering
+panting
+sizeable
+typed
+mobilize
+cubed
+serving
+Surprising
+exemplary
+cross-country
+donation
+fitness
+leisurely
+thoroughly
+attesting
+reappeared
+vaguest
+inclinations
+stash
+thrills
+boo
+surreal
+light-year
+dishing
+idealest
+vigorous
+vogue
+disappear
+malls
+heart
+dividends
+shelled
+cloaked
+laboratory
+fascinating
+rebounding
+Interference
+heavy
+replicas
+straddles
+conducts
+withdraw
+fact
+tone
+emperors
+leveraging
+shifting
+testimonies
+tumbled
+hurrying
+envelop
+murmurs
+Prime
+standardizes
+squirrelled
+parameters
+honeymooning
+salutes
+collapses
+latest
+symphonies
+quailed
+cited
+canceled
+Columbine
+ascribe
+sauce
+principle
+supreme
+usable
+Utter
+dissents
+moves
+Steroids
+veteran
+Feud
+chipped
+brilliant
+ordeal
+pantries
+populating
+controversial
+rooftop
+clarifying
+shovel
+doom
+on-screen
+ancestry
+saloon
+heal
+appreciating
+morgue
+dormitories
+cartoons
+loot
+cutout
+jamming
+flattened
+caretaker
+dare
+coherent
+outputted
+dorm
+incurring
+shouting
+orioles
+lightings
+more graceful
+modifies
+moderately
+lavishing
+honey
+Provisionals
+Geometric
+app
+destabilize
+Mm
+facials
+odor
+snows
+ethiopian
+proficiency
+nonstick
+recycled
+primal
+fussed
+coughing
+mimics
+senator
+shepherded
+staffers
+suggesting
+chicken
+corrupting
+dazed
+lumping
+compassion
+indifferent
+prosper
+ventured
+season
+slipper
+most versatile
+hourly
+stabbing
+Arid
+Drunk
+wateriest
+fucks
+packed
+gland
+vibes
+carving
+outlets
+rename
+starch
+surgeons
+mastered
+trusts
+sculpting
+bounties
+cruisers
+lighthouse
+telegraph
+fuse
+more imaginative
+bacterium
+fictional
+alcoholism
+legislature
+boy
+resurrection
+unification
+supposing
+manifestly
+published
+accusing
+exploratory
+capsules
+garnering
+disputes
+mail
+applicable
+consolidates
+quicken
+assisted
+toward
+insuring
+assure
+concessions
+bitters
+motioning
+Harbour
+sandier
+cads
+seed
+logo
+attacks
+plights
+cease-fire
+jewelry
+tablets
+edition
+summary
+receiving
+collective
+sherries
+jog
+groceries
+territory
+integrates
+discontinues
+guise
+abuser
+bathtub
+Physics
+lever
+favoured
+bolt
+risks
+happily
+lowed
+japan
+accidentally
+Ethiopian
+pedestals
+retold
+mrs.
+preferences
+throngs
+variables
+preserve
+parasite
+tenors
+fattier
+Scotland
+taunt
+handlebars
+perfumed
+conveniences
+journalist
+mimic
+jeopardize
+spurts
+exited
+guarded
+proceeded
+engendered
+bleeds
+cropped
+pitting
+encoded
+cursed
+servant
+Profitability
+loosens
+inner
+interning
+Symbolism
+fluxes
+shallowing
+scotland
+cables
+battleground
+communication
+stein
+authored
+permanently
+contraction
+Staff
+predators
+cries
+watered
+ruby
+livelier
+tidier
+slighting
+turtle
+fists
+most eventual
+intimidating
+simulates
+dictates
+reactions
+generally
+homecoming
+rarity
+thermal
+bouquet
+spending
+mustering
+stinging
+pictorial
+flat
+wafted
+billow
+follow-ups
+midway
+tweaking
+neutrality
+accusation
+Fleck
+defer
+prayed
+remainder
+frantically
+inherent
+frequented
+bowl
+pricier
+evidencing
+tasked
+depositing
+room
+bicycled
+but
+blander
+sewed
+lasts
+most distant
+child
+complains
+lashed
+topples
+Tolerance
+Iraq
+penned
+orchard
+corpses
+liberalism
+refilled
+rotates
+regain
+gains
+stained
+worsen
+allocate
+auto
+installation
+gumming
+stifles
+polo
+somberer
+hygiene
+diaries
+offensives
+ambiguous
+breaching
+specializes
+Intimate
+steady
+bricks
+hawaiian
+Cinnamon
+classifications
+beheld
+Incense
+dragons
+theoretically
+armors
+clamps
+stateliest
+zinced
+Abs
+rag
+cowards
+convinces
+most thoughtless
+undergo
+finishing
+eliciting
+elegance
+completion
+ringing
+brackets
+scoff
+pansies
+civility
+pills
+original
+smuggle
+horriblest
+brand
+woos
+apology
+bakers
+valentine
+remarries
+can
+disadvantages
+hatches
+gathering
+assurance
+pilgrim
+Eighty
+jobless
+prize
+switzerland
+subtitles
+counselors
+locks
+Live
+quirk
+Bravo
+Odour
+converting
+Towering
+retrains
+cornerbacks
+characterizes
+approaches
+Too
+viruses
+pocket
+BARK
+matting
+multitudes
+sillier
+spin-off
+paralysis
+hushes
+continual
+jurisdiction
+harmed
+inclining
+respectable
+more obese
+lessened
+atom
+predecessor
+erotic
+offseason
+noises
+merchandise
+addict
+more regal
+alphabet
+gentries
+indicator
+olympic
+easy
+asbestos
+provide
+castling
+oilfield
+feasible
+marks
+informal
+Cathedrals
+excursion
+charismatic
+retarded
+pathologists
+heft
+relationships
+damming
+puddles
+Entitlement
+aimed
+lawyer
+rakes
+obtains
+Attribute
+Least
+restore
+upstairs
+trusted
+gooses
+bracelet
+ethnically
+reserving
+creation
+pine
+cruellest
+receipts
+appointees
+nowadays
+adolescence
+Inadequate
+mails
+used
+disrupts
+forcing
+setups
+Monumental
+flyers
+clumsier
+protested
+bowmen
+bullies
+bribes
+gamma
+weaknesses
+wombs
+eyepiece
+threatens
+surges
+unveiling
+tapering
+vocational
+turbine
+considerably
+privatized
+obstacle
+figurative
+jolting
+blast
+pawing
+plowed
+appeal
+scenario
+leashes
+creditors
+modernize
+boned
+glowed
+brutaler
+uncovered
+visually
+nears
+gait
+plugging
+gunshots
+quietness
+humans
+vascular
+lesbian
+negatives
+relics
+Even
+Independents
+invention
+reconstructs
+involved
+stubbed
+completes
+disregard
+cliche
+observe
+revitalize
+animals
+Entrepreneurs
+re-establishing
+carnage
+chorused
+recognising
+composition
+alumni
+most dreadful
+puffs
+tastiest
+more fashionable
+distortion
+showcase
+gropes
+gratitude
+derby
+ivies
+paws
+undertakings
+vine
+snailing
+extinct
+heightening
+Jeeps
+braves
+prevents
+happened
+paired
+violet
+atrocity
+sandwiches
+shoring
+herb
+handicapped
+falling
+Quick
+thirst
+shits
+manufacture
+formal
+gangs
+Pronounced
+cladded
+lengthwise
+unfitted
+Deepen
+conclusion
+dressings
+subordinate
+burners
+disciples
+orbitals
+unmistakable
+wrecking
+Rhetoric
+most fabulous
+hooker
+give
+warmer
+disparate
+strews
+freelancing
+rails
+hesitation
+Jig
+dollies
+logs
+dependencies
+punctuate
+top
+privately
+Kosher
+imaging
+soundtracks
+Brim
+Downhill
+booing
+souvenir
+believer
+deplete
+bosnian
+puts
+Democrats
+passerby
+seen
+century
+layout
+more valid
+auditoriums
+Neural
+curtail
+awaited
+file
+walkings
+squeezes
+brooms
+Pentagon
+Alpha
+leash
+ashamed
+groove
+judge
+exquisite
+overruled
+ultimately
+money
+researchers
+havens
+Depressed
+wood
+entrepreneurs
+cathedral
+produces
+accepting
+counselor
+braided
+detaining
+pellet
+doubted
+homelands
+many
+fall
+Backward
+mortgage
+peddle
+shank
+visits
+Earth
+campaigning
+handlebar
+plains
+pursues
+veer
+shawls
+employing
+take
+Virtual
+comforting
+academia
+Suspensions
+shooter
+entitle
+napping
+midterm
+rendezvoused
+seventeenth
+close
+farmers
+elect
+untidier
+most impressive
+unconsciously
+wavelengths
+Twenties
+surrendered
+roasts
+definition
+labs
+blaming
+dryers
+praise
+stalls
+grafting
+most numerical
+vans
+microscopic
+researcher
+artificial
+which
+quantum
+validated
+twitching
+browns
+hungry
+whole
+upset
+isolations
+pore
+Serene
+life-threatening
+utilities
+frowned
+embarrassment
+politicians
+Down
+candidacies
+cocking
+screw
+turn
+dumb
+breading
+plantations
+buddhist
+supplying
+ketchups
+Any
+vibing
+verbals
+wrecks
+Dental
+stout
+high-speed
+realists
+almonds
+knowings
+transcribing
+blister
+raps
+appliances
+Math
+assassinating
+oven
+presided
+moderates
+predictor
+Czar
+relaying
+dominance
+characteristics
+continuations
+stumps
+impoverishing
+deals
+cable
+gelled
+left
+activated
+shriek
+hurts
+interplaying
+Turkey
+dinning
+lucratively
+boutiques
+shivering
+spike
+most rigid
+pitch
+centring
+beam
+blooming
+backpack
+complications
+microbe
+bucketing
+ornate
+caters
+quivers
+allows
+netting
+subtract
+chains
+seagull
+ceremonials
+misconducting
+youthful
+keeps
+hardier
+Rap
+infections
+safeguards
+yoruba
+whimpers
+brimmed
+reconciliations
+titles
+firmly
+washroom
+dumped
+squealed
+retrieve
+colt
+partitioning
+shone
+disclosure
+Limitless
+belts
+shrouds
+soldier
+more radiant
+sailor
+import
+classes
+boot
+National
+adjacent
+queered
+furrowed
+engulfing
+minimizes
+pistol
+controversy
+tears
+drizzling
+witnessing
+Byzantine
+fanning
+plying
+sparse
+Surplus
+presidents-elect
+feta
+stairwells
+anchorages
+paperbacks
+painter
+orbital
+Upstate
+most imaginative
+capitol
+vices
+pondering
+malpractice
+fantasizing
+arrive
+honeymoons
+deferring
+caved
+override
+constructing
+gripe
+black
+unidentified
+soys
+filipino
+suspends
+shuttering
+locomotive
+militant
+spewing
+burglary
+storing
+populations
+shams
+seating
+item
+architecture
+bandages
+severed
+incorrectly
+migrated
+crueller
+supermarkets
+interventions
+gets
+campuses
+parodying
+capitalists
+omission
+presents
+preachers
+betray
+usualer
+define
+elevates
+most rhythmic
+staler
+remodels
+relies
+rooting
+Vigorous
+holding
+pissed
+hastening
+toughness
+methodist
+beaver
+equilibrium
+sabotaging
+Staging
+submarine
+glorious
+failed
+inhale
+despairing
+diner
+psychotherapy
+fleets
+interviewers
+nonsense
+imagines
+caching
+renders
+defensive
+patriot
+static
+facilitated
+death
+sixteenth
+administered
+proclaim
+branched
+regression
+cripple
+Else
+beating
+strew
+rescued
+formatted
+naive
+Significant
+rubbing
+exercised
+world-class
+more genuine
+minority
+manor
+Semicircle
+sunflowers
+depictions
+most special
+imagine
+Constellation
+interrogation
+dishes
+cellphone
+Java
+seasonings
+reeds
+sidelining
+warehoused
+disruption
+concealing
+penalizing
+amusement
+vulnerabilities
+awakens
+believe
+minister
+treatment
+travels
+reprimanded
+illustrates
+backgrounds
+economist
+wans
+allergies
+earlier
+complete
+tankers
+souls
+Present
+established
+flocks
+terribler
+Powerful
+crumb
+inspections
+donated
+anthrax
+fretted
+weak
+helper
+galloped
+age
+blighted
+pawning
+steak
+perpetuate
+subside
+Suburban
+solemn
+autism
+promise
+true
+broad
+sub
+liveliest
+starving
+espouse
+establishing
+bronzing
+marinades
+ink
+mite
+proclaims
+Gross
+tens
+stormed
+advisers
+autonomous
+souvenirs
+racists
+upside
+bleaches
+statement
+arresting
+rainiest
+devoted
+truce
+warming
+stereo
+nevertheless
+sinking
+crossovers
+upward
+cargoes
+betting
+animated
+more assertive
+loaf
+forests
+opaque
+racing
+Fiscal
+streaked
+hills
+regrets
+rips
+clutch
+veterinarians
+dispense
+poked
+entire
+judges
+leverages
+instrumental
+blandest
+rounder
+chiefs
+possessing
+barraging
+pressing
+midnight
+flier
+mucks
+facilitating
+numbest
+waves
+reading
+most useful
+layered
+busters
+lovemaking
+gags
+roped
+cheaper
+winking
+shakiest
+entitling
+curbed
+burglars
+inspection
+Solo
+Taxation
+attitude
+dna
+differentiated
+matted
+stoops
+foxed
+satisfied
+assessing
+more intuitive
+individualism
+two-time
+penning
+Mallard
+eaten
+laborer
+granite
+bankrupted
+Paranoid
+none
+murderers
+briefs
+Aggression
+gaze
+leaked
+impeded
+containing
+discontinue
+straightened
+vessels
+imply
+institutions
+deadliest
+pretended
+folds
+typist
+bogging
+most enthusiastically
+platter
+tort
+availing
+spurred
+wedge
+dependence
+zone
+her
+subscribe
+purees
+blasted
+Scotch
+elderly
+referred
+fluctuation
+Wednesday
+prospect
+busiest
+hay
+rubble
+ponies
+remain
+effective
+lingered
+contradicted
+score
+powder
+vases
+generosity
+too
+mitts
+zebras
+shrine
+Genetic
+handwriting
+mystery
+sprays
+traffic
+gloried
+consent
+saltier
+Colour
+compiled
+punks
+armoring
+life
+stormy
+tinging
+paddled
+objecting
+embargoes
+quota
+ridiculing
+arranged
+more coastal
+Blue
+marinading
+pimped
+commercial
+albums
+balloted
+intrigues
+nylons
+stilling
+rouse
+homeowners
+flashes
+characterize
+contributes
+liters
+lushest
+triumph
+Vintage
+darkened
+grayed
+monitored
+Barefoot
+taboos
+binding
+scrolls
+desire
+parole
+etc
+parading
+hollers
+natural
+happens
+wind
+curiously
+latched
+hostility
+mooring
+boldest
+surf
+tunics
+prostitution
+devotees
+improvise
+hastened
+convenient
+meter
+faithfully
+cooked
+regimented
+Mad
+hosted
+beasts
+notable
+strikingly
+cycled
+specifically
+high-profile
+hampers
+subscribing
+stared
+emit
+calculates
+papers
+well-being
+stashes
+horses
+costly
+more ornate
+nitrate
+lift
+esteems
+preside
+mediated
+ideals
+seminars
+nearly
+causes
+horsepower
+hostilities
+innovative
+moods
+palaces
+boundaries
+trademark
+irrational
+foresaw
+clustering
+densities
+childbirth
+tailed
+capitalizes
+mated
+slicked
+frosts
+receptor
+empowers
+Majesty
+Mini
+gazed
+powerful
+One
+cryings
+sport
+merchandising
+admissions
+trace
+straying
+checklist
+ransom
+gill
+shopping
+prefaces
+garnishes
+nature
+Apartment
+shining
+monkey
+scours
+fascinated
+collage
+zones
+microwaves
+makeovers
+trophies
+emigrating
+light
+unveiled
+huskiest
+undertook
+trade
+reliance
+companies
+colored
+adviser
+staggering
+epidemics
+selling
+rankings
+incremental
+benchmark
+organize
+voted
+fallout
+sheriff
+state
+was
+villages
+presume
+delusion
+trots
+pilgrimage
+professed
+reiterates
+serener
+smuggles
+Dynamics
+bowel
+mark
+overwhelming
+legislations
+steer
+gives
+Tenet
+amend
+lesions
+alloy
+zips
+Influx
+capitalize
+waterfronts
+realism
+most favorable
+Can
+hitched
+additive
+convincing
+quantifying
+chirped
+migraine
+flashiest
+cod
+superstars
+dreamers
+mastering
+pulped
+terrified
+illness
+phonier
+Infamous
+noisy
+fibers
+matchup
+linen
+rosiest
+lobes
+invents
+Thug
+composers
+mosaic
+fuss
+kicked
+unsettles
+maximum
+Individual
+Oppression
+showdown
+jumpers
+exists
+emergency
+contingent
+Eligible
+absorbs
+hands
+inviting
+nigger
+blames
+tournaments
+freer
+props
+beeping
+wiggled
+Phenomenal
+clads
+minute
+frightenings
+fluttered
+confuses
+most abundant
+Romance
+signalling
+bores
+ripple
+banish
+partnership
+fries
+Sublime
+biological
+rises
+supposes
+thrilling
+drone
+yet
+legitimates
+quakes
+ratting
+bouquets
+vest
+expenditure
+acrylics
+more murderous
+pulp
+fueled
+glen
+blared
+excitement
+feathers
+snaps
+definite
+mugs
+enhancement
+blazes
+wandering
+espresso
+warheads
+recapturing
+booms
+reassurances
+pricked
+storefronts
+torrent
+repertoire
+downplays
+needle
+tonight
+specialize
+most golden
+balds
+reservations
+adherence
+Kitten
+etching
+more productive
+april
+translation
+punching
+meshing
+detrimental
+renderings
+rejecting
+statue
+trickier
+cheeks
+considered
+convey
+pulsed
+freezer
+Sir
+obeyed
+rumble
+altogether
+leadership
+hybrid
+skeptics
+Via
+fluting
+sweated
+bands
+styles
+potting
+compressions
+gusted
+vocabulary
+beginning
+soybeans
+starling
+want
+prod
+Tip
+corks
+retires
+flavors
+stunning
+coined
+handlers
+pipes
+heralding
+resists
+gutted
+outlaws
+abducts
+own
+replacements
+utility
+long-standing
+allured
+Baseball
+snore
+india
+assaults
+prospects
+puzzle
+orienting
+paths
+splendors
+chancellors
+mirror
+fatties
+downsizes
+bedding
+orchestrates
+Mine
+administrative
+biases
+stiffnesses
+explodes
+gallops
+parity
+machines
+underlie
+yield
+hyping
+entices
+bulls
+Spanish
+tip
+forayed
+looted
+removed
+most famous
+antennas
+most outstanding
+broadcasts
+deception
+Senator
+ganged
+camel
+proctor
+biologicals
+grasp
+store
+impose
+rumours
+vacants
+diarrhea
+blouse
+absorbing
+trajectory
+colonies
+seamless
+accuse
+brinks
+punctuation
+fang
+heaths
+laces
+plaster
+Box
+hispanic
+wholesaling
+swaths
+raiders
+cells
+conversed
+halt
+tangles
+reduction
+seemingly
+return
+strain
+Gulfs
+outer
+covenants
+vanished
+comprises
+limp
+stings
+sighs
+versus
+reverted
+crumbled
+manure
+conversing
+contributor
+drugged
+perpetuated
+plummets
+most remarkable
+dispatches
+detain
+orphan
+mac
+achievable
+tied
+absented
+convincings
+curl
+maternity
+radar
+gentlest
+volunteers
+murkier
+charge
+interests
+authoritarians
+misunderstands
+Reformer
+Only
+blinds
+barbecues
+liable
+sneezes
+wrenched
+actual
+Timing
+woven
+egged
+Booms
+slewing
+linear
+agreed
+toothbrushes
+vocation
+straightforward
+psychiatry
+bunkering
+wife
+bottoms
+MUD
+torturing
+viciousing
+empowering
+revises
+disgracing
+vulnerability
+Reverence
+purer
+identify
+poises
+cabbage
+reuses
+median
+enterprise
+ferociously
+pleasings
+intersected
+Ukrainian
+grassier
+offices
+hipped
+lover
+currency
+temporal
+proceeding
+attained
+specifications
+frustration
+turbines
+equals
+passings
+newscasts
+dazzling
+flattered
+tapers
+everybody
+gesture
+brisker
+Cabin
+misdemeanors
+assimilation
+preheats
+preheat
+crusader
+affecting
+fingerprinting
+sunning
+prizes
+penalty
+condoning
+sodded
+newcomer
+yellow
+traps
+nickname
+waiving
+recycle
+fluxed
+stakeholder
+mocked
+transitioning
+adjunct
+most relevant
+salute
+favored
+bandaged
+cribbing
+Maths
+profiled
+overlapped
+dissipate
+coexisted
+more cheerful
+poundings
+discerned
+gingered
+glimpsed
+ghost
+poise
+triggering
+dull
+Englishman
+more powerful
+defenders
+fulfilled
+bend
+misconception
+incompatible
+foresees
+bumped
+favors
+huh
+sounded
+Providence
+squints
+hoot
+expand
+narrow
+endorses
+semesters
+boozed
+resurrecting
+saving
+scrutinizing
+launching
+health-care
+feasibility
+sipping
+longest
+guilt
+autumn
+cleanups
+moonlighted
+apples
+firmed
+wrinkle
+cramping
+patting
+toured
+improves
+insists
+toilets
+cubbing
+confront
+rams
+suffers
+dinned
+perceiving
+engenders
+halting
+glided
+handles
+restructure
+safes
+hammers
+co-hosts
+accompaniment
+snacks
+billions
+derivatives
+metric
+atlas
+engages
+prescriptions
+astonishment
+formulates
+athlete
+rackets
+trampling
+shorelines
+alpine
+rules
+whack
+downplay
+additions
+tester
+rouges
+anger
+lane
+instructs
+intensifies
+letting
+curtained
+mobbing
+saw
+lean
+investment
+reception
+kites
+winding
+glean
+starching
+crusts
+program
+consulted
+Absent
+casualty
+cancels
+chemical
+contingencies
+japanese
+shepherds
+ginning
+synthesizing
+swing
+frightening
+heritage
+lightens
+scorned
+clamped
+emblem
+flashbacks
+Wood
+incarceration
+suing
+subsidies
+scouring
+invoked
+Halloween
+unfit
+t-shirt
+circuses
+dashes
+science
+merger
+revitalizing
+origins
+preferential
+hopes
+balks
+stomaching
+cursing
+matters
+Initiative
+flock
+width
+huskies
+rose
+governs
+relocated
+hating
+Amber
+fronting
+congratulations
+water
+chasing
+exit
+gasps
+capes
+withdrawal
+pups
+tuck
+slapped
+attrition
+competent
+phenomenal
+despised
+bracketing
+translators
+earrings
+kickers
+embargo
+approach
+burned
+escalate
+preached
+terms
+co-workers
+ozone
+prejudice
+sadly
+fittest
+yellows
+entertaining
+remedy
+exempts
+harassment
+most muscular
+slipping
+Phoenix
+pronouncement
+grunting
+protective
+blockbusters
+tars
+repairing
+funerals
+remarry
+suspicions
+migration
+pathways
+shrieks
+Awareness
+Avert
+clusters
+incarnation
+bests
+shores
+orients
+intimidations
+conceived
+wardens
+devour
+tempts
+stewardess
+outposts
+giftedness
+intersection
+revenue
+wells
+more familiar
+overdoses
+squash
+regularly
+contradiction
+examine
+flashed
+guidebooks
+Tech
+fail
+relegated
+overrun
+gurus
+strange
+aloud
+fined
+instinct
+taunting
+details
+embodied
+issuers
+affirmed
+shacks
+clowns
+litters
+combustion
+appointed
+weapons
+steamed
+corroborate
+bought
+advantages
+punctual
+happenings
+schemes
+card
+standard
+infinitely
+superman
+challenging
+electricity
+first-hand
+strawberries
+jail
+foiling
+consummates
+ghettos
+ending
+statesmen
+refilling
+preoccupations
+discos
+truth
+czars
+analogies
+sure
+maya
+dispersed
+gauging
+co-star
+rejected
+enacting
+programs
+jaws
+ordering
+depletes
+regarding
+adversary
+require
+unfold
+exile
+self-reported
+most popular
+declares
+territorial
+caregiver
+classics
+grandpas
+terror
+eject
+statistics
+deliver
+italian
+swears
+fantasied
+slap
+handicaps
+aggravates
+buckets
+renews
+sown
+revivals
+contraries
+align
+distance
+stretcher
+protagonists
+narcotics
+rope
+wear
+oozed
+sensed
+feminism
+minima
+spree
+charging
+weakly
+addicted
+accelerators
+grapefruits
+needy
+montages
+rigor
+dazzle
+Provider
+spy
+focuses
+badminton
+dammed
+decade
+Violets
+Peter
+roundabout
+builder
+installations
+physics
+crane
+foundering
+Recycling
+gleam
+inaudible
+Paradoxically
+correlations
+beaks
+husband
+glamors
+assassin
+vinaigrette
+makers
+dissolve
+mama
+locally
+rainbow
+fret
+sioux
+videotaping
+quotas
+milkier
+Quite
+Silver
+impolite
+adoption
+crumbling
+shrimped
+Sideways
+flirts
+automakers
+globally
+sacred
+budges
+more fruitful
+Joyful
+covenanted
+surrender
+encyclopedia
+pluralism
+foreclosures
+narrative
+devotions
+tangling
+High-level
+dabbed
+doubt
+covert
+impatient
+Secret
+calling
+picket
+hares
+comparative
+deepest
+helpers
+futures
+abandonment
+specialist
+picnicking
+quirks
+honour
+descends
+awhile
+dated
+kipped
+irritation
+heated
+flails
+underneath
+poverty
+specify
+babe
+roams
+shareholders
+hamstring
+bitterly
+bankrupt
+cues
+Mum
+Melting
+greenhouses
+Calcium
+facet
+fairies
+ditch
+staples
+snapshot
+rendezvousing
+sympathetic
+tap
+clashing
+pinned
+Scientific
+start
+linens
+gorges
+perennials
+lounging
+nicest
+glowing
+showrooms
+capitalism
+together
+differs
+hanged
+activating
+call
+filming
+directions
+naïve
+driver
+retrospecting
+saner
+transferring
+waitressed
+romanian
+coincides
+waterproofed
+roots
+delicacy
+sub-saharan
+Ambulance
+thinkings
+intensified
+composing
+solitaries
+Trademark
+eloquent
+mattered
+folders
+quantified
+replicating
+stoop
+mistook
+floated
+welcoming
+overeaten
+realizations
+articulating
+columnist
+all-american
+genetically
+Tibetan
+questionings
+shammed
+headed
+cashes
+wagers
+detailed
+squadron
+impairment
+scanting
+co-starring
+absent
+comparison
+bison
+Uncertainty
+sponsors
+cohorts
+tonic
+cinnamons
+contoured
+daze
+constrains
+municipal
+shies
+registering
+vacuums
+imprisoning
+recount
+breeding
+amens
+revelling
+peeing
+pines
+shied
+primaries
+kneel
+basing
+Peoples
+more pointless
+projecting
+varying
+sibling
+misses
+product
+contemplated
+service
+stains
+shovels
+rationale
+magazines
+queering
+tie
+legally
+dimming
+slaughter
+departure
+cohesion
+listeners
+kings
+rattles
+patriotic
+casseroles
+splitting
+section
+papa
+agricultural
+fishermen
+educators
+sued
+keep
+hustling
+seconding
+busting
+barons
+clubhouses
+worries
+egg
+lay
+heals
+supervisors
+wetland
+fluted
+handshake
+food
+Grave
+settlement
+impedes
+blushes
+chills
+Logistics
+battles
+inheritance
+sponge
+cloud
+survived
+clearance
+Summertime
+resonance
+antarctic
+dress
+Minimum
+intermediates
+ampler
+roman
+later
+financial
+cautioned
+hisses
+crudest
+trumps
+most truthful
+salaries
+redesigning
+endorsed
+whims
+first-class
+terrorizing
+dude
+injuring
+precipitation
+quails
+videophone
+unpleasant
+turned
+poker
+plumed
+ramp
+fill
+detainees
+underscored
+swelling
+collaborates
+submerges
+secretary-general
+sailing
+networked
+coffin
+reluctantly
+patties
+contestant
+halve
+Harp
+young
+dependent
+convent
+favor
+peeps
+Repeatedly
+tutoring
+widows
+amplest
+guest
+kiss
+mower
+zucchinis
+chance
+acted
+divine
+chirp
+Stringent
+blockade
+warp
+handy
+windshield
+jungle
+Seated
+forty
+panicked
+holing
+trivial
+blindness
+reaping
+interfered
+flavoring
+dribbling
+extent
+accustoms
+milkman
+hovers
+roll
+distant
+boyfriends
+around
+peripherals
+uncertainties
+student
+preferring
+Politics
+vigors
+subtle
+eradicates
+funneled
+forward
+pluming
+fixed
+co-starred
+auditions
+egging
+alongside
+beefs
+discomforts
+portrayal
+Staircase
+quickens
+prerequisite
+progressive
+postponing
+most respectable
+environmentally
+paralleled
+Raven
+craftsman
+expending
+Command
+forecasted
+orchids
+visitations
+celestial
+infants
+spout
+supervision
+monday
+supplier
+transitional
+immersion
+scrapped
+hunters
+segment
+detects
+defends
+overran
+inflicts
+urgent
+moss
+bio
+Hand
+cuffed
+creep
+Identifiable
+bomb
+originating
+feed
+avalanching
+vividly
+comprehended
+mosquito
+pigmented
+generation
+waving
+powerfully
+violinists
+hedges
+authentic
+machinery
+noblest
+cement
+important
+trucking
+hustles
+mystic
+well
+studied
+architects
+more complex
+toll
+takers
+sampling
+novel
+triggers
+eyewitnesses
+assets
+trump
+hundred
+arise
+Continual
+overdosed
+confessed
+shin
+underscore
+Extravagant
+inking
+implicated
+petroleum
+baits
+shanking
+communities
+delicious
+Gorges
+ice-cream
+prostituting
+cherry
+freaked
+Scottish
+hawking
+co-stars
+fugitive
+alternative
+harshest
+ubiquitous
+loss
+flour
+skinners
+subvert
+dollars
+structures
+AIDS
+massacred
+domains
+rests
+guild
+awaits
+suddener
+Poem
+tapes
+importantly
+yawning
+Mysterious
+tingles
+cresting
+fingerprinted
+Seventeenth
+could
+tribes
+Timings
+blankets
+illustrate
+interrupted
+frail
+feuded
+hillsides
+nominating
+bye-bye
+veins
+discloses
+orgasms
+puffiest
+rummaged
+brutes
+comply
+thermometers
+stabling
+enrichments
+buoying
+dance
+housewife
+carted
+endeavoring
+warrant
+grotesque
+solemner
+courtship
+superstition
+likened
+assistant
+airspace
+facilitators
+ridden
+moaning
+dismantles
+rippled
+condominiums
+most patriotic
+treasuries
+anyhow
+stripes
+irritating
+legitimized
+objectivity
+quart
+liquors
+Gracious
+enlightening
+countless
+Apartheid
+queen
+goodbyes
+volatility
+openings
+grassiest
+offspring
+overturn
+combed
+changed
+excluded
+reasonings
+friend
+shaded
+dismissal
+Earls
+alliances
+depressings
+nibbles
+entertainings
+skepticism
+more helpful
+believed
+shake
+impoverishes
+publication
+articulation
+linked
+Creator
+Dove
+bund
+kitten
+horrifying
+deal
+dictation
+charts
+grown-ups
+paves
+overheard
+emulating
+venetian
+brightly
+restrict
+ceramics
+canes
+twirl
+insensitive
+ponds
+ciders
+landslides
+attacked
+dressed
+Orphan
+reimbursements
+mustaches
+constraints
+surround
+shiny
+Algebra
+imagery
+exuding
+solved
+counterinsurgency
+disgraced
+intertwine
+breezed
+courts
+shuffling
+paints
+sponsorship
+glamorous
+notebook
+slack
+leftists
+Certain
+long-distance
+reproduces
+baggage
+formerly
+piano
+plum
+fireman
+more tearful
+sufficient
+complementary
+absorbed
+herring
+jazzing
+things
+leaf
+berries
+owl
+family
+noisiest
+appraisals
+snag
+tyrannies
+sagged
+madden
+dressers
+randoms
+shattering
+ease
+Quote
+Yeah
+confiding
+apparent
+striding
+catfish
+unpacked
+dries
+manage
+domestic
+unsuccessfully
+russia
+singled
+transferred
+co-founders
+reuniting
+shitting
+dust
+hearth
+foreseeable
+munch
+number
+impending
+victimizes
+aerospace
+acquisition
+edible
+constraining
+iceberg
+countries
+suggestions
+lifetimes
+substantives
+partial
+Sect
+fanatics
+demo
+wielding
+hinders
+oiled
+selecting
+paroled
+negative
+space
+Demonstration
+deleting
+red
+advisory
+mediations
+cow
+plus
+overflow
+Utmost
+fantasizes
+brightening
+supervising
+tacked
+sinners
+window
+motorcycles
+wanders
+layers
+booting
+queues
+coordinate
+tormenting
+amends
+content
+wan
+unwilling
+mop
+extraordinary
+wrinkles
+fastest-growing
+ran
+Converse
+sensible
+Regents
+propagates
+baffling
+dwindle
+schema
+missile
+barreled
+rumors
+acceptable
+agents
+recede
+pricing
+basement
+rubbers
+close-ups
+despising
+predictable
+uncover
+foes
+muscles
+burying
+steeling
+cradled
+proxies
+track
+pointers
+kittens
+robuster
+functionings
+Descent
+sort
+fainting
+deemed
+wallpapering
+prohibited
+tablet
+biography
+foyer
+sourcing
+unifies
+forums
+serum
+munched
+vivider
+sperms
+Anglican
+memorize
+slowed
+oppressive
+zeroing
+anywhere
+tariffs
+sexism
+fighters
+breaking
+taxis
+honeys
+chronicle
+hanger
+earnest
+femmes
+subsidiary
+chirps
+traveler
+female
+switching
+pretending
+searching
+rallies
+justification
+paranoid
+cycles
+rums
+nutritional
+buttermilk
+contingents
+laptop
+forehead
+humming
+rectangular
+Close
+dishonest
+amplify
+identity
+smoker
+attack
+sewer
+crafts
+dismayed
+hollows
+sweeps
+dissatisfaction
+deteriorates
+exact
+embryos
+ingredients
+librarian
+biopsies
+thigh
+prouder
+pounds
+Role
+Other
+drowned
+newts
+brigade
+silverier
+melts
+Verification
+chemicals
+magazine
+unrelated
+obligate
+competitions
+caned
+depot
+fines
+ethics
+primate
+forecast
+speakers
+cravings
+furthermore
+cultivating
+fringes
+slaved
+forum
+Mafias
+supervised
+rat
+currently
+emitted
+airs
+involvement
+pensioning
+reckoned
+retailed
+ambassadress
+heterosexual
+bandits
+inclusions
+checkout
+franchising
+data
+more doubtful
+jockeying
+master
+retort
+most monstrous
+push
+overflowing
+forge
+prosthesis
+Noodle
+strode
+canine
+aspire
+beetling
+firm
+stroked
+tolled
+winked
+shingling
+drawer
+borrows
+buckled
+martyred
+profound
+Drunken
+inadequacy
+luxury
+lacking
+month
+tall
+alludes
+dissertation
+georgian
+dynamite
+Hard
+ginger
+shrewdest
+decorates
+conglomerate
+bookstores
+sinister
+ex-husbands
+unthinkable
+universal
+sunken
+dislikes
+Bound
+irons
+sentiments
+stoned
+schemed
+splintering
+rigors
+dames
+ambush
+damning
+sprayed
+locker
+robots
+remedies
+high-level
+oil
+scary
+cutest
+commonwealth
+sets
+thereby
+pajamas
+respiratory
+distort
+bristles
+heave
+more various
+dorms
+chanting
+Harry
+youth
+embrace
+carve
+diet
+hare
+cocktails
+reconcile
+kurd
+led
+Sex
+aggravated
+meads
+checks
+shiver
+lawmaker
+downed
+wands
+generalizations
+kayaked
+conduct
+dens
+until
+arraying
+implication
+segments
+internally
+evacuated
+sera
+critiqued
+five-year
+bracket
+incensed
+freed
+Stained
+transcending
+Holies
+Lutheran
+dilemma
+Canadian
+analogous
+chose
+uncles
+passage
+atmospheres
+dwelt
+seriousness
+dug
+awarding
+botany
+characterization
+undesirables
+staunch
+buns
+assumed
+gospel
+publish
+mistakes
+moors
+eliminated
+multinationals
+intruders
+whining
+interned
+instabilities
+Fearful
+buddying
+tart
+news
+Christian
+paraphrases
+song
+notify
+pronunciation
+seeker
+declarations
+chuted
+future
+Surgeons
+slates
+secondary
+bristled
+moan
+salvaging
+overstate
+astonishes
+incited
+havoc
+slights
+fiction
+coiling
+creamiest
+attendees
+eras
+mends
+pulses
+crushing
+information
+Whereabouts
+refinement
+accidentals
+messes
+magic
+acknowledged
+traversing
+cope
+olive
+eroding
+jade
+tendering
+deacons
+labels
+Singapore
+Canary
+isolate
+departs
+portfolio
+redirecting
+retention
+paycheck
+crippling
+stunnings
+rejoicing
+coped
+detection
+trillion
+inequality
+waiting
+pass
+turmoil
+most notorious
+Ketchup
+developments
+teach
+wore
+overseas
+bureaucratic
+perception
+It
+teaspoons
+spokesmen
+Midtown
+literal
+smoothing
+regulates
+quaint
+ever
+dwarfing
+swiveled
+calmness
+grandparent
+front
+enlarging
+elegant
+crest
+juveniles
+Tuesday
+colder
+glucose
+linking
+Seventeen
+unreasonable
+kilometers
+requisites
+self-control
+enzymes
+tearful
+coats
+learned
+statesman
+acquire
+sensing
+Unlike
+fatigues
+payment
+affair
+amidst
+Algerian
+collectors
+cheering
+unfriendliest
+maddest
+curing
+premiering
+worldliest
+fuck
+decry
+deflected
+blossoming
+executed
+bureaucracies
+boosted
+ironies
+Exclusive
+underscoring
+bugging
+plumping
+californian
+Sentimental
+speculates
+reflecting
+expanse
+televised
+explicitly
+reported
+enjoyment
+warfare
+Sympathetic
+toast
+lugged
+social
+bagel
+spoons
+farthest
+dumping
+millions
+exhibition
+prop
+roast
+solidify
+grocery
+chocolate
+murder
+Threat
+devoting
+curliest
+dilute
+fiber
+unprecedented
+wailed
+pauses
+glazing
+raked
+newspaper
+chimes
+chops
+misunderstand
+possibilities
+present
+previewing
+wary
+umbrella
+startled
+resumed
+abruptest
+cannon
+now
+modern
+sized
+concerns
+slick
+worshipping
+trekked
+designers
+celebration
+swap
+audit
+alleviating
+Tees
+dressing
+upscale
+uh-huh
+sandwiched
+windrow
+volumes
+copied
+sportsmen
+phrasing
+describes
+gulls
+processors
+reconsiders
+gaps
+masculine
+waterfront
+unearthing
+impress
+sigh
+anytime
+jet
+confederates
+killer
+propel
+smirks
+bananas
+retaining
+attendants
+describing
+adopted
+horrify
+bungalow
+stray
+usage
+oohed
+lemons
+comprehends
+fencing
+eruption
+pave
+unanimously
+individuality
+merging
+encyclopedias
+sanctions
+privileges
+scolded
+prolong
+freshwater
+blockbuster
+omits
+splinters
+dreading
+knitted
+host
+canoeing
+advent
+cheerleader
+Methodist
+midwife
+imperative
+insistence
+stays
+nominations
+ape
+vaccines
+risked
+Oracles
+prodded
+extravagants
+hence
+whereas
+literals
+inaugurate
+police
+pastry
+dusting
+bordering
+organised
+smearing
+saddles
+publicizing
+blame
+grocer
+ensembles
+leeks
+affairs
+clamoring
+assuring
+sulfurs
+mainstream
+convening
+half-dozen
+boasting
+nurseries
+excepts
+seconds
+keenest
+pistoled
+luck
+sensual
+homesteading
+merer
+reinventing
+patty
+spacing
+responsible
+artifacts
+exodus
+hectare
+shorter
+biology
+grips
+wedges
+oversaw
+typically
+shrimping
+sociological
+floral
+precludes
+jolts
+brought
+bathrooms
+flaking
+meteorite
+critique
+Hut
+fatalities
+suspect
+town
+canopy
+relocate
+part
+beatings
+crookedest
+more gracious
+weakest
+interviewed
+extractions
+rotting
+per
+workforce
+widths
+waste
+birdie
+visual
+humanitarian
+enrolls
+suddenly
+acclaiming
+stereotype
+products
+sidelines
+bulkiest
+enameled
+chateaux
+protagonist
+more rigid
+jealousy
+seas
+ahead
+ventures
+coups
+ugliest
+frogs
+liberating
+birch
+theorized
+nomination
+fluency
+parroted
+more inconvenient
+Moslem
+bare
+beck
+claims
+impossibly
+dizzying
+imitates
+otters
+arguing
+repetitive
+trammed
+complaining
+texture
+intersecting
+runners
+some
+wicker
+seismic
+rotor
+implications
+thickened
+overly
+originals
+orderlies
+caskets
+competency
+spirits
+initiates
+slackest
+growls
+most turbulent
+distinctive
+consulting
+Felon
+ebbing
+Opaque
+stoning
+theater
+foreigners
+hollering
+Sun
+rainy
+hollower
+representatives
+batches
+clings
+ousts
+orientation
+savvying
+prefers
+Middle
+weaken
+artichokes
+feud
+collaborators
+tangled
+real-life
+reporting
+fire
+collapse
+relocating
+dear
+Relative
+Premium
+Engineering
+greasiest
+lesioned
+e-mailed
+RBI
+martian
+idling
+fantasying
+Victor
+rarer
+loosened
+Portuguese
+moderate
+keener
+considering
+videotapes
+sidewalk
+chechen
+trained
+overloads
+staging
+plows
+shadowiest
+sunnier
+warps
+profiles
+insure
+fraught
+Boom
+eager
+knifing
+Obsolete
+interviews
+crisps
+appendix
+substitute
+sinks
+foresee
+lesion
+insects
+certification
+blaring
+mug
+limb
+brows
+narrates
+squares
+caches
+bedtime
+libertarians
+browser
+choice
+wastes
+rapider
+outing
+negotiating
+hunches
+Yoruba
+mesmerize
+bold
+Haunted
+shallows
+improvised
+full-time
+concept
+scopes
+mitigated
+simplest
+whisk
+janitors
+Sovereign
+lined
+dormant
+revelations
+skirt
+pursed
+scarier
+vines
+quarterback
+tiresome
+sluggish
+PCs
+wearying
+closing
+speck
+yeah
+records
+biographies
+meant
+Unfortunate
+muffins
+scurries
+revolving
+incorporates
+balconies
+handsomest
+arider
+afflicted
+Irish
+renewables
+drivers
+percentage
+Slave
+blows
+awful
+commences
+beefed
+appropriates
+conceptualizing
+grazed
+Morning
+bussed
+sniper
+Thunder
+thunderstorm
+cocoa
+fair
+entrances
+unravel
+proactive
+possessions
+garners
+secondhand
+migrant
+extending
+wealth
+promotion
+immoral
+goggling
+recite
+finland
+ipod
+microwave
+leverage
+Captain
+welding
+remodeling
+slash
+sedimenting
+highways
+bolstering
+bid
+Oh
+eh
+decreed
+tolerant
+Golden
+journeyed
+summarizing
+contact
+injustices
+excesses
+baltic
+prosecuting
+latin
+tuition
+dried
+wash
+more rugged
+flashback
+brass
+yoghurts
+two-week
+reward
+difficulties
+aides
+confinements
+easel
+kissed
+dope
+refrain
+mixing
+brazil
+prescribe
+cheerleaders
+Severer
+confess
+bewilder
+donkeys
+strategy
+allocation
+constrained
+improvising
+pinpoints
+Classified
+rec
+lightening
+foray
+recess
+Parisian
+punts
+memoirs
+tide
+day-to-day
+wait
+lecturers
+parenthood
+Reading
+reconstructing
+moose
+distrust
+Volatility
+swarmed
+roach
+permission
+interdependence
+through
+farther
+irrelevant
+sharpen
+equally
+faction
+crosstalking
+co-ops
+tenures
+begging
+much
+buds
+correcting
+legend
+encountered
+fertile
+ashtrays
+focus
+margin
+sterling
+majoring
+travel
+pedaling
+republican
+Patent
+categorizes
+substitutions
+most convenient
+hoop
+incomes
+swimming
+antelopes
+sightings
+moons
+polish
+commitment
+unspoken
+barns
+replenish
+automobiles
+strong
+Belgium
+slacking
+drawing
+theorizes
+reimburses
+Traumatic
+Shin
+insight
+fox
+expelling
+voicing
+patiently
+sedimented
+pearls
+warmest
+newfound
+opener
+loftier
+Jolts
+cuisine
+reaffirm
+upstate
+exception
+maddens
+opportunities
+enlist
+credited
+bilateral
+legitimizes
+advanced
+kit
+hypothesizing
+fraud
+consumers
+desperately
+itinerary
+impede
+attiring
+icon
+suffices
+Whole
+ransoms
+subgroup
+weed
+insurance
+soaking
+imagined
+clones
+most spectacular
+hindu
+circle
+nude
+acknowledging
+dispels
+timelines
+Straight
+epic
+Cheddar
+rethought
+disintegrate
+visitors
+postcolonial
+accepts
+current
+pamphlet
+bait
+Pink
+sledded
+dodgers
+Souvenir
+glanced
+baying
+self-conscious
+grounding
+irritated
+repaired
+decreeing
+allude
+fender
+wrestlers
+electorate
+ported
+coasts
+more unofficial
+apricots
+ideas
+burglar
+watery
+billowing
+talents
+zip
+solemnly
+Seal
+sayings
+qualifies
+domain
+mudding
+gramme
+relates
+rut
+whimpering
+motivational
+release
+jugging
+more infectious
+whaling
+tunic
+fores
+mantles
+cancer
+forage
+Inner
+notes
+gunmen
+originate
+detachment
+fidelities
+wrestled
+Taking
+transnational
+dealing
+impulse
+wins
+raising
+sockets
+tyrants
+yummiest
+more prudent
+trainings
+more acceptable
+attorney
+influx
+Beloved
+drew
+walkers
+mazing
+briefly
+Natural
+clone
+Tank
+more artificial
+snow
+therapist
+sprouted
+misconceptions
+Feminist
+mariner
+beams
+teaspoon
+engraving
+castings
+abuses
+percent
+punish
+medalled
+disposes
+camels
+limits
+glued
+trade-offs
+men
+tornadoes
+officially
+reproduce
+dividing
+palled
+sewers
+watchdog
+diabetics
+recover
+curtains
+rushing
+spaceship
+downs
+hulling
+confidentiality
+elevation
+quality
+side
+heroic
+hobbies
+sovereignty
+prepared
+eraser
+Stub
+obscene
+snared
+cods
+cops
+dangled
+blackboard
+oath
+mellowed
+centerpieces
+Empires
+fused
+overweight
+shames
+weeds
+pole
+rabbits
+gambled
+ounce
+uranium
+exiled
+screwing
+handbook
+humidity
+parishioners
+assassinated
+apprehension
+Washer
+abundant
+transplants
+necessities
+brandies
+ecstatic
+idiot
+rearranges
+plotting
+hardiest
+download
+survivors
+tightly
+offenses
+reasonably
+strategists
+teacher
+submerge
+departing
+oilfields
+greatness
+Tenure
+emerged
+mosaics
+deceive
+lutheran
+cacti
+openers
+observer
+allied
+maintained
+Kaisers
+kits
+cycle
+unleashed
+arrows
+scouts
+confederated
+gentry
+nurtured
+slaying
+totalitarian
+bowled
+Par
+pikes
+muscular
+smugglers
+Password
+knob
+produced
+practised
+modesty
+refer
+dimension
+crude
+rainier
+diagnosing
+cliffs
+concentrated
+erase
+connotations
+clay
+plagued
+reversals
+bowels
+sculptors
+persists
+promoters
+quantity
+herrings
+imitated
+opennesses
+guidebook
+hunger
+quarreling
+stale
+Safaris
+operas
+blondes
+fatiguing
+traversed
+plywood
+hazardous
+extremest
+alterations
+shatters
+tidaler
+tower
+garaged
+squirts
+ninja
+execution
+forking
+one-third
+brightens
+optics
+deli
+eligible
+tufting
+powerhouse
+Yen
+teapot
+aggressively
+shimmering
+forbade
+Sterling
+sifted
+buyouts
+tailor
+inducing
+quilting
+drummer
+inconveniencing
+equities
+Extra
+regiments
+cued
+arched
+pace
+depends
+practitioner
+headphone
+Bat
+assessments
+neighboring
+intrude
+stocking
+vanishing
+lunches
+scallops
+chances
+collections
+superficial
+rutting
+gape
+cabled
+informs
+tagged
+inevitable
+nitrogen
+semiconductor
+more fluent
+fishery
+freighting
+stable
+kill
+subverts
+plops
+conversations
+rotating
+praised
+complicates
+geniuses
+disseminating
+Downward
+families
+attic
+yachting
+purposes
+camped
+greed
+gunman
+desired
+artists
+hungarian
+point
+darned
+vied
+antibody
+porting
+bird
+deserving
+Violet
+despite
+rousing
+unavoidable
+boulders
+sweepings
+ashes
+triumphs
+independences
+disastrous
+constructive
+armenian
+durability
+stalks
+carton
+brotherhood
+cloudy
+code
+standoff
+trampled
+reducing
+reacting
+fighter
+tapered
+firings
+tinker
+Wonder
+Surviving
+creek
+honours
+accessories
+harbours
+passing
+motto
+raved
+hailing
+transport
+lunatic
+Drakes
+learner
+hazards
+retreated
+more remarkable
+lurks
+returns
+consolidating
+reaffirms
+discriminating
+creeds
+articulates
+silvery
+deserve
+skateboard
+optimized
+bearers
+meeting
+printers
+millionaires
+drought
+camps
+Best
+backed
+frank
+ripping
+plucked
+detectives
+blesses
+reappears
+refuse
+apologizing
+sect
+overwhelmingly
+pulps
+marriages
+sticky
+chant
+Disproportionate
+periods
+tortillas
+primitive
+combinations
+irritations
+grotesques
+antisocial
+perished
+insufficient
+reshape
+welcomes
+tempered
+embodying
+peter
+cites
+israeli
+imposed
+championed
+peaches
+creeps
+parish
+thrift
+treasuring
+branching
+snarls
+dignifies
+rages
+sprouting
+freckling
+receipt
+whines
+falcons
+prime-time
+overloading
+diminished
+regarded
+Shocked
+mourned
+bulk
+election
+stacking
+severely
+pollsters
+mailings
+refining
+note
+limited
+mingle
+salons
+more preferable
+pray
+insured
+coupon
+functioned
+rapping
+voiding
+diva
+above
+unacceptable
+fads
+hoods
+ethnographic
+smoking
+trainees
+corporate
+stashed
+abounds
+concurs
+pinked
+factor
+nearing
+nun
+aspiration
+upgrade
+stripe
+arsenic
+purchase
+hypothesized
+delicately
+cows
+darted
+testers
+markedly
+densest
+spurring
+silver
+moisture
+scowling
+inevitably
+killed
+glittering
+violation
+quarrel
+erode
+refraining
+superbest
+wee
+mouse
+implied
+fearless
+thicket
+apparently
+sunny
+snowfalls
+charger
+melons
+rhymes
+Shifts
+suiting
+pathway
+bamboo
+reinforced
+most acceptable
+mogul
+coppers
+notching
+choosing
+wants
+overtook
+biologists
+oiler
+seasons
+Screenplay
+demonstrator
+synagogues
+clear
+appellate
+volunteer
+depended
+orderly
+balking
+ambitions
+gateways
+genus
+sabotages
+most gigantic
+Now
+alliance
+applaud
+toiling
+troop
+fracture
+stressful
+overlaid
+golfing
+marketer
+wanted
+perversest
+holder
+seem
+freckle
+bruises
+geese
+giggled
+ducked
+parisian
+My
+barley
+greener
+postcards
+determination
+commandos
+scarred
+ceases
+personalities
+elevate
+darkens
+contemplates
+industrializing
+catches
+locations
+recruiting
+accommodate
+magnetic
+heftiest
+formals
+Marvel
+most tenuous
+regardless
+civilizations
+bye
+temptation
+most nervous
+cones
+field
+shooters
+delineates
+assassinates
+inappropriate
+assigned
+Sorry
+darkness
+confide
+grape
+juniors
+sliced
+significant
+saloons
+tending
+tighter
+sketch
+freeing
+foreground
+orchestra
+employers
+hairiest
+swarming
+yourselves
+bleached
+scroll
+tapping
+rapport
+petrol
+shawl
+sublime
+loveliest
+certifies
+Also
+evoked
+boutique
+gazes
+volleyball
+detector
+corpus
+afflicts
+telegraphed
+Southeast
+portrayed
+symbolism
+enormously
+flurries
+nation
+waffle
+jollied
+Alpine
+cliff
+cooperating
+clattering
+tingled
+rough
+littering
+classified
+Hierarchical
+stepmothers
+lavenders
+more microscopic
+realms
+preschooler
+Delegates
+admiration
+else
+successive
+Budding
+abided
+perform
+whiteness
+cookies
+pensions
+bedded
+scurried
+three
+mobilization
+shared
+disk
+withheld
+singling
+masterpieces
+stint
+mixes
+dissenting
+logos
+socializes
+targeting
+migrants
+auxiliary
+grayest
+piloted
+horde
+most complex
+handiest
+hmm
+scares
+soldiers
+stand
+Ah
+fringing
+wetlands
+analogy
+thugs
+syrup
+rash
+low
+oral
+art
+competition
+aspects
+vial
+environmental
+commuter
+Background
+outrage
+purported
+repeating
+titans
+image
+messages
+spot
+fattest
+harassing
+whip
+luckier
+quote
+fouls
+recognizes
+phasing
+validate
+driveways
+remodelled
+buttocks
+Parliaments
+flung
+sales
+slump
+dictatorships
+lightweight
+federally
+testifies
+editorial
+times
+lily
+marinas
+look
+sliding
+renamed
+Russian
+underwent
+comps
+deflecting
+thicknesses
+unfriendlier
+o'clock
+intakes
+chapping
+wolf
+dozens
+thirsting
+burdened
+genius
+alerted
+allergy
+pediatrician
+norms
+Fascinating
+recently
+got
+blazed
+locality
+conditioning
+Fragmented
+stalk
+execute
+newer
+cross
+poem
+settlements
+choir
+conveying
+slices
+yoghurt
+duets
+grazing
+flower
+withdrawals
+dispelling
+timing
+Parents
+cushioned
+supermarket
+Civil
+wasps
+dramatic
+preseason
+proponents
+flashy
+failing
+authoritarian
+unemployment
+continuum
+ruined
+rots
+bagels
+fragment
+acquitting
+distinctions
+multiple
+disregards
+more substantial
+cleanse
+fabled
+dearest
+escaped
+peers
+dub
+majestic
+aspired
+maddened
+tree
+mantra
+shamans
+attraction
+preemptions
+mover
+oyster
+sweep
+sue
+solvent
+recycles
+rearranged
+govern
+knots
+examinations
+grower
+mushroomed
+backings
+starlings
+perpetrates
+governance
+occupancy
+shelling
+scratching
+horrible
+ensemble
+acknowledgments
+Fuse
+investigating
+blisses
+moroccan
+clue
+toothbrush
+tucking
+mainstreams
+witnesses
+Israel
+throned
+ancestor
+faltering
+valuable
+accesses
+pit
+beggared
+wrestles
+hazy
+Obscene
+sympathies
+stiffer
+most successful
+empties
+headmistress
+spouses
+Skeptical
+visuals
+trudges
+Most
+postcode
+harrying
+twitches
+crumblings
+lucky
+diversifying
+returning
+maximize
+screamed
+gaining
+cranes
+Dickens
+underlying
+whisking
+desperation
+depressed
+reorganized
+renames
+second
+overseen
+vapor
+reinforcement
+most predictable
+bathing
+nuns
+missing
+telephones
+abolishing
+destination
+miserable
+barreling
+causing
+congratulating
+tuned
+jutting
+implanting
+symbolized
+bowling
+pottered
+shampoo
+intently
+dueling
+confirmed
+bull
+locked
+force
+whistled
+sailings
+jabs
+gushes
+renting
+beaconing
+emphasized
+pirate
+Lieutenants
+reaching
+eighteenth
+pervade
+glistened
+governors
+literary
+medal
+tinting
+Upward
+flora
+imaged
+deterrent
+crowds
+syncs
+dialing
+wrenches
+chancing
+expecting
+grandson
+avidly
+renounce
+coerces
+Closely
+hazarding
+enjoying
+suckers
+loyalist
+consecutive
+heightens
+derailing
+shyer
+trading
+hourlies
+dollying
+murders
+bloody
+portables
+bothers
+most seminal
+blowing
+Interplay
+bead
+regimenting
+handouts
+france
+amounting
+leaks
+accommodation
+excellence
+brewer
+Midsummer
+frustrations
+hiker
+terracing
+restriction
+specking
+pimp
+estimated
+grandchild
+optimizing
+protrudes
+encouraged
+revel
+inspected
+commanders
+dusted
+retreating
+shimmered
+loyalty
+jointing
+sloppy
+grades
+sons
+resistance
+contradicts
+prosecution
+spawns
+heaping
+dietary
+claws
+abducting
+Super
+penalties
+shaft
+Discriminatory
+inch
+announces
+symptoms
+monument
+attractive
+periodic
+evaporate
+pound
+tenses
+caravans
+twelfth
+Turbulence
+fruit
+trades
+bounces
+lobbyists
+tripling
+lobbied
+militias
+examination
+breasting
+shortages
+rap
+boards
+lull
+steal
+valuing
+savviest
+fillet
+Upwards
+preference
+proud
+Gospel
+psychological
+decisive
+imaginable
+shuffle
+towers
+heater
+tilts
+urged
+palming
+utters
+crouch
+yanked
+peg
+fascists
+aptly
+phantoms
+markings
+woes
+execs
+accrued
+recounts
+Heavenly
+diamond
+islamist
+projected
+confronting
+compel
+holidays
+prolific
+snakes
+spinach
+recipients
+noted
+Acoustic
+norwegian
+sexy
+grapefruit
+Purely
+magnificent
+marsh
+sucker
+hooped
+Cancer
+contemplate
+tempt
+president
+blackberries
+abandoning
+anchoring
+exemplifies
+authoritative
+alter
+severity
+strangers
+undersides
+soars
+crazy
+hookups
+jading
+Captive
+envelope
+advocacy
+sin
+graceful
+cycling
+refreshes
+tribunes
+handmade
+turkey
+waffled
+analytical
+Respective
+developing
+entitled
+puffy
+totaled
+gingers
+briefcases
+destabilized
+Wales
+fledgling
+confounding
+most tasteless
+overall
+trays
+delegation
+caveats
+empty
+fumble
+shares
+delving
+muddy
+classification
+commits
+timeless
+undresses
+neuron
+raves
+Read
+oohing
+labyrinths
+provider
+plot
+glide
+inspirational
+emblems
+hijacking
+thirds
+verifications
+Outlook
+drummed
+consequently
+tallying
+alien
+employments
+pastes
+deductible
+guaranteeing
+pissing
+vastly
+discriminates
+jeopardizes
+quarries
+amazingly
+exerted
+gals
+rating
+assemblage
+cornered
+credit
+modify
+tossing
+sharpest
+shrubs
+strut
+Main
+nasal
+unique
+onions
+mowers
+souring
+Sometime
+repealing
+Fern
+bicepses
+firework
+assumes
+currencies
+choruses
+avocados
+most reasonable
+strip
+rockier
+grant
+tablespoon
+station
+coral
+summoned
+embarked
+Lending
+flattest
+clicked
+starves
+OffShore
+paralyzed
+disperse
+exchanging
+restrooms
+faked
+rubs
+fear
+sisters
+pronouncements
+owes
+broken
+Official
+rogue
+total
+club
+apprehended
+shampooed
+pansy
+elks
+leading
+attractions
+scarcity
+troupe
+quotation
+banking
+psychic
+invasive
+photographs
+governed
+dived
+decision
+recording
+freshman
+steadier
+tins
+ridiculed
+calories
+clocked
+assembly
+fancier
+nibbling
+stairs
+C
+withered
+dame
+predatory
+most careful
+astonish
+deprived
+terminates
+spanish
+commemorate
+stables
+retorts
+anti
+roguing
+diagnoses
+responsibilities
+hunted
+coffins
+immaturer
+organise
+grizzlies
+stockings
+liaisons
+peppering
+beast
+undertaking
+films
+rovers
+dolly
+spectra
+rendezvous
+encompasses
+Australian
+swayed
+pollutes
+freeway
+Empirical
+hardy
+iris
+coaches
+loosely
+detecting
+assert
+gangsters
+handed
+tribunal
+clip
+prejudices
+easiest
+sitter
+mortars
+enemy
+dictator
+inventories
+withholding
+speak
+footprint
+empowerment
+superber
+aquariums
+mathematician
+more fabulous
+more stressful
+accumulated
+booze
+furrow
+amid
+opened
+Historic
+wirings
+Westward
+coal
+broadcaster
+lecturer
+addicting
+more comparative
+address
+hugest
+dismay
+escorting
+assembled
+bulge
+bell
+intimating
+swapped
+shadowier
+grade
+discover
+ribbon
+rolling
+magnified
+mellowing
+runner-up
+praying
+newly
+inner-cities
+Boring
+misery
+aide
+uttered
+hunched
+scatters
+Foremost
+veterinarian
+toppings
+deferred
+vague
+overcomes
+naps
+buffaloes
+rewarding
+Magic
+sentences
+mashes
+gambling
+appears
+chimney
+fliers
+ram
+repealed
+driest
+grated
+pumps
+injuries
+certifications
+Albanian
+able
+piss
+meandered
+noticed
+saute
+banked
+bunny
+dispensing
+helpful
+inciting
+motivating
+adventures
+all
+makeups
+global
+beckoning
+traveling
+escorted
+Comedian
+dazzles
+crook
+laborers
+Federalist
+accustom
+apparatuses
+shutters
+tendon
+ate
+outspoken
+purity
+sacking
+matrices
+harbour
+nines
+advising
+diversify
+attribution
+comet
+forays
+expensive
+deck
+cocktail
+judgment
+element
+drunk
+harms
+rapes
+centimetre
+statics
+scarcely
+photons
+traffickers
+victimize
+fractures
+lights
+shacked
+computer
+nationally
+ridding
+destined
+most fertile
+excluding
+leather
+merges
+shelves
+shopkeepers
+navies
+affordable
+recurrent
+missionaries
+scarcities
+leashing
+soccers
+gloomy
+classify
+authorizes
+nothing
+collapsing
+buckles
+Lithuanian
+levying
+cadets
+martyr
+chalked
+midwest
+recital
+greens
+unrealistic
+cartoon
+getting
+whined
+indulges
+overwhelmed
+population
+assimilating
+city
+Mystic
+grim
+nuance
+tension
+misuse
+hangars
+mean
+parachuting
+flipped
+insecure
+clasping
+Fourth
+bog
+rifts
+correct
+victims
+busts
+pivotal
+brokers
+more devastating
+greeted
+Nightly
+thai
+Usually
+otter
+reports
+Pro
+Express
+Alert
+scowl
+squeal
+habits
+gown
+spiking
+aerobic
+continuation
+most palpable
+ruler
+unforgettable
+penny
+fellows
+disciplines
+fifth
+affiliate
+relic
+contrasted
+concludes
+deterred
+stridden
+factories
+eroded
+freckles
+rooms
+patio
+homeland
+holed
+mammoth
+baring
+barrages
+possession
+running
+popcorn
+skating
+resembling
+chronic
+troopers
+pocketing
+dj
+Oral
+physically
+referral
+centennial
+fenders
+kilometres
+greet
+denounced
+counter
+cinema
+Ultimate
+swaps
+more depressing
+temporary
+commonplaces
+placed
+Cypress
+obeys
+arrogantly
+depths
+catalogues
+Auburn
+penguins
+spots
+develop
+i.e.
+rarities
+morph
+humouring
+absentee
+sixth
+scams
+ordinances
+calculation
+Depth
+kickoff
+faces
+supplement
+people
+most conspicuous
+tremendous
+changing
+snowfall
+more neural
+vanishes
+silicone
+deaf
+freckled
+scissors
+enormous
+clerking
+remorse
+more modest
+disturbances
+decayed
+paralyses
+headier
+cohort
+resenting
+anchorage
+shat
+geologists
+hopping
+Palettes
+evils
+stands
+productions
+vouchering
+throne
+drills
+welsh
+Rebel
+rice
+freest
+flounders
+Utilitarian
+displays
+loots
+humored
+weekends
+Muslims
+variants
+ton
+than
+burger
+plants
+diagrams
+Star
+underwear
+savored
+degenerated
+hunching
+cognition
+founders
+press
+Unconditional
+legalize
+jogged
+sparkles
+misusing
+subs
+infectious
+spark
+shards
+thumped
+shortcomings
+bellying
+cling
+wronged
+cloned
+frauds
+meteors
+brood
+forefront
+huddles
+weapon
+brands
+coding
+dooming
+plight
+supering
+sift
+groundwater
+revisiting
+hospitalizations
+ascent
+tease
+Random
+suffered
+sap
+vacuumed
+fork
+drinks
+oblige
+pecked
+disappointment
+supplies
+genre
+fortresses
+cactus
+autobiographical
+clause
+squids
+routine
+milestone
+stain
+most considerable
+stabilization
+advertising
+voters
+Central
+abusers
+graphics
+gale
+obligating
+offense
+annoyance
+employ
+contacted
+grasses
+traits
+blending
+carpenters
+wetting
+simplistic
+lures
+recipes
+state-of-the-art
+children
+voluntarily
+listens
+corralling
+daily
+sprinkle
+misty
+atrocities
+Chile
+supply
+til
+yang
+Cubic
+hazing
+origin
+nuder
+hostess
+screeches
+spirals
+pork
+propositions
+cafeterias
+woodland
+Strife
+faulting
+peninsula
+submarines
+blind
+posture
+stutters
+containers
+scraping
+free
+multiples
+palace
+neighbourhoods
+parcel
+wagons
+orchestras
+looming
+nodes
+glistens
+borrowed
+fond
+nonstop
+limitation
+cardiovascular
+exits
+format
+momma
+Diet
+swells
+connotation
+Slippery
+zucchini
+most interesting
+spied
+instinctively
+triple
+recipe
+adorns
+presented
+disabled
+rethinking
+floodings
+jug
+pea
+novae
+Initial
+springing
+crescent
+businesses
+bunk
+propping
+posits
+overeats
+meaning
+award-winning
+graphed
+gouges
+snugged
+highest
+plies
+salvaged
+recovered
+embedded
+toddlers
+landowners
+pars
+bewaring
+intersects
+not
+wining
+disgusted
+wardrobe
+jigs
+disproportionately
+ferries
+kinder
+leads
+synonymous
+brained
+carrying
+tiling
+postwar
+degenerating
+street
+demolishes
+stifling
+noon
+hipping
+hat
+heaved
+more magnificent
+explained
+fulfills
+pads
+detaches
+crime
+perfection
+sequenced
+anticipated
+ads
+cutbacks
+shortcuts
+Overboard
+Diesel
+mate
+sauteing
+thursday
+boss
+hissed
+millennium
+Monstrous
+wringing
+Vocal
+Moroccans
+regulated
+accounts
+ritual
+privileged
+mesmerized
+whiled
+shuts
+rigorous
+southwest
+most positive
+hobble
+powers
+twinkled
+jeep
+educating
+plunging
+skulls
+high-risk
+huskier
+anything
+reckons
+Attempted
+Odyssey
+abusing
+humid
+commando
+Butcher
+relinquishing
+assorted
+frays
+starting
+mailed
+choking
+subverting
+beamed
+skid
+beginner
+appreciations
+supports
+residency
+altitudes
+rationalizes
+solutions
+chord
+clocking
+substituting
+zooms
+downright
+feedback
+aunts
+concentration
+moustaches
+motherlands
+scraps
+gleamed
+lambing
+skied
+figured
+entirely
+chair
+TILL
+operator
+macros
+psychiatric
+fortunes
+constructed
+neutralized
+dyeing
+investigator
+shootings
+brilliance
+affection
+roommate
+possessed
+spits
+thwarted
+dulling
+survey
+Acid
+expansive
+oceans
+nodding
+prominently
+Or
+unprepared
+perches
+cherishes
+slowdowns
+steep
+sensibility
+Thanksgiving
+nestle
+chlorine
+suppliers
+adobes
+couplings
+more inventive
+hindered
+Wont
+yearnings
+recalls
+Mute
+blissed
+arab
+movies
+frying
+swell
+flop
+splintered
+tenet
+hydraulic
+comrades
+umpiring
+outstretched
+enamels
+purplest
+shampooing
+persuaded
+dictated
+heartlands
+jeopardizing
+tensest
+towns
+most brilliant
+void
+Pi
+realizes
+Specter
+adores
+divas
+pin
+temples
+toeing
+Meter
+reaped
+Specificity
+magnet
+milder
+lumps
+announcers
+darn
+makeover
+societies
+pickles
+withdrew
+clash
+disrupting
+rabbit
+floras
+tolls
+fouling
+Black-and-white
+booker
+cop
+hens
+liturgies
+backup
+faithful
+washing
+unleashing
+gorge
+elements
+writings
+preschools
+korea
+airways
+propagating
+countertops
+cups
+fends
+clinical
+beg
+daughters
+similar
+trunk
+Sightseeing
+sews
+glinted
+brave
+vulgar
+gutting
+trust
+stamped
+mixed
+departments
+australia
+Massives
+restrictive
+mar
+scanted
+start-up
+walk
+Punch
+exemptions
+soberer
+immersing
+advise
+gazing
+universities
+strides
+Anxious
+hung
+plain
+comprising
+detected
+Iconic
+more explosive
+problem
+again
+stews
+mothering
+prohibit
+job
+breaches
+forgotten
+coves
+remade
+auctioning
+storytellers
+raspberry
+fits
+involving
+Obsessions
+withdrawing
+vanish
+exploring
+scan
+Might
+divers
+ceramic
+anguishing
+vodka
+paramount
+place
+pad
+ousting
+overrunning
+tenth
+Worth
+automatically
+bettering
+desktop
+wagered
+capture
+snug
+humanists
+prompt
+formidable
+Latitudes
+discretionary
+depicts
+faxed
+separates
+warlord
+blushed
+clueing
+utilitarian
+Arabian
+focusing
+presidencies
+hardwood
+uninterested
+most communal
+differentiation
+fuelling
+properly
+animations
+quotations
+condone
+estimating
+debating
+flashing
+dissented
+heartbeat
+Duck
+comprehension
+discounting
+neighbourhood
+squeezing
+eclipsed
+parliamentary
+phony
+focal
+mommy
+competed
+sprinkled
+judgements
+kindness
+angle
+tanker
+passages
+bystanders
+advantage
+curtailed
+comforts
+abundance
+certainties
+reassured
+rummage
+renounced
+to
+horrified
+injure
+differing
+remarks
+Eclipses
+Beers
+conceals
+mass
+guarantee
+roping
+consume
+approve
+weirder
+booths
+inside
+empower
+mesas
+broadly
+most notable
+financed
+resurrects
+professions
+acoustics
+exploit
+tunnels
+audio
+oriole
+rapper
+specs
+evacuates
+convoying
+dismal
+puzzles
+Rules
+research
+humoring
+hitches
+archbishop
+monster
+independently
+formally
+fins
+barricading
+pathologies
+most righteous
+coolers
+particle
+conjured
+spruced
+centralizing
+geology
+needed
+rotation
+pragmatic
+empires
+seize
+doctor
+Safe
+likelihoods
+guru
+brownies
+withstands
+costing
+apparatus
+compile
+production
+genuinely
+shortfall
+warms
+sink
+discovered
+shinier
+workload
+Public
+friendliest
+contouring
+windrows
+equate
+overheads
+oriental
+dissident
+Poetries
+ok
+constitute
+eighty
+donned
+immigration
+similarity
+serial
+compression
+terrains
+comedy
+veils
+urgencies
+dolphins
+gap
+reed
+envelops
+slavery
+cockpit
+metabolic
+banned
+comp
+forts
+positioned
+uptown
+put
+air
+Primals
+nebulas
+sweethearts
+Hamlet
+batteries
+starched
+greedy
+doving
+resort
+try
+retorting
+allocating
+accrues
+very
+labelled
+venues
+tongues
+stony
+Fibre
+wrote
+Lunch
+biotechnology
+grenade
+more spectacular
+assigning
+torment
+rearrange
+betrayal
+Colonels
+makeup
+Ward
+stigma
+infer
+averaged
+scrubbed
+tiled
+trotted
+remains
+privileging
+replaying
+solution
+regains
+oversights
+conquering
+gloss
+retelling
+sportsman
+bolster
+most negative
+crawls
+postponed
+battlefields
+lyrics
+harbors
+most microscopic
+wavered
+tame
+signed
+composite
+pianists
+worthwhile
+blots
+buddha
+intrigued
+reproductions
+Infusion
+cleared
+revolvers
+re-examined
+actors
+stars
+cheered
+cart
+Tulip
+wicked
+extremes
+assaulted
+townships
+sprees
+warmly
+more outstanding
+fingertips
+confounds
+drums
+succession
+ballpark
+contested
+wince
+slipped
+canyon
+days
+defying
+spacecraft
+lumbers
+morning
+podiums
+thunder
+instability
+biked
+recognize
+attachment
+limping
+halts
+pupil
+flatters
+despair
+planking
+assist
+attracted
+kneeing
+herding
+happy
+Khans
+blare
+athletic
+impart
+tightest
+sirens
+aristocratic
+oranges
+erodes
+seasoning
+Youth
+Frontals
+including
+laxer
+subscribes
+stockpiles
+extraction
+surprising
+moms
+vaguer
+miseries
+presently
+symptom
+meetings
+arms
+tilt
+interval
+swan
+tenders
+cheerful
+franker
+trunks
+cruising
+follower
+impressing
+echoes
+Sport
+perverse
+occurred
+confucius
+experienced
+tent
+draping
+publications
+rammed
+patriarch
+walls
+reductions
+navigating
+doubts
+conquest
+contentious
+aggregating
+respondents
+insane
+anomaly
+infusions
+bristle
+staunches
+radiates
+tumbles
+busied
+discrediting
+mirrored
+tracing
+most desirable
+continuity
+surgery
+gorillas
+detect
+backdrops
+behaves
+pitfalls
+physicists
+consults
+estimate
+postseason
+fibres
+separatist
+twigged
+economically
+mobilizing
+reactive
+Crossings
+ensue
+tabloids
+most compact
+picking
+arousing
+heading
+list
+paint
+clippings
+brew
+nannies
+eagerest
+swivel
+most humorous
+entrusted
+stranger
+respectful
+hypertension
+replica
+litter
+steered
+scottish
+clashes
+writers
+minibuses
+when
+bags
+tricks
+companion
+indifferents
+stab
+blogged
+person
+flushed
+next
+lineups
+reporter
+proctoring
+translate
+jewels
+sodas
+nicknaming
+relieve
+publishings
+altering
+authorize
+Rat
+satisfying
+customary
+laugh
+forbidden
+throwing
+accommodated
+succumbing
+skewering
+caterpillar
+groan
+bourgeois
+balked
+plusher
+clouds
+exhausted
+crate
+more massive
+baffled
+fifteen
+But
+patrolled
+Productive
+cents
+mankind
+weave
+cyclist
+chefs
+wholesaled
+reconstructed
+bureau
+vaporized
+partitioned
+immersions
+types
+vigor
+briskest
+presenting
+reconsider
+problematic
+sesame
+buried
+worse
+personally
+instructors
+trolleying
+surfaces
+dawned
+sellouts
+shame
+inherited
+disdaining
+sectarian
+ladened
+scaring
+bends
+translations
+billowed
+famous
+sows
+summit
+inform
+taunts
+i
+vets
+highlights
+stimulus
+scouting
+hippie
+kicker
+opponent
+killings
+disks
+Lesbians
+gleams
+obeying
+sorer
+recourse
+bearding
+middle-classes
+gather
+motivation
+class
+tell
+contours
+frustrate
+sabotaged
+local
+win
+purchased
+biscuits
+liar
+greying
+proofs
+more generic
+prohibits
+wrung
+Needles
+tubs
+implant
+dinner
+hums
+blahs
+adorning
+doubling
+declining
+reliability
+broth
+persuasions
+anatomies
+expos
+restricts
+clarify
+seventeen
+pear
+conventions
+tentatively
+absorb
+blitzes
+takeover
+microorganism
+ministers
+reptile
+flight
+fiesta
+bellies
+wipe
+front-runners
+hacked
+longings
+owning
+carcass
+vaporizing
+Unnamed
+poisonous
+successful
+heats
+endanger
+roadways
+pipeline
+wees
+navigators
+spheres
+muzzled
+gruesome
+marvelled
+grinding
+disintegrating
+boosting
+poultry
+Quietness
+sculptures
+urine
+Tree
+rotted
+mention
+hesitant
+sulfuring
+foot
+sufficing
+cradling
+athletes
+Mid
+boosts
+planning
+notches
+crusaders
+modernity
+Programming
+exam
+teasing
+temps
+more indifferent
+receives
+respectively
+curled
+academies
+sixteen
+reminded
+assessed
+becoming
+silence
+selfish
+ultimate
+inferring
+testimony
+messengers
+brews
+Conspicuous
+walking
+optional
+goosed
+Ballistic
+cigarette
+nostrils
+dismisses
+intent
+stalled
+jobs
+ballad
+dialect
+dispelled
+critiquing
+ulcer
+moth
+passed
+incomplete
+upsets
+programmers
+creator
+slant
+midtown
+rustles
+grandeur
+suggested
+educate
+Islamic
+more vibrant
+refineries
+introduced
+farming
+underline
+twists
+reassurance
+merry
+perfume
+simmer
+ambassadors
+amusing
+quarter
+houses
+pretests
+whereby
+activist
+sucked
+remarking
+preseasons
+exacerbate
+sieges
+hostages
+temporaries
+antler
+guideline
+triangles
+skeletons
+coil
+subsidize
+exclaims
+lapsed
+evenings
+hallway
+mavericks
+gold
+disturbingly
+added
+Latin
+hyped
+idle
+hues
+terra
+Boutique
+intuitive
+prides
+entailed
+banks
+repay
+Starch
+civilities
+documentary
+muzzling
+Dragon
+overrule
+subverted
+fascinates
+clans
+flus
+socioeconomic
+wooing
+hulled
+chow
+house
+summarizes
+spokeswomen
+pucks
+reorganize
+teas
+vent
+embody
+Prophecy
+withdrawn
+swipes
+diversity
+marry
+Logic
+hawks
+mouth
+transit
+annexes
+policed
+querying
+squinting
+Latter
+compels
+tsunamis
+most expansive
+let
+ghostliest
+canines
+dermatologist
+compulsive
+restrained
+thieves
+regional
+headquarters
+strung
+resent
+spearheading
+inconvenience
+aurora
+expend
+most ironic
+more expensive
+Kindly
+spawning
+advertisements
+gyms
+impediments
+advocated
+knot
+shying
+secondly
+vault
+cowboys
+Margaritas
+India
+thee
+pulled
+maturity
+transformed
+vulgarer
+casinos
+restricted
+Front
+naked
+reads
+era
+inflate
+thread
+Underwater
+experts
+raw
+overthrowing
+don
+wizard
+eater
+rate
+glory
+encompass
+faucet
+nurses
+rubies
+mounts
+clan
+professionals
+dams
+beard
+midwestern
+poems
+thirdly
+mounds
+plough
+stopped
+reinforces
+criticism
+valid
+prelude
+compares
+staked
+kettles
+difference
+distress
+destining
+goes
+expect
+tissue
+them
+cock
+helms
+mediating
+weld
+stepfathers
+marketplaces
+discredit
+quaintest
+lapse
+producers
+fledglings
+tamed
+most tremendous
+flagged
+pilgrims
+scalping
+factory
+coated
+ivory
+convertible
+how
+disarm
+ratified
+masqueraded
+bother
+curricula
+staged
+breweries
+Typical
+entertains
+departed
+mashed
+trucks
+tournament
+pact
+manuscript
+oriented
+iPod
+parks
+emptied
+Ardent
+disables
+sissy
+horn
+hinges
+fame
+exceeds
+wailing
+rosier
+cervical
+atheists
+giddied
+dip
+shuttles
+once
+playboy
+compasses
+rightly
+gut
+bargained
+hooks
+apricot
+delivers
+banding
+intensive
+forensic
+remodel
+teetered
+reigned
+notice
+Eminent
+pan
+explode
+pales
+wrecked
+clapped
+minced
+sidebars
+toxins
+rejoining
+scanners
+bits
+puffed
+Hazel
+self-reports
+rashes
+galleys
+skateboarding
+currents
+limitations
+textbooks
+fisheries
+unwise
+snails
+plopping
+cross-cultural
+clips
+tofus
+plant
+dialed
+stuffed
+sensations
+homemade
+hypothesis
+ear
+deploying
+sloping
+roam
+crafting
+coefficients
+deliberations
+dribbles
+heirs
+alaskan
+reimbursed
+achievement
+injury
+casting
+fiddling
+prefacing
+test
+totaling
+donate
+despaired
+most doubtful
+taxes
+strains
+world
+refreshing
+puppet
+executions
+ladies
+skids
+cheekbone
+fathering
+cigar
+laundries
+attests
+Your
+fascinations
+blueprint
+more charming
+outfitting
+ambiguities
+boots
+mechanisms
+chrome
+oppressed
+mat
+necessitated
+discern
+bios
+Master
+recovers
+impeding
+legalizing
+roaring
+legendary
+preserving
+gravity
+catered
+adaptive
+warlords
+throttled
+Bisexual
+sensor
+neat
+competitor
+insiders
+blenders
+vying
+velvet
+star
+elbowing
+biodiversity
+lama
+Collapse
+sweetnesses
+virgin
+second-hand
+clouted
+resented
+bossing
+benevolent
+weep
+bypassing
+lightened
+itch
+overcoming
+brakes
+persisting
+disposition
+lazy
+wheeled
+mumble
+racist
+most magnificent
+profit
+potentially
+embracing
+combos
+assaulting
+apple
+waxing
+Ivories
+counties
+shop
+slabs
+pair
+commodities
+conspiracies
+restricting
+intensify
+december
+staircases
+differ
+explosive
+embassies
+measures
+grams
+mapping
+backlashing
+caricature
+reversed
+enlisted
+injustice
+Pound
+Obsessive
+generals
+serenity
+opium
+excessive
+thwart
+revolutions
+elaborates
+more geographic
+culminate
+pervading
+orphanage
+unnecessary
+shutter
+workloads
+clawed
+adjustments
+washings
+passer
+redesign
+adobe
+maverick
+recollection
+empowerments
+sellout
+fisherman
+centimetres
+practises
+bridged
+forwards
+Intriguing
+allege
+defense
+petites
+roar
+dugout
+miracle
+advertiser
+daisies
+tougher
+Three
+Steep
+leans
+operates
+allowing
+lodging
+collapsed
+disarmament
+policies
+mines
+condemning
+patrons
+precaution
+canvases
+papal
+demons
+disappears
+offend
+invariably
+Instrumental
+hassling
+councilman
+buttons
+readiness
+Damned
+know
+fluid
+hers
+cloths
+bittersweets
+marketings
+Federal
+combines
+recessed
+wickeder
+rescues
+grouping
+Israelis
+shred
+tickled
+Loosely
+whaled
+conclusions
+mother-in-law
+silkier
+more useful
+numbed
+threats
+lava
+starred
+spares
+refinery
+delegate
+likewise
+locating
+Motivated
+prefer
+dating
+squaring
+anchored
+bumper
+secretive
+camp
+peanut
+bullock
+compute
+sequence
+loci
+lasers
+render
+dart
+baseman
+packages
+thank
+retains
+accentuate
+seeded
+tactic
+bombers
+cascade
+appeals
+gobbling
+segregates
+Networking
+blessed
+is
+yielding
+completing
+crab
+dune
+Union
+separated
+actings
+designed
+skewing
+scholar
+beavers
+expanses
+unsuccessful
+sneaker
+biasing
+fertility
+erasing
+Temples
+firefighters
+shrillest
+survives
+experiments
+theorizing
+clinicians
+snowed
+arteries
+asteroids
+hierarchies
+submissions
+gift
+messenger
+champ
+facing
+protein
+delegates
+collages
+mars
+forbids
+amazon
+reprimand
+catalogs
+phoning
+thinly
+carvings
+stories
+socially
+overruns
+glossed
+decency
+piled
+titan
+haziest
+Saturdays
+Fireworks
+ropes
+mp
+manipulates
+excuse
+stems
+albeit
+youngsters
+flurrying
+eating
+juror
+autobiography
+mares
+glimmered
+solids
+shingles
+planes
+walks
+arbors
+chunk
+melody
+slowly
+hunts
+flanking
+peddles
+fingering
+pinpointed
+rhythms
+Cap
+Nazi
+revolutionaries
+cologne
+riches
+regretful
+dodge
+Polish
+sweatiest
+Tropical
+referendums
+diesel
+fat
+menacing
+wallpapered
+replenishes
+barged
+disproportionate
+requested
+tourists
+tracked
+prejudiced
+preparation
+Ultraviolet
+plazas
+erected
+victimization
+offends
+bobs
+metropolises
+never
+universe
+outdoors
+trailed
+spammed
+crops
+pertinent
+saturating
+leagues
+hears
+organs
+senior
+fiat
+postures
+incident
+slight
+tactics
+yeast
+cheat
+shots
+sedan
+increasingly
+witched
+dirty
+burial
+maintaining
+brow
+more truthful
+timetables
+Narcotic
+squirming
+prostitute
+mashing
+chic
+ligament
+gravy
+thus
+dulled
+mouths
+real-time
+resets
+canada
+fierier
+stifle
+panicking
+replenishing
+Correction
+COKE
+drawings
+gigs
+leftover
+embodiment
+egyptian
+blinks
+moaned
+warm-up
+friendlier
+knowingly
+journalism
+typists
+inhales
+exploitation
+canopies
+aboard
+standpoint
+elves
+ruptured
+most ingenious
+christmas
+replicate
+responds
+dues
+Biological
+ruts
+really
+hi
+caucasian
+reliably
+hemmed
+accommodates
+heatings
+recruiter
+margarine
+self-portraits
+softer
+rape
+abolishes
+oppositions
+resembled
+exhausting
+thump
+ecology
+butted
+rocker
+technological
+smirked
+massages
+escalated
+experimented
+associating
+so-called
+formation
+Frost
+retrospectives
+cracker
+further
+milk
+sagas
+shoes
+fancies
+promised
+obese
+Frigid
+admission
+separately
+immerses
+discriminated
+skillfully
+re-elections
+odyssey
+luckily
+Eye
+indices
+god
+cover-up
+probing
+lunar
+annexed
+childhood
+teams
+marina
+surpassing
+summed
+divined
+grasping
+luxurious
+undress
+brake
+neighbor
+outlawing
+flailing
+Kin
+participating
+lunge
+caressing
+Never
+crackle
+hone
+brother
+intend
+more artistic
+entry
+gangster
+trumpeted
+results
+dealership
+parallel
+encircling
+tout
+unravelled
+smile
+realize
+coinciding
+deciphers
+occupied
+defence
+lamps
+owed
+odour
+punch
+cottons
+intimate
+scrolled
+clerked
+quizzed
+songwriter
+Death
+apologize
+synthesis
+answer
+bars
+powdering
+bookshelves
+unwanted
+tilling
+longing
+sympathizing
+more tolerant
+burdening
+learns
+each
+vacationed
+costlier
+stitch
+accountants
+touching
+commemorates
+funds
+microphones
+hamper
+touring
+frenzies
+situate
+Tonics
+perish
+viewings
+weddings
+devout
+Aggregates
+flavored
+drainage
+snacking
+windy
+drape
+most telling
+bunch
+trainer
+binging
+nutrient
+governor
+digest
+swamped
+rostering
+diapers
+ebbs
+most glamorous
+aggravating
+chosen
+clad
+jealous
+dredges
+southerners
+parrots
+however
+kinship
+bore
+lorded
+appropriated
+everyday
+past
+heralded
+prophetic
+Marine
+banged
+ganging
+chuckles
+couching
+trolleybus
+constitutes
+removals
+arbitrary
+interfaced
+exemption
+more ethical
+worthier
+precedents
+dented
+instructions
+missed
+gobble
+certain
+mix
+flew
+touted
+plan
+Thymus
+glinting
+obstructs
+clamored
+hut
+swerved
+inquire
+achievements
+wording
+unsure
+defects
+particular
+cavities
+shudders
+Col
+moratorium
+lived
+rest
+Mortal
+bases
+rowed
+lend
+composites
+worldly
+sickness
+sponged
+repeat
+prejudicing
+insurers
+pool
+descriptions
+eventually
+more predatory
+walker
+raided
+most colonial
+mingled
+footstep
+lowers
+dummies
+allies
+anguished
+stratum
+peaks
+straightening
+burnouts
+kilometre
+you
+perimeters
+spinal
+hitter
+impeachment
+Collector
+tickling
+inequities
+wig
+ounces
+Viral
+backfire
+intents
+undoing
+confiscate
+microscopes
+shuttered
+Rubber
+voter
+subversive
+dial
+canyons
+biddings
+Anti-Semitism
+pubs
+decoy
+bible
+adored
+shafting
+adequate
+unfamiliar
+Listing
+foggier
+hammering
+laptops
+Carbons
+healers
+taller
+autograph
+two-story
+beaten
+policewomen
+variation
+invites
+resolved
+hates
+hard-core
+alternatively
+cannons
+wadding
+collar
+striking
+tack
+chaplain
+seductive
+Critical
+useful
+poised
+sauteed
+Georgian
+agreement
+veering
+mantel
+pleases
+businessman
+homework
+arguably
+putted
+minutes
+parties
+Copyright
+headlines
+reassure
+hikes
+chatters
+high-rises
+thrives
+clot
+revert
+Illustrator
+barrelled
+incompetents
+southerner
+scapegoating
+piercings
+envisioning
+flawed
+brushing
+squirreling
+holds
+Shot
+proprietary
+sober
+donor
+crotch
+brighten
+benefiting
+surfers
+borrowings
+substantially
+converts
+Thursday
+timbers
+anti-semitism
+turnouts
+mill
+shatter
+chipping
+primarily
+ascertain
+sandal
+climaxing
+Predominant
+reform
+portray
+Episcopal
+hymn
+thirsty
+fuzziest
+vouchered
+irises
+hotel
+drying
+disclosed
+disable
+conversely
+plow
+patents
+palling
+brush
+mugging
+deepen
+Hindus
+pants
+blunt
+become
+treadmills
+sectors
+exhausts
+inset
+spectrum
+artificials
+sides
+warn
+catastrophe
+quested
+loggings
+swimmer
+calculators
+fooling
+brunch
+favourite
+fancying
+downhill
+help
+deduct
+doubting
+dealer
+furnishing
+tipping
+What
+more foolish
+punctured
+equip
+cosmic
+trips
+propelling
+sagging
+nation-state
+groves
+revolutionize
+trendier
+gray
+avoiding
+enthusiasm
+dell
+bit
+recommend
+medaled
+affects
+culminating
+trickiest
+maintain
+shortcut
+Honor
+strays
+full-scale
+scoping
+apiece
+discounted
+vapours
+intercourse
+Coronary
+geography
+unlocked
+reformers
+acuter
+diluted
+programmes
+Hub
+spooks
+stowed
+dines
+mowing
+pokers
+jailing
+napkins
+vowing
+reaches
+acquiring
+license
+policymaker
+heirloom
+salting
+academics
+corroborated
+aliens
+structuring
+honest
+Swimming
+shinned
+deadlines
+developer
+condones
+parade
+inks
+approved
+seedling
+fences
+prostate
+paralyze
+Congressional
+called
+crutches
+opposing
+surrounded
+inexpensive
+guitarists
+landing
+liberties
+peripheral
+prompting
+more serious
+clandestine
+sacrament
+away
+credits
+safety
+overhearing
+paper
+right
+pencils
+version
+investing
+angered
+sensitivity
+executing
+hangers
+mediates
+aisle
+gene
+instituted
+sourer
+slumped
+exposition
+fortunate
+menus
+Swedish
+vectors
+rents
+Koran
+referring
+interfaces
+thin
+prohibiting
+haunt
+monks
+swoop
+globalization
+unto
+elicit
+groins
+muses
+most scenic
+guts
+railroad
+attendee
+styled
+organising
+training
+haul
+optimism
+deport
+proofing
+happier
+dunks
+hardwoods
+felt
+rumbling
+generalizing
+installments
+specified
+Uneven
+publicized
+demographic
+dearly
+foremen
+organizing
+opens
+remorsing
+precision
+fusing
+chatted
+SICK
+railroads
+Aha
+nearest
+berthed
+outlook
+conserved
+skin
+concretes
+crampedest
+smell
+fatigued
+Senior
+reviving
+cascades
+convergences
+tampering
+wades
+priced
+temporarily
+stink
+bind
+hockey
+anticipates
+barges
+Beaten
+damps
+stocked
+tote
+puncturing
+anecdotal
+inventing
+pigs
+squashes
+simple
+blender
+kurdish
+novices
+televising
+panelists
+Disabled
+extinguished
+Carver
+assumptions
+marching
+overeating
+prenatal
+trenches
+dish
+text
+alternated
+yellower
+duration
+publicize
+alleges
+Insufficient
+overhead
+objectives
+fathers'
+headset
+amass
+beautifully
+chuck
+superiority
+long-range
+terminology
+fades
+witnessed
+disabilities
+array
+anesthesia
+traditionally
+exemplars
+rendered
+kneeling
+eclipse
+laughed
+contributed
+stump
+scrutinize
+leg
+sights
+bares
+seamed
+elementary
+wasp
+self-report
+reassures
+hints
+peeling
+brooked
+cleverest
+christ
+hurdles
+fonder
+shafts
+closeness
+shouts
+discarding
+Chechen
+switches
+crunched
+sues
+cousin
+Renowned
+merchandised
+marshes
+Genesis
+plume
+noise
+Sensitive
+hazard
+downsides
+rocketed
+billows
+tunnel
+more thankful
+veered
+beyond
+carbohydrate
+derricks
+Am
+accelerating
+entrees
+airborne
+incur
+tempting
+outward
+parrot
+Lineup
+hue
+human
+branch
+sophomores
+faraway
+lifelong
+face
+stumbling
+courted
+alley
+obsoletes
+Cosmos
+ensues
+violate
+formatting
+dessert
+overcame
+quartering
+boasts
+wreaks
+Gold
+settler
+realized
+chaining
+ledges
+Sake
+abduction
+favourites
+neck
+twin
+homosexual
+Beets
+more dubious
+chip
+DJ
+Jordanians
+boxed
+Dummy
+does
+clumsy
+wines
+boost
+reside
+sods
+Grand
+humiliated
+Breaths
+bulges
+annoyed
+equated
+nafta
+membrane
+bestows
+taxpayer
+tiptoed
+effect
+utter
+fun
+tinnitus
+turbulence
+coconut
+stretched
+spider
+icy
+armored
+healer
+bust
+helping
+curators
+fluids
+bites
+silencing
+kindergartens
+odds
+chocolates
+resided
+torpedoes
+coerce
+sing
+up
+Strip
+more poisonous
+prevailing
+coaxes
+extinguishing
+co-author
+acceleration
+scepticals
+film
+ports
+overflows
+Brown
+impulses
+lessons
+unearth
+exactly
+more considerable
+Acting
+honked
+enlarge
+scare
+corn
+socialization
+smash
+timed
+stitches
+leopards
+Beautiful
+shadows
+friendliness
+confiscates
+protestant
+anxiety
+proof
+tenor
+more attractive
+duck
+egalitarian

+ 62 - 0
data/get_all_exchange_words.py

@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+import sys
+import os
+
+# Make the project root importable before pulling in project-local modules.
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+from tools.new_mysql import MySQLUploader
+
+m = MySQLUploader()
+s = "select Word,InflectedWordSpelling,Properties from dictionary_exchange"
+r = m.query_data(s)
+m.close_connection()
+
+all_exchange_words = set()
+all_exchange_words_dict = {}
+all_prototype_deformation_dict = {}
+prototype_deformation_dict2 = {}
+
+for i in r:
+   
+    prototype,deformation,properties= [i[0],i[1],i[2]]
+   
+    all_exchange_words.update({prototype,deformation})
+
+    if properties == "原型":
+        prototype_deformation_dict2[prototype] = deformation
+
+   
+    if deformation not in all_prototype_deformation_dict:
+        all_prototype_deformation_dict[deformation] = prototype
+
+    if prototype not in all_exchange_words_dict:
+        all_exchange_words_dict[prototype] = [deformation]
+    if deformation not in all_exchange_words_dict[prototype]:
+        all_exchange_words_dict[prototype].append(deformation)
+
+
+
+def word_to_prototype(word:str) -> str:
+    """依次按顺序查询。1.先查原型 2.最后小写再查变形对应的原型 3.再查变形对应的原型。这样才能保证,不过滤有特殊意义的大写"""
+    if word in all_exchange_words_dict:
+        return word
+    elif word.lower() in all_exchange_words_dict:
+        return word.lower()
+   
+    elif word in all_prototype_deformation_dict:
+        w = all_prototype_deformation_dict[word]
+        if w in prototype_deformation_dict2:
+            w = prototype_deformation_dict2[w]
+        return w
+    else:
+        return word
+
+
+def get_word_exchange_list(word) -> list:
+    prototype_word = word_to_prototype(word)
+    all_exchange_words_list = all_exchange_words_dict.get(prototype_word,[])
+    return all_exchange_words_list
+
+
+if __name__ == '__main__':
+    print(word_to_prototype("was"))
+    print(word_to_prototype("made"))

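For orientation, a minimal usage sketch of the two helpers this module exports (it assumes the dictionary_exchange table is reachable; the printed values are illustrative, since they depend on the table contents):

    from data.get_all_exchange_words import word_to_prototype, get_word_exchange_list

    # Map an inflected form back to its prototype, then list every known form of that prototype.
    print(word_to_prototype("made"))       # e.g. "make", if that pair exists in dictionary_exchange
    print(get_word_exchange_list("make"))  # e.g. ["makes", "making", "made"], in table order
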
+ 18 - 0
data/get_frequency_script.py

@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+from openpyxl.worksheet.worksheet import Worksheet
+from openpyxl import load_workbook
+import json
+
+wb = load_workbook(r"单词词义表.xlsx",read_only=True)
+word_dict = {}
+ws: Worksheet = wb["Sheet1"]
+for row in ws.values:
+    _,word,frequency = row
+    frequency = int(frequency)
+   
+    word_dict[frequency] = word
+wb.close()
+with open("json_word_frequency.json",mode="w",encoding="utf-8") as f:
+   
+    write_data = json.dumps(word_dict)
+    f.write(write_data)

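The script above writes json_word_frequency.json keyed by frequency rank. A hedged sketch of reading it back (note that JSON serialization turns the integer rank keys into strings):

    import json

    with open("json_word_frequency.json", encoding="utf-8") as f:
        rank_to_word = json.load(f)

    print(rank_to_word.get("1"))  # the word with frequency rank 1, if present
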
File diff suppressed because it is too large
+ 0 - 0
data/json_word_frequency.json


BIN
data/单词词义表.xlsx


BIN
data/春笋单词对照变形.xlsx


+ 110 - 0
deepseek/ds_api.py

@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+import json
+
+from openai import OpenAI
+import os
+from tools.loglog import SimpleLogger
+
+
+
+
+class DS:
+    def __init__(self):
+        self.client = OpenAI(
+            api_key=os.getenv("DASHSCOPE_API_KEY"),
+            base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
+        )
+        self.logger = SimpleLogger(base_file_name="deepseek")
+
+    def write_log(self, message:str, log_type="info"):
+        """写入日志"""
+        log_methods = {
+            "warning": self.logger.warning,
+            "error": self.logger.error,
+            "info": self.logger.info
+        }
+        log_methods.get(log_type, self.logger.info)(message=message)
+
+    def check_article_response(self, response: str) -> bool:
+        """校验文章的回复是否符合预期格式"""
+        try:
+            resp_json = json.loads(response)
+            required_fields = ["english", "chinese", "difficultSentences"]
+            return all(field in resp_json for field in required_fields)
+        except Exception as e:
+            self.write_log(f"Response validation error: {e}", log_type="error")
+            return False
+
+    def get_article(self, user_prompt: str, sys_prompt: str = None, temperature: float = 0.8, 
+                   json_resp: bool = False, real_ip: str = "", demo_name: str = "", 
+                   max_tokens: int = 5192) -> str:
+        """获取AI生成的文章
+        
+        Args:
+            user_prompt: 用户输入的提示词
+            sys_prompt: 系统提示词
+            temperature: 温度参数,控制输出的随机性
+            json_resp: 是否返回JSON格式
+            real_ip: 用户IP
+            demo_name: 演示名称
+            max_tokens: 最大token数
+            
+        Returns:
+            str: AI生成的回复内容
+        """
+        messages = []
+        if sys_prompt:
+            messages.append({'role': 'system', 'content': sys_prompt})
+        messages.append({'role': 'user', 'content': user_prompt})
+        
+        response_format = {"type": "json_object"} if json_resp else {"type": "text"}
+        
+       
+        resp = ""
+        for _ in range(3):
+            completion = self.client.chat.completions.create(
+                model="deepseek-v3", 
+                messages=messages,
+                temperature=temperature,
+                response_format=response_format,
+                max_tokens=max_tokens 
+            )
+            resp = completion.choices[0].message.content
+            # Only enforce the article-JSON shape when a JSON response was requested;
+            # plain-text prompts (e.g. the choice-question request) would otherwise always burn all 3 retries.
+            if not json_resp or self.check_article_response(resp):
+                break
+        
+       
+        if sys_prompt and resp:
+            self.write_log(sys_prompt)
+        self.write_log(user_prompt)
+        self.write_log(resp)
+        
+        return resp
+
+
+if __name__ == '__main__':
+    os.chdir('..')
+    p = """下面我会为你提供两组数据,[单词组1]和[单词组2](里面包含词义id,英语单词,中文词义),优先使用[单词组1]内的单词,请根据这些单词的中文词义,生成一篇带中文翻译的考场英语文章,英语文章和中文翻译要有[标题]。注意这个单词有多个词义时,生成的英语文章一定要用提供的中文词义。并挑选一句复杂的句子和其中文翻译,放入difficultSentences。英语文章,放入"englishArticle"中。中文翻译,放入"chineseArticle"中。最终文中使用到的单词id放入"usedMeanIds"中。4个选择题,放入questions字段。questions结构下有4个选择题对象,其中trunk是[英语]问题文本,analysis是[中文]的问题分析,candidates是4个ABCD选项,内部有label是指选项序号A B C D ,text是[英语]选项文本,isRight是否正确答案1是正确0是错误。
+
+提供[单词组1]:847 protect:保护;592 bear:出生, 结果;601 lie:位于;431 close:近, 靠近;1031 direction:方向;1282 coffee:咖啡豆;303 once:曾经;827 raise:养育;373 follow:听懂, 领会;1286 solve:解决, 解答;
+提供[单词组2]:1288 destroy:破坏, 摧毁;1290 project:放映, 展现;1292 waste:浪费, 荒芜, 废物;1293 environment:环境, 外界;1294 memory:记忆, 记忆力, 回忆;
+
+要求:
+1.必须用提供的这个词义的单词,其他单词使用常见、高中难度的的单词。文章整体难度适中,大约和中国的高中生,中国CET-6,雅思6分这样的难度标准。
+2.优先保证文章语句通顺,意思不要太生硬。不要为了使用特定的单词,造成文章语义前后不搭,允许不使用个别词义。
+3.文章中使用提供单词,一定要和提供单词的中文词义匹配,尤其是一词多义时,务必使用提供单词的词义。必须要用提供单词的词义。如果用到的词义与提供单词词义不一致,请不要使用这个单词。
+4.生成的文章要求500词左右,可以用\n\n字符分段,一般5个段落左右。第一段是文章标题。
+5.生成文章优先使用[单词组1]的词义,其次可以挑选使用[单词组2]的词义。允许不使用[单词组1]的个别单词,优先保证文章整体意思通顺连贯和故事完整。
+6.回复标准json数据,示例:
+{"difficultSentences":[{"english":"string","chinese":"string"}],"usedMeanIds":[0,0,0],"englishArticle":"string","chineseArticle":"string","questions":[{"trunk":"string","analysis":"string","candidates":[{"label":"string","text":"string","isRight":0}]}]}
+
+"""
+    ds = DS()
+    resp = ds.get_article(user_prompt=p,json_resp=True)
+    print(resp)
+    print()
+
+    print(resp.replace(r'\"n','\n').replace(r"\\n",'\n'))
+    print()
+
+    print(json.loads(resp))

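Beyond the full demo in __main__, the validation helper can be exercised on its own. A hedged sketch (DASHSCOPE_API_KEY still has to be set, because the constructor builds the OpenAI client eagerly; the sample strings are made up):

    ds = DS()
    good = '{"english": "Hi", "chinese": "你好", "difficultSentences": []}'
    bad = '{"englishArticle": "Hi"}'
    print(ds.check_article_response(good))  # True  - all required fields present
    print(ds.check_article_response(bad))   # False - required fields missing
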
+ 271 - 0
deepseek/get_article3.py

@@ -0,0 +1,271 @@
+# -*- coding: utf-8 -*-
+
+import re
+import json
+from deepseek.ds_api import DS
+
+from tools.new_mysql import MySQLUploader
+from tools.loglog import logger, log_err_e
+from tools.thread_pool_manager import pool_executor
+from common.common_data import all_exchange_words
+from common.split_text import split_text_to_word
+
+from pydantic import BaseModel
+from cachetools import TTLCache
+from concurrent.futures import wait
+from random import randint, shuffle
+import json
+import requests
+
+
+def get_article_difficulty(article) -> int:
+    """获取文章的难度值"""
+    url = "http://qbank.yunzhixue.cn/api/article/analysis"
+
+    data = {"body": article, "question": ""}
+    try:
+        response = requests.post(url, json=data)
+    except Exception as e:
+        log_err_e(e, msg="获取文章难度值;")
+        return 0
+
+    if response.status_code == 200:
+        difficult_value = response.json()['data']['difficult']
+        return difficult_value
+    else:
+        logger.error(f"错误状态码{response.status_code}")
+        return 0  # keep the declared int return type; returning None would crash find_interval downstream
+
+
+def find_interval(number):
+    """
+    判断一个数字属于哪个难度等级区间。31级是例外情况,需要排查
+
+    :param number: 要检查的数字。
+    :return: 返回包含该数字的区间,如果没有找到,则返回 None。
+    """
+    intervals = [(1, 200), (201, 250), (251, 300), (301, 350), (351, 400), (401, 450), (451, 550), (551, 650), (651, 750), (751, 850), (851, 950),
+                 (951, 1100),
+                 (1101, 1250), (1251, 1400), (1401, 1550), (1551, 1700), (1701, 1900), (1901, 2100), (2101, 2300), (2301, 2600), (2601, 2900),
+                 (2901, 3200),
+                 (3201, 3500), (3501, 3900), (3901, 4300), (4301, 4700), (4701, 5100), (5101, 5500), (5501, 5900), (5901, 6500), (6501, 99999)]
+    for index, (start, end) in enumerate(intervals, start=1):
+        if start <= number <= end:
+            return index
+    logger.error(f"文章难度判断不对:{number}")
+    return 0
+
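+# Example: a raw difficulty of 500 falls in the (451, 550) interval, so find_interval(500) == 7;
+# values outside every interval return 0 and log an error.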
+
+def parse_question(question_block):
+    question_info = {}
+
+   
+    question_match = re.search(r'问题:\s*(.*)', question_block)
+    if question_match:
+        question_info['trunk'] = question_match.group(1).strip()
+
+   
+    analysis_match = re.search(r'解析:\s*(.*)', question_block)
+    if analysis_match:
+        question_info['analysis'] = analysis_match.group(1).strip()
+
+   
+    options_match = re.search(r'选项:(.*)', question_block)
+    if options_match:
+        options_text = options_match.group(1).strip()
+        options_list = re.split(r'\s*[BCDA]\.\s*', options_text)[1:]
+        candidates = []
+        for i, option_text in enumerate(options_list, start=65): 
+            label = chr(i)
+            text = option_text.strip()
+            candidates.append({
+                "label": label,
+                "text": text,
+                "isRight": 0
+            })
+        question_info['candidates'] = candidates
+
+   
+    answer_match = re.search(r'答案:([ABCD])', question_block)
+    if answer_match and 'candidates' in question_info:
+        correct_label = answer_match.group(1)
+        for candidate in question_info['candidates']:
+            if candidate['label'] == correct_label:
+                candidate['isRight'] = 1
+
+    return question_info
+
+
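+# parse_question expects one block in the format requested from the model, e.g.:
+#   问题: What can we infer from the passage?
+#   解析: 根据第二段可以推断出正确答案。
+#   选项:A. text1  B. text2  C. text3  D. text4
+#   答案:B
+# and returns {"trunk": ..., "analysis": ..., "candidates": [4 labeled options]} with isRight=1 on the answer.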
+class GetArticle:
+    def __init__(self):
+        self.m = MySQLUploader()
+        self.ds = DS()
+
+        self.callback_url_dict = {}
+        self.real_ip_dict = {} 
+        self.demo_name = {}
+
+       
+        self.punctuation = [",", ".", "!", "?", ":", ";", '"', "–", "_", "-", "...", "......"]
+        all_exchange_words.update(self.punctuation)
+
+
+   
+    def parser_insert_to_mysql(self, resp_result):
+        for single_article in resp_result['articles']:
+           
+            article = single_article['body']
+            article_json = json.dumps(single_article)
+            difficult_value = find_interval(get_article_difficulty(article)) 
+            if not difficult_value:
+                logger.error("文章难度等级为0;")
+            sql = "INSERT INTO spring_bamboo_article (article_json,difficult_level) VALUES (%s,%s)"
+            self.m.execute_(sql, (article_json, difficult_value))
+
+   
+    def submit_task(self, words_meaning_list: list, take_count: int, student_stage: int, real_ip: str, demo_name: str):
+        """
+        words_meaning_ids: 词义id 包含词义ID的数组集合,用于生成文章。- 示例:[110, 111, 112, 113, 114]
+        take_count: 取文章数量 (int类型,正常是2篇,最大8篇)
+        student_stage: 学段(int类型:1.小学;2.初中;3.高中;)
+        demo_name: 项目名称
+        """
+        task_id = randint(10000000, 99999999)
+       
+        words_meaning_str = ";".join([i["spell"] + ":" + i["meaning"] for i in words_meaning_list])
+        logger.info(f"生成文章id。task_id:{task_id}。词义组:{words_meaning_str}.")
+
+        self.real_ip_dict[task_id] = real_ip
+        self.demo_name[task_id] = demo_name
+
+        try:
+           
+            resp_result = self.run_task(words_meaning_list, task_id, take_count, student_stage)
+            self.parser_insert_to_mysql(resp_result) 
+            return resp_result
+        except Exception as e:
+            err_msg = f"GetArticle提交任务失败{type(e).__name__},{e}"
+            log_err_e(e, msg="GetArticle提交任务失败;")
+
+            return err_msg
+
+   
+    def get_article(self, words_meaning_list, student_stage, task_id, take_count) -> dict:
+        diffculty_control = {
+            1: {"grade": "小学", "article_word_count": 60, "desc_difficulty": "最简单最容易没有难度", "paragraph_count": 1,
+                "desc2": "文章整体非常简洁,通俗易懂,适合初学者,刚入门,单词全是最常见的,语句通顺即可。",
+                "choice_desc":"选择题难度尽可能简单,但是不要让所有选择题让其直接在文中找到答案,允许1-2个选择题很简单,参考中国小学生水平"},
+            2: {"grade": "初中", "article_word_count": 200, "desc_difficulty": "简单、常见、难度低", "paragraph_count": 3,
+                "desc2": "文章整体难度适中,大约和中国初中生,中国CET-3,雅思4分这样的难度标准。",
+                "choice_desc":"选择题难度适中,但是不要所有选择题让其直接在文中找到答案,参考中国初中生水平,中考标准。"},
+            3: {"grade": "高中", "article_word_count": 300, "desc_difficulty": "常见、高中难度的", "paragraph_count": 3,
+                "desc2": "文章整体难度适中,大约和中国的高中生,中国CET-4,雅思5分这样的难度标准。",
+                "choice_desc":"选择题难度偏难,要有迷惑性,不要出现直接在文中找到答案,参考中国高中生水平,高考标准。"}
+        }
+        take_count_dict = {0: "", 1: "一", 2: "二", 3: "三", 4: "四", 5: "五", 6: "六", 7: "七", 8: "八"}
+        different_cou = take_count_dict.get(take_count, "")
+
+        grade = diffculty_control[student_stage]["grade"] 
+        select_word_count = diffculty_control[student_stage]["article_word_count"] 
+        select_diffculty = diffculty_control[student_stage]["desc_difficulty"] 
+        select_paragraph_count = diffculty_control[student_stage]["paragraph_count"] 
+        desc2 = diffculty_control[student_stage]["desc2"] 
+        choice_desc = diffculty_control[student_stage]["choice_desc"] 
+       
+
+       
+        shuffle(words_meaning_list)
+        words_meaning_str = ";".join([i["spell"] + ":" + i["meaning"] for i in words_meaning_list])
+
+        q = f"""不要与前面{different_cou}篇一样,要不同的场景。你是一名在中国的英语教师,下面我会为你提供一些带中文词义的英语单词,请根据这些单词的中文词义,\
+生成带英文标题和中文翻译的考场英语文章,注意这个单词有多个词义时,生成的英语文章一定要用提供的中文词义。并挑选一句复杂的句子和其中文翻译,放入difficultSentences。
+提供单词:{words_meaning_str}
+
+要求:
+1.必须用提供的这个词义的单词,其他单词使用{select_diffculty}的单词。{desc2}
+2.文章中使用提供单词,一定要和提供单词的中文词义匹配,尤其是一词多义时,务必使用提供单词的词义。必须要用提供单词的词义。如果用到的词义与提供单词词义不一致,请不要使用这个单词。
+3.生成的文章要求{select_word_count}词左右,可以用\\n\\n字符分段,一般{select_paragraph_count}个段落左右。
+4.优先保证文章语句通顺,意思不要太生硬。不要为了使用特定的单词,造成文章语义前后不搭,允许不使用个别词义。
+5.回复json,格式:{{"title":英文标题,"english":英语文章,"chinese":中文翻译,"difficultSentences": [
+    {{
+        "english": "",
+        "chinese": ""
+    }}
+]}}
+"""
+        try:
+            real_ip = self.real_ip_dict[task_id]
+            demo_name = self.demo_name[task_id]
+            r_json = json.loads(self.ds.get_article(q, temperature=1, json_resp=True, real_ip=real_ip, demo_name=demo_name,max_tokens=8000))
+
+           
+            r_json["body"] = r_json["title"] + "\n\n" + r_json["english"]
+            del r_json["title"]
+
+            q_choice_question = f"""你是一名在中国的{grade}英语教师,下面我会为你提供一篇英语短文,请根据短文设计4个选择题.
+短文:{r_json["english"]}
+
+###要求:
+1. 生成的选择题不要让学生从短文中直接找到答案,可以混淆,最好让学生推理或排除获得正确答案。用词可以{select_diffculty},出题要参考中国的中考高考。
+2.{choice_desc}
+3. 每个选择题之间间隔两行,回复格式如下:
+
+问题: 英语的选择题问题1文本;
+解析: 中文的选择题答案解析;
+选项:A. 选项1  B. 选项2  C. 选项3  D. 选项4
+答案:B
+
+其他几个选择题依次类推
+"""
+
+            resp_text = self.ds.get_article(q_choice_question, temperature=1, real_ip=real_ip, demo_name=demo_name, max_tokens=8000)
+            questions = resp_text.strip().split('\n\n')
+           
+            parsed_questions = [parse_question(q) for q in questions]
+
+            json_data = {"questions": parsed_questions}
+
+           
+            allWordAmount = 0
+            allWordAmount += len(split_text_to_word(r_json["english"]))
+            for i in json_data["questions"]:
+                count_trunk = len(split_text_to_word(i["trunk"]))
+                count_candidates = sum([len(split_text_to_word(ii["text"])) for ii in i["candidates"]])
+                allWordAmount += count_trunk
+                allWordAmount += count_candidates
+
+            return {**r_json, **json_data, "allWordAmount": allWordAmount}
+        except json.decoder.JSONDecodeError:
+            logger.error("gpt生成文章回复json格式化错误")
+        except Exception as e:
+            logger.error(f"gpt生成文章回复其他错误.{type(e).__name__} {e}")
+
+
+   
+    def run_get_article_task(self, words_meaning_list, task_id, take_count, student_stage) -> dict:
+        """
+        :param words_meaning_list: list of words and meanings queried from the database
+        :param task_id: task id
+        :param take_count: number of articles to generate
+        :param student_stage: school-stage flag, int
+        :return: dict of the form {"articles": [...]}
+        """
+        futures = []
+        for i in range(take_count):
+            futures.append(pool_executor.submit(self.get_article, words_meaning_list, student_stage, task_id, take_count))
+        wait(futures)
+        return_json = {"articles": []}
+        for t in futures:
+            return_json["articles"].append(t.result())
+        return return_json
+
+   
+    def run_task(self, words_meaning_list, task_id, take_count, student_stage):
+        try:
+            outside_json = self.run_get_article_task(words_meaning_list, task_id, take_count, student_stage)
+            logger.success(f"文章3-DeepSeek文章任务完成。taskid:{task_id}")
+            return outside_json
+        except Exception as e:
+            logger.error(f"{type(e).__name__} {e}")
+        finally:
+            self.real_ip_dict.pop(task_id)
+            self.demo_name.pop(task_id)

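A hedged sketch of driving deepseek/get_article3.py end to end (the word list and counts are placeholders; submit_task blocks until the articles are generated, writes each one into spring_bamboo_article, and returns either the article dict or an error string):

    ga = GetArticle()
    words = [{"spell": "protect", "meaning": "保护"}, {"spell": "solve", "meaning": "解决, 解答"}]
    result = ga.submit_task(words_meaning_list=words, take_count=2, student_stage=3,
                            real_ip="127.0.0.1", demo_name="demo")
    # On success: {"articles": [{"english": ..., "chinese": ..., "questions": [...], "allWordAmount": ...}, ...]}
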
+ 112 - 0
gpt/chatgpt.py

@@ -0,0 +1,112 @@
+# -*- coding:utf-8 -*-
+if __name__ == '__main__':
+    import os
+    os.chdir("..")
+
+import requests
+import random
+import time
+from tools.loglog import logger,simple_logger
+from tools.new_mysql import MySQLUploader
+from typing import Optional, Dict, Any,Union
+
+
+m = MySQLUploader()
+
+def get_openai_model(model_text:str):
+    """模糊获得模型名"""
+    if "3.5" in model_text or "3.5-turbo" in model_text or "3.5turbo" in model_text:
+        model = "gpt-3.5-turbo"
+    elif "4o" in model_text or "gpt4o" in model_text:
+        model = "gpt-4o"
+    elif "4turbo" in model_text or "4-turbo" in model_text:
+        model = "gpt-4-turbo"
+    else:
+        model = "gpt-4o"
+    return model
+
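+# e.g. get_openai_model("gpt3.5turbo") -> "gpt-3.5-turbo"; get_openai_model("4turbo") -> "gpt-4-turbo";
+# anything unrecognized falls back to "gpt-4o".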
+
+def insert_ip_token(ip,demo_name,gpt_content,prompt_tokens,completion_tokens,total_tokens):
+    sql = "insert into consumer_token (ip,demo_name,gpt_content,prompt_tokens,completion_tokens,total_tokens) values (%s,%s,%s,%s,%s,%s)"
+    m.execute_(sql,(ip,demo_name,str(gpt_content),prompt_tokens,completion_tokens,total_tokens))
+
+def get_answer_from_gpt(question,real_ip="localhost",demo_name="无",model="gpt-4o",max_tokens=3500,temperature:float=0,
+                        json_resp:Union[Dict[Any, Any],bool]=False,n=1,check_fucn=None,sys_prompt=None):
+    model = get_openai_model(model)
+
+   
+    d2 = {"model": model,"messages": [],"max_tokens": max_tokens,"temperature": temperature,'n': n}
+    if sys_prompt:
+        d2['messages'].append({"role": "system", "content": sys_prompt})
+    d2['messages'].append({"role": "user", "content": question})
+
+    if json_resp is True:
+        d2["response_format"] = {"type": "json_object"}
+    elif json_resp is False:
+        pass
+    else:
+        d2["response_format"] = json_resp
+
+    for num_count in range(3):
+        try:
+           
+            response = requests.post(f'http://170.106.108.95/v1/chat/completions', json=d2)
+            r_json = response.json()
+            if r2:= r_json.get("choices",None):
+                if n>1:
+                    gpt_res = []
+                    for i in r2:
+                        gpt_res.append(i["message"]["content"])
+                else:
+                    gpt_res= r2[0]["message"]["content"]
+
+               
+                gpt_content = str(gpt_res)
+                prompt_tokens = r_json["usage"]["prompt_tokens"]
+                completion_tokens = r_json["usage"]["completion_tokens"]
+                total_tokens = r_json["usage"]["total_tokens"]
+                insert_ip_token(real_ip,demo_name,gpt_content,prompt_tokens,completion_tokens,total_tokens)
+
+                simple_logger.info(f"问题日志:\n{question}\n回答日志:\n{gpt_res}")
+
+               
+                if not check_fucn:
+                    return gpt_res
+
+               
+                check_result = check_fucn(str(gpt_res))
+                if check_result: 
+                    return gpt_res
+                else:
+                    raise Exception(f"第{num_count+1}次共3次,GPT的校验没有通过,校验函数:{check_fucn.__name__}")
+
+            elif r_json.get("message") == "IP address blocked":
+                print("IP address blocked")
+                raise Exception("IP address blocked")
+            else:
+                print(f"小错误:{question[:10]}")
+                logger.error(response.text)
+        except Exception as e:
+            logger.info(f"小报错忽略{e}")
+        time.sleep(10)
+
+    logger.critical("get_answer_from_gpt 严重错误,3次后都失败了")
+
+
+
+def parse_gpt_phon_to_tuplelist(text:str) -> list:
+    """解析gpt返回的音标数据"""
+    result = []
+    if not text:
+        return []
+    for i in text.split("\n"):
+        ii = i.split("***")
+        if len(ii)>=3:
+            result.append((ii[0].strip(),ii[1].strip(),ii[2].strip()))
+    return result
+
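+# Each line of the GPT reply is expected as "word***phonetic1***phonetic2", e.g.
+# parse_gpt_phon_to_tuplelist("apple***/ˈæpl/***/ˈæpl/") == [("apple", "/ˈæpl/", "/ˈæpl/")].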
+
+if __name__ == '__main__':
+
+    question = "hello"
+    print(get_answer_from_gpt(question,temperature=0.8))

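When a caller needs validated output, the check_fucn hook drives the retry loop. A hedged sketch with a made-up validator (requests still go through the proxy URL hard-coded above):

    def at_least_ten_chars(answer: str) -> bool:
        return len(answer) >= 10

    resp = get_answer_from_gpt("Explain the word 'protect' in one sentence.",
                               temperature=0.3, check_fucn=at_least_ten_chars)
    print(resp)  # the first reply that passes the check, or None if all three attempts fail
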
+ 570 - 0
gpt/get_article.py

@@ -0,0 +1,570 @@
+# -*- coding: utf-8 -*-
+import random
+
+from gpt.chatgpt import get_answer_from_gpt
+from tools.new_mysql import MySQLUploader
+from tools.loglog import logger
+from tools.thread_pool_manager import pool_executor
+from common.common_data import all_exchange_words
+from common.split_text import *
+from data.get_all_exchange_words import get_word_exchange_list,word_to_prototype
+
+import requests
+import oss2
+from oss2.credentials import EnvironmentVariableCredentialsProvider
+from collections import OrderedDict
+from cachetools import TTLCache
+from concurrent.futures import Future, wait
+from random import randint
+import re
+import json
+import time
+import traceback
+
+
+class OtherBaseFunction:
+    def __init__(self):
+        self.m = MySQLUploader()
+        self.fake_meaningid = {} 
+
+        self.callback_url_dict = {} 
+        self.real_ip_dict = {} 
+        self.demo_name = {} 
+        self.query_cache_wordspelling = TTLCache(maxsize=2000, ttl=86400) 
+        self.query_cache_meaningid = TTLCache(maxsize=2000, ttl=86400) 
+
+    @staticmethod
+    def _diffculty_control(student_stage, vocabulary) -> dict:
+        """
+        Pick difficulty controls based on the student's stage or vocabulary size.
+        :param student_stage: school stage, 1/2/3 for primary / junior high / senior high
+        :param vocabulary: student vocabulary size, roughly 1200 primary, 2400 junior high, 4800 senior high
+        :return: dict of difficulty-control parameters used to build the prompt
+        """
+        if vocabulary <= 1200:
+            difficult_control = {"difficult_desc": "最简单最基础的入门的初级的幼儿园的毫无难度的", "paragraph_count": 1,"student_stage_str":"小学",
+                                 "pragrapg_count": "生成的文章要求100词左右,三个段落以上。允许有简单句式的出现。"}
+        elif 1200 < vocabulary <= 2400:
+            difficult_control = {"difficult_desc": "简单的容易的常见的难度低的", "paragraph_count": 3,"student_stage_str":"初中",
+                                 "pragrapg_count": r"生成的文章要求150词左右,三个段落以上。用\n\n分段。"}
+        else:
+            difficult_control = {"difficult_desc": "常见的初级的中国高考的", "paragraph_count": 5,"student_stage_str":"高中",
+                                 "pragrapg_count": r"生成的文章要求250词左右,允许有3-5个段落。用\n\n分段。"}
+        return difficult_control
+
+   
+    def _get_article_chinese_dict(self, title, r_article_sentences, task_id):
+        """
+        Get the Chinese translation of the article. Note: the sentence splitting here must stay consistent with split_article_make_json below.
+        :param title: article title
+        :param r_article_sentences: article sentences that passed the new-word check
+        :return: dict mapping each English sentence to its Chinese translation
+        """
+
+        def get_chinese_from_gpt(whole_article_sentences: list):
+            q = f"""你是一名在中国的英语教师,下面我会为你提供一个英语句子的列表,请按列表顺序将每个句子翻译成中文,结果按列表顺序放在chinese为键的json数组内。
+英语句子列表:{whole_article_sentences}
+
+要求:
+1.中文翻译的结果要按列表的顺序,依次放入sentence数组。回复的中文数量要与英语句子列表的数量一样,不要漏下。
+2.回复json,格式:{{"chinese":[sentence1,sentence2...]}}
+"""
+            real_ip = self.real_ip_dict[task_id]
+            demo_name = self.demo_name[task_id]
+            for cou in range(3):
+                try:
+                    r_json = json.loads(get_answer_from_gpt(q, temperature=0.8, json_resp=True, real_ip=real_ip, demo_name=demo_name))
+                    r_article_chinese_list = r_json.get("chinese")
+                    if len(r_article_chinese_list) == len(whole_article_sentences):
+                        r_article_chinese_dict = {k: str(v) for k, v in zip(whole_article_sentences, r_article_chinese_list)}
+                        return r_article_chinese_dict
+                    logger.warning(f"警告:第{cou + 1}次,中文翻译与原句数量不一致")
+                except json.decoder.JSONDecodeError:
+                    logger.error("gpt生成文章中文翻译,回复json格式化错误")
+                except Exception as e:
+                    logger.error(f"gpt生成文章中文翻译回复其他错误.{type(e).__name__} {e}")
+
+            logger.critical("严重错误:gpt生成文章中文翻译三次全错,请管理员检查")
+
+       
+        article_list = [title + "\n\n"] + r_article_sentences
+
+       
+        r_article_chinese_dict = get_chinese_from_gpt(whole_article_sentences=article_list)
+       
+        if r_article_chinese_dict:
+            return r_article_chinese_dict
+
+   
+    @staticmethod
+    def _calculate_new_word_rate(r_article_sentences):
+        article = "".join(r_article_sentences)
+        new_words = set() 
+        test_article = re.findall(r'\b\w+\'?\w*\b', article)
+        for word in test_article:
+            word2: str = word.split("'")[0] if "'" in word else word
+            if len(word) <= 2: 
+                continue
+            is_in_12000words = any([word2.lower() in all_exchange_words, word2.title() in all_exchange_words])
+            if not is_in_12000words:
+                new_words.add(word)
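+        # Note: the "rate" divides by the character count of the joined article, not by its word count.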
+        new_word_rate = round(len(new_words) / len(article), 3)
+        logger.info(f"开发调试生词率{new_word_rate}.生词{new_words}")
+       
+        new_words = list(new_words)
+        return new_word_rate, new_words
+
+   
+    def insert_article_to_mysql(self, title, article, chinese, task_id, code=0):
+       
+        self.m.execute_("INSERT INTO new_word_article (title,article,chinese, taskId,code) VALUES (%s, %s,%s,%s,%s)",
+                        (title, article, chinese, task_id, code))
+
+    def get_wordid_by_wordspelling(self, wordspelling:str):
+        """加一个功能。大字典内没有这个单词就自动插入,返回id"""
+        if wordspelling in self.query_cache_meaningid:
+            return self.query_cache_wordspelling[wordspelling]
+
+        s = "select Id from dictionary_word where wordspelling = %s"
+        prototype_word = word_to_prototype(wordspelling)
+        r = self.m.query_data(s, (prototype_word,))
+        if r:
+           
+            wordid = r[0][0]
+        else:
+           
+            wordid = 0
+
+        self.query_cache_wordspelling[wordspelling] = wordid
+        return wordid
+
+    def get_meaning_by_meaningid(self, meaningid:int):
+        """加一个功能。大字典内没有这个单词就自动插入,返回id"""
+        if meaningid in self.query_cache_meaningid:
+            return self.query_cache_meaningid[meaningid]
+
+        s = "select WordMeaning from dictionary_meaningitem where Id = %s"
+        r = self.m.query_data(s, (meaningid,))
+        meaning = r[0][0] if r else ""
+        self.query_cache_meaningid[meaningid] = meaning
+        return meaning
+
+    def _get_fake_meaningid(self,word):
+        """获得假词义id。但是保证同一个单词是一个id"""
+        if word in self.fake_meaningid:
+            return self.fake_meaningid[word]
+        s = "select Id from dictionary_meaningitem where WordSpelling = %s"
+        r = self.m.query_data(s, (word,))
+        if r:
+            fake_meaningid = r[0][0]
+        else:
+            fake_meaningid = random.randint(10000,99999) 
+
+        self.fake_meaningid[word] = fake_meaningid
+        return fake_meaningid
+
+   
+    @staticmethod
+    def _clean_gpt_res(single_sentence: str, gpt_text: str,split_words:list) -> list:
+        """# 解析成  键是句子+单词拼写,值是词义id"""
+        return_data = []
+        if not gpt_text:
+            return []
+
+        row_data = [i for i in gpt_text.split("\n") if "**" in i] 
+
+        already_spelling = set()
+        for row in row_data:
+            one_row_data_list = row.split("**")
+            if len(one_row_data_list) < 1: 
+                continue
+            one_row_data_list = [i.strip() for i in one_row_data_list] 
+            spelling, meaning_id = one_row_data_list[0:2]
+
+            already_spelling.add(spelling)
+            return_data.append([single_sentence, spelling, int(meaning_id)])
+
+       
+        for remaining_word in set(split_words).difference(already_spelling):
+            return_data.append([single_sentence, remaining_word, 0])
+
+        return return_data
+
+
+class GetArticle(OtherBaseFunction):
+    def __init__(self):
+        super().__init__()
+        self.auth = oss2.ProviderAuth(EnvironmentVariableCredentialsProvider())
+        self.bucket = oss2.Bucket(self.auth, 'oss-cn-hangzhou.aliyuncs.com', 'qingti-private')
+
+        self.article_result = {} 
+
+       
+        self.punctuation = [",", ".", "!", "?", ":", ";", '"', "–", "_", "-", "...", "......"]
+        all_exchange_words.update(self.punctuation)
+
+    def __del__(self):...
+
+   
+    def submit_task(self, words_meaning_ids: list[int],callback_url:str,real_ip:str,demo_name:str,
+                    student_stage:int,vocabulary:int,class_id:int):
+        """
+        words_meaning_ids: array of meaning ids used to generate the article, e.g. [110, 111, 112, 113, 114]
+        callback_url: callback URL notified when the article is ready
+        real_ip: caller IP
+        demo_name: project name
+        student_stage: school stage, 1/2/3
+        vocabulary: student vocabulary size, e.g. 500
+        class_id: class id passed back to the teaching-system callback
+        """
+        task_id = randint(10000000, 99999999)
+        logger.info(f"生成文章id。task_id:{task_id}。词义id:{words_meaning_ids}.")
+
+       
+        self.callback_url_dict[task_id] = callback_url
+        self.real_ip_dict[task_id] = real_ip
+        self.demo_name[task_id] = demo_name
+
+        words_meaning_str = ""
+        for wordmeaning_id in words_meaning_ids:
+            r = self.m.query_data("select WordSpelling,WordMeaning from dictionary_meaningitem where Id = %s",(wordmeaning_id,))
+            try:
+                words_meaning_str += str(r[0])
+            except IndexError:
+                err_msg = f"文章生成任务提交失败。task_id:{task_id},词义表内没有这个词义id:{wordmeaning_id}"
+                logger.error(err_msg)
+                return err_msg
+
+        try:
+           
+            pool_executor.submit(self.run_task, words_meaning_str, task_id,student_stage,vocabulary,class_id)
+           
+            resp_result = {"id":task_id,"key":f"study/article/{task_id}"}
+            logger.success(f"文章生成任务提交成功:{resp_result}")
+            return resp_result
+        except Exception as e:
+            err_msg = f"GetArticle提交任务失败{type(e).__name__},{e}"
+            logger.error(err_msg)
+            return err_msg
+
+   
+    def __get_article(self,words_meaning_str,task_id,student_stage,vocabulary) -> tuple:
+        dc = self._diffculty_control(student_stage,vocabulary)
+        q = f"""你是一名在中国的英语教师,下面我会为你提供一些带中文词义的英语种子单词,请根据这些种子单词的词义,生成一篇带标题的英语文章。
+提供种子单词:{words_meaning_str}
+
+要求:
+1.必须用提供的这个词义的单词,文章的其他单词使用{dc["difficult_desc"]}单词。
+2.文章应以自然、母语水平的英语撰写。请仅使用与种子单词难度相同或更简单的词汇,避免使用更高级的词汇和复杂的句子结构。请使用常用的高频英语词汇,避免使用不常见或专业的词汇。种子单词可以在文章中任意位置出现,不限制顺序。
+2.{dc["paragraph_count"]},为确保词汇难度符合要求,请仅使用 **中国教育部{dc['student_stage_str']}英语词汇表** 中的单词。
+3.请将文章返回一个一个带标点的句子,放在article_sentences里面的数组里。如果有分段,必须请在句子后面加\\n\\n。
+4.回复json,格式:{{"title":标题,"article_sentences":[句子1,句子2]}}
+"""
+        try:
+            real_ip = self.real_ip_dict[task_id]
+            demo_name = self.demo_name[task_id]
+            r_json = json.loads(get_answer_from_gpt(q, temperature=0.8, json_resp=True,real_ip=real_ip,demo_name=demo_name))
+            r_article_sentences = r_json.get("article_sentences")
+            r_title = r_json.get("title")
+            return r_title,r_article_sentences
+        except json.decoder.JSONDecodeError:
+            logger.error("gpt生成文章回复json格式化错误")
+        except Exception as e:
+            logger.error(f"gpt生成文章回复其他错误.{type(e).__name__} {e}")
+
+   
+    def __replace_new_word(self, old_article: str, new_words: list,task_id:int):
+        new_words_str = ",".join(new_words)
+        q = f"""你是一名在中国的英语教师,下面我会为你提供一篇英语文章和一些生词,请用其他单词使用简单、常见、难度低的单词将英语文章中的生词进行替换。
+缩写引号用单引号'。最终回复替换后的英语文章。
+
+英语文章:{old_article}
+生词:{new_words_str}
+
+要求:
+1.替换掉所有生词,替换单词使用简单、常见、难度低的单词。
+2.生成的文章要求150词左右,可以分段。
+3.回复json,格式:{{"title":标题,"article":英语文章}}
+"""
+        try:
+            real_ip = self.real_ip_dict[task_id]
+            demo_name = self.demo_name[task_id]
+            r_json = json.loads(get_answer_from_gpt(q, temperature=0.8, json_resp=True,real_ip=real_ip,demo_name=demo_name))
+            print(f"调试信息2 {r_json}")
+            r_article = r_json.get("article")
+            r_title = r_json.get("title")
+            return r_title, r_article
+        except json.decoder.JSONDecodeError:
+            logger.error("gpt替换生词文章回复json格式化错误")
+        except Exception as e:
+            logger.error(f"gpt替换生词文章回复其他错误.{type(e).__name__} {e}")
+
+   
+    def run_get_article_task(self, words_meaning_str, task_id,student_stage,vocabulary) -> tuple:
+        """
+        :param vocabulary: student vocabulary size
+        :param student_stage: school stage, 1/2/3
+        :param words_meaning_str: concatenated word/meaning string queried from the database
+        :param task_id: article task id
+        :return: (title, article sentences, sentence-to-Chinese dict)
+        """
+
+        def get_article_chinese(title,r_article_sentences,task_id,code=0)-> tuple:
+            r_article_chinese_dict = self._get_article_chinese_dict(title, r_article_sentences, task_id)
+            chinese_str = "\n".join(r_article_chinese_dict.values())
+            r_article = "".join(r_article_sentences)
+
+            self.insert_article_to_mysql(title=r_title, article=r_article, chinese=chinese_str, task_id=task_id,code=code)
+            return r_title, r_article_sentences, r_article_chinese_dict
+
+       
+        r_title,r_article_sentences = self.__get_article(words_meaning_str,task_id,student_stage,vocabulary)
+
+        new_word_rate, new_words = self._calculate_new_word_rate(r_article_sentences)
+        if new_word_rate < 0.03:
+            return get_article_chinese(title=r_title, r_article_sentences=r_article_sentences, task_id=task_id)
+
+       
+        replace_article_gpt = "".join(r_article_sentences)
+        for i in range(3):
+            if tuple_data:=self.__replace_new_word(old_article=replace_article_gpt, new_words=new_words,task_id=task_id):
+                r_title,replace_article_gpt = tuple_data
+
+                new_word_rate, new_words = self._calculate_new_word_rate(replace_article_gpt)
+                if new_word_rate < 0.03 or i == 2:
+                    if i == 2:
+                        logger.warning(f"3次后生词率未到3%以下。task_id:{task_id}")
+                    return get_article_chinese(title=r_title,r_article_sentences=r_article_sentences,task_id=task_id)
+
+   
+    def split_article_make_json(self, task_id: int,title:str, r_article_sentences: list,r_article_chinese_dict:dict):
+
+       
+        article = "".join(r_article_sentences)
+        article = title + "\n\n" + article
+
+       
+        all_sentence_word_meaningid_dict = self.run_query_word_meaning(article,task_id)
+
+       
+        word_count = get_article_words_count(title+article)
+
+       
+        create_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+        outside_json_dict = {"id": task_id, "body": article, "wordCount": word_count, "paragraphs": [],
+                             "createTime": create_time}
+
+        article_paragraphs = article.split("\n\n")
+        article_sentence_count = 0
+        for paragraph in article_paragraphs:
+            sentences = split_text_to_sentences(paragraph) 
+
+            p = {"sentences": []}
+            for single_sentence in sentences:
+                article_sentence_count += 1
+                single_sentence_chinese = r_article_chinese_dict.get(single_sentence,"")
+               
+                w = {"words": [],"chinese":single_sentence_chinese}
+                split_words:list[str] = re.findall(r'\b[-\'\w]+\b|[^\w\s]', single_sentence) 
+
+               
+                for originale_word in split_words:
+                    single_word = originale_word
+                    if not originale_word:
+                        continue
+                    if not re.search(r'[a-zA-Z]', originale_word):
+                        w["words"].append({"spell": originale_word, "type": "punctuation"})
+                        continue
+
+                   
+                    word_id = self.get_wordid_by_wordspelling(originale_word)
+
+                    x_data, type_ = all_sentence_word_meaningid_dict.get(single_sentence + originale_word, [0,0])
+                    if type_ == 0: 
+                        single_word = originale_word.lower()
+                        x_data, type_ = all_sentence_word_meaningid_dict.get(single_sentence + single_word, [0,0])
+                        if type_ == 0: 
+                            single_word = word_to_prototype(single_word)
+                            x_data, type_ = all_sentence_word_meaningid_dict.get(single_sentence + single_word,[0,0])
+
+                    if type_ == 0:
+                        logger.warning(f"警告:type_还是0,那就是二次查询时,也没有给词义。有漏下的单词{originale_word}")
+                        continue
+
+                   
+                    if type_ == 1: 
+                        meaning_id = x_data
+                        meaning = self.get_meaning_by_meaningid(x_data)
+                    elif type_ == 2: 
+                        meaning_id = self._get_fake_meaningid(single_word)
+                        meaning = x_data
+                    else:
+                        logger.error(f"出错:未知的type_:{type_}")
+                        meaning_id = 9999999
+                        meaning = '无'
+
+                   
+                    word_prototype = word_to_prototype(originale_word)
+
+                    word_json = {"id": word_id, "meaningId": meaning_id,"meaning":meaning, "spell": originale_word,
+                                 "exchanges": get_word_exchange_list(word=single_word),"prototype": word_prototype}
+                    w["words"].append(word_json)
+
+                p["sentences"].append(w)
+
+            outside_json_dict["paragraphs"].append(p)
+
+        outside_json_dict["articleSentenceCount"] = article_sentence_count
+        return outside_json_dict,word_count,article_sentence_count
+
+   
+    def run_query_word_meaning(self, article,task_id):
+        futures = []
+        article_paragraphs = article.split("\n\n")
+
+        for paragraph in article_paragraphs:
+            sentences = split_text_to_sentences(paragraph)
+
+            for single_sentence in sentences:
+                f = pool_executor.submit(self.query_word_meaning_from_gpt, single_sentence,task_id)
+                futures.append(f)
+
+        wait(futures)
+        all_sentence_word_meaningid_dict = {}
+        for f in futures:
+            f_result = f.result() 
+            all_sentence_word_meaningid_dict.update(f_result)
+        return all_sentence_word_meaningid_dict
+
+   
+    def query_word_meaning_from_gpt(self, single_sentence,task_id) -> dict:
+        """single_sentence 提交单个句子"""
+        split_words = split_text_to_word(single_sentence)
+       
+        split_words = [word_to_prototype(w) for w in split_words if w]
+
+        placeholders = ', '.join(['%s'] * len(split_words))
+        sql = f"SELECT WordSpelling, Id, WordMeaning FROM dictionary_meaningitem WHERE WordSpelling IN ({placeholders})"
+        r = self.m.query_data(sql, split_words)
+        list_of_tuples = list(r)
+        sorted_list_of_tuples = sorted(list_of_tuples, key=lambda x: split_words.index(x[0]))
+
+        insert_question_data = OrderedDict()
+        for spelling, meaning_id, word_meaning in sorted_list_of_tuples:
+
+            if spelling not in insert_question_data:
+                insert_question_data[spelling] = [(meaning_id, word_meaning)]
+            else:
+                insert_question_data[spelling].append((meaning_id, word_meaning))
+
+        insert_question_data_list = [f"{spelling} 词义组:{data}" for spelling, data in insert_question_data.items()]
+        insert_question_data_str = "\n".join(insert_question_data_list)
+
+        q = f"""我会给你一个[英语句子]和[数据组],[数据组]由句子中的每个[固定单词]和[词义组]两部分组成,[词义组]又由多个(词义id,词义)组成。
+我需要你帮我根据[英语句子]的语境,挑选这个[固定单词]的词义,在对应的在词义组内词义最贴近的id。按示例回复。
+要求:
+1.不用考虑词性,只要和英语句子中的词义相近就行。一个固定单词只对应一个词义id。
+2.如果提供的[词义组]内没有句子对应的词义,返回id为0,例如:[固定单词] ** 0
+3.回复的每行由固定单词,id两个部分组成,每个部分中间用**分隔。
+4.所有固定单词都要回复,不要漏下。
+
+英语句子:{single_sentence}.
+数据组:\n{insert_question_data_str}
+
+回复示例:
+beauty ** 302816
+apple ** 234567
+"""
+
+        real_ip = self.real_ip_dict[task_id]
+        demo_name = self.demo_name[task_id]
+        r_gpt = get_answer_from_gpt(q,real_ip=real_ip,demo_name=demo_name)
+
+       
+        already_data,need_twice_data = {},[]
+
+        three_list = self._clean_gpt_res(single_sentence, r_gpt,split_words)
+
+        for sentence, spelling, meaning_id in three_list:
+           
+            if meaning_id == 0:
+                need_twice_data.append([sentence, spelling, meaning_id])
+            else:
+               
+                already_data[sentence + spelling] = [meaning_id,1]
+
+        for _, spelling, _ in need_twice_data:
+            need_twice_words = ",".join([spelling])
+            q2 = f"""我会给你一个英语句子,和句子中的几个单词。请给我这几个单词在句子中的中文词义。按示例回复json数据。
+    英语句子:{single_sentence}
+    单词:{need_twice_words}
+    
+    要求:
+    1.给到的单词都要回复其中文词义。
+    2.回复的json,以单词为键,它的中文词义为键。
+    
+    回复示例:
+    {{"单词":"中文词义",...}}
+    """
+            r2 = get_answer_from_gpt(q2,real_ip=real_ip,demo_name=demo_name,json_resp=True)
+            r2_json:dict = json.loads(r2)
+            for w_spelling,chinese_meaning in r2_json.items():
+                already_data[single_sentence + w_spelling] = [chinese_meaning,2]
+
+        return already_data
+
+   
+    def upload_json_file_to_oss(self,article_id:int,data_dict:dict):
+        json_data = json.dumps(data_dict, ensure_ascii=False)
+        object_name = f'study/article/{article_id}'
+        content = json_data.encode('utf-8') 
+        for _ in range(2):
+            try:
+                r = self.bucket.put_object(object_name, content)
+            except Exception as e:
+                logger.error(f"上传文件错误{type(e).__name__} {e},taskid:{article_id}")
+                continue
+            if r.resp.status == 200:
+                logger.success(f"上传oss成功 {article_id}")
+                return True
+        logger.critical(f"2次上传oss错误,taskid:{article_id}")
+        return False
+
+   
+    def notice_teach_system(self,article_id:int,class_id:int,word_count:int,article_sentence_count:int):
+        url = self.callback_url_dict.get(article_id)
+        if not url or "localhost/callback" in url:
+            return False
+
+        json_data = {"classId": class_id,"articleId": article_id,"articleWordCount": word_count,"articleSentenceCount": article_sentence_count}
+        for _ in range(3):
+            try:
+                r = requests.post(url,json=json_data)
+                r.raise_for_status()
+                self.callback_url_dict.pop(article_id,'')
+                logger.success(f"通知成功{r.text}")
+                return True
+            except Exception as e:
+                logger.warning(f"{type(e).__name__} {e}")
+
+        logger.critical(f"通知接口失败,三次全错. article_id:{article_id} callback_url:{url}")
+
+   
+    def clean_source(self,article_id):
+        self.callback_url_dict.pop(article_id, '')
+        self.real_ip_dict.pop(article_id, '')
+
+   
+    def run_task(self,words_meaning_str, task_id,student_stage,vocabulary,class_id):
+        try:
+            title,r_article_sentences,r_article_chinese_dict = self.run_get_article_task(words_meaning_str, task_id,student_stage,vocabulary)
+
+            outside_json_dict,word_count,article_sentence_count = self.split_article_make_json(task_id,title,r_article_sentences,r_article_chinese_dict)
+            self.upload_json_file_to_oss(article_id=task_id,data_dict=outside_json_dict)
+            self.notice_teach_system(article_id=task_id,class_id=class_id,word_count=word_count,article_sentence_count=article_sentence_count)
+            self.clean_source(article_id=task_id)
+            logger.success(f"文章任务完成。taskid:{task_id}")
+
+        except Exception as e:
+            logger.error(f"{type(e).__name__} {e}")
+            traceback_str = traceback.format_exc()
+            logger.error(f"外围错误追溯:{traceback_str}")

+ 271 - 0
gpt/get_article2.py

@@ -0,0 +1,271 @@
+# -*- coding: utf-8 -*-
+
+from gpt.chatgpt import get_answer_from_gpt
+from gpt.gpt_check import CheckGptAnswer,CheckArticleResult
+from tools.new_mysql import MySQLUploader
+from tools.loglog import logger, log_err_e
+from tools.thread_pool_manager import pool_executor
+from common.common_data import all_exchange_words
+from common.split_text import split_text_to_word
+
+from pydantic import BaseModel
+from cachetools import TTLCache
+from concurrent.futures import wait
+from random import randint, shuffle
+import json
+import requests
+from openpyxl import load_workbook
+from tenacity import retry, stop_after_attempt, wait_fixed
+
+
+def get_article_difficulty(article) -> int:
+    """获取文章的难度值"""
+    url = "http://qbank.yunzhixue.cn/api/article/analysis"
+
+    data = {"body": article, "question": ""}
+    try:
+        response = requests.post(url, json=data)
+    except Exception as e:
+        log_err_e(e, msg="获取文章难度值;")
+        return 0
+
+    if response.status_code == 200:
+        difficult_value = response.json()['data']['difficult']
+        return difficult_value
+    else:
+        logger.error(f"错误状态码{response.status_code}")
+        return 0
+
+
+def find_interval(number):
+    """
+    判断一个数字属于哪个难度等级区间。31级是例外情况,需要排查
+
+    :param number: 要检查的数字。
+    :return: 返回包含该数字的区间,如果没有找到,则返回 None。
+    """
+    intervals = [(1, 200), (201, 250), (251, 300), (301, 350), (351, 400), (401, 450), (451, 550), (551, 650), (651, 750), (751, 850), (851, 950),
+                 (951, 1100),
+                 (1101, 1250), (1251, 1400), (1401, 1550), (1551, 1700), (1701, 1900), (1901, 2100), (2101, 2300), (2301, 2600), (2601, 2900),
+                 (2901, 3200),
+                 (3201, 3500), (3501, 3900), (3901, 4300), (4301, 4700), (4701, 5100), (5101, 5500), (5501, 5900), (5901, 6500), (6501, 99999)]
+    for index, (start, end) in enumerate(intervals, start=1):
+        if start <= number <= end:
+            return index
+    logger.error(f"文章难度判断不对:{number}")
+    return 0
+
+
+def merge_and_split(list1, list2):
+    """Shuffle the two word lists together, then return (first 15 items, remainder)."""
+    combined = list1 + list2
+    shuffle(combined)
+
+    if len(combined) > 15:
+        two_thirds = combined[:15]
+        one_third = combined[15:]
+    else:
+        two_thirds = combined
+        one_third = []
+
+    return two_thirds, one_third
+
+
+class GetArticle:
+    def __init__(self):
+        self.m = MySQLUploader()
+
+        self.callback_url_dict = {}
+        self.real_ip_dict = {} 
+        self.demo_name = {}
+
+        self.article_result = {} 
+
+       
+        self.punctuation = [",", ".", "!", "?", ":", ";", '"', "–", "_", "-", "...", "......"]
+        all_exchange_words.update(self.punctuation)
+
+       
+        self.exchange_data: dict[str, list] = {} 
+        self.read_spring_bamboo_exchange_table()
+
+   
+    def read_spring_bamboo_exchange_table(self):
+        """变形是键,原型是值"""
+        wb = load_workbook(r"data/春笋单词对照变形.xlsx", read_only=True, data_only=True)
+        ws = wb.active
+        for row in ws.values:
+            prototype = row[0] 
+            exchange = row[1] 
+            if prototype not in self.exchange_data:
+                self.exchange_data[prototype] = [exchange]
+            else:
+                self.exchange_data[prototype].append(exchange)
+        wb.close()
+
+   
+    def parser_insert_to_mysql(self, resp_result):
+        for single_article in resp_result['articles']:
+           
+            article = single_article['body']
+            article_json = json.dumps(single_article)
+            difficult_value = find_interval(get_article_difficulty(article)) 
+            if not difficult_value:
+                logger.error("文章难度等级为0;")
+            sql = "INSERT INTO spring_bamboo_article (article_json,difficult_level) VALUES (%s,%s)"
+            self.m.execute_(sql, (article_json, difficult_value))
+
+   
+    def submit_task(self, core_words: list, extend_words: list, take_count: int, student_stage: int, real_ip: str, demo_name: str):
+        """
+        words_meaning_list: 词义id 包含词义ID的数组集合,用于生成文章。- 示例:[110, 111, 112, 113, 114]
+        take_count: 取文章数量 (int类型,正常是2篇,最大8篇)
+        student_stage: 学段(int类型:1.小学;2.初中;3.高中;)
+        demo_name: 项目名称
+        """
+        task_id = randint(10000000, 99999999)
+       
+        logger.info(f"生成文章id。task_id:{task_id}")
+
+        self.real_ip_dict[task_id] = real_ip
+        self.demo_name[task_id] = demo_name
+
+        try:
+            resp_result = self.run_task(core_words, extend_words, task_id, take_count, student_stage)
+            self.parser_insert_to_mysql(resp_result) 
+            return resp_result
+        except Exception as e:
+            err_msg = f"GetArticle提交任务失败{type(e).__name__},{e}"
+            log_err_e(e, msg="GetArticle提交任务失败;")
+
+            return err_msg
+
+   
+    @retry(stop=stop_after_attempt(2), wait=wait_fixed(3), reraise=True)
+    def get_article(self, core_words: list, extend_words: list, student_stage: int, task_id: int, take_count: int) -> dict:
+        diffculty_control = {
+            1: {"grade": "小学", "article_word_count": 60, "desc_difficulty": "最简单最容易没有难度", "paragraph_count": 1,
+                "desc2": "文章整体非常简洁,通俗易懂,适合初学者,刚入门,单词全是最常见的,语句通顺即可。",
+                "choice_desc": "选择题难度尽可能简单,但是不要让所有选择题让其直接在文中找到答案,允许1-2个选择题很简单,参考中国小学生水平"},
+            2: {"grade": "初中", "article_word_count": 300, "desc_difficulty": "简单、常见、难度低", "paragraph_count": 3,
+                "desc2": "文章整体难度适中,大约和中国初中生,中国CET-3,雅思4分这样的难度标准。",
+                "choice_desc": "选择题难度适中,但是不要所有选择题让其直接在文中找到答案,参考中国初中生水平,中考标准。"},
+            3: {"grade": "高中", "article_word_count": 600, "desc_difficulty": "常见、高中难度的", "paragraph_count": 5,
+                "desc2": "文章整体难度适中,大约和中国的高中生,中国CET-6,雅思6分这样的难度标准。",
+                "choice_desc": "选择题难度偏难,要有迷惑性混淆性,答案不要出现直接在文中,4个选项要学生推理或逻辑判断,参考中国高中生水平,高考标准。"}
+        }
+        take_count_dict = {0: "", 1: "一", 2: "二", 3: "三", 4: "四", 5: "五", 6: "六", 7: "七", 8: "八", 9: "九"}
+        different_cou = take_count_dict.get(take_count, "")
+
+        grade = diffculty_control[student_stage]["grade"] 
+        select_word_count = diffculty_control[student_stage]["article_word_count"] 
+        select_diffculty = diffculty_control[student_stage]["desc_difficulty"] 
+        select_paragraph_count = diffculty_control[student_stage]["paragraph_count"] 
+        desc2 = diffculty_control[student_stage]["desc2"]
+        choice_desc = diffculty_control[student_stage]["choice_desc"] 
+
+       
+        shuffle(core_words)
+        core_words_meaning_str = ";".join([str(i['meaning_id']) + ' ' + i["spell"] + ":" + i["meaning"] for i in core_words])
+        extend_words_meaning_str = ";".join([str(i['meaning_id']) + ' ' + i["spell"] + ":" + i["meaning"] for i in extend_words])
+
+        no_escape_code = r"\\n\\n"
+        json_model = r'{"difficultSentences":[{"english":"string","chinese":"string"}],"usedMeanIds":[0,0,0],"englishArticle":"string","chineseArticle":"string","questions":[{"trunk":"string","analysis":"string","candidates":[{"label":"string","text":"string","isRight":0}]}]}'
+        sys_prompt = "你是一个专业的英语老师,擅长根据用户提供的词汇生成对应的英语文章和中文翻译和4个配套选择题。"
+        q = f"""下面我会为你提供两组数据,[单词组1]和[单词组2](里面包含词义id,英语单词,中文词义),优先使用[单词组1]内的单词,请根据这些单词的中文词义,\
+生成一篇带中文翻译的考场英语文章,英语文章和中文翻译要有[标题]。注意这个单词有多个词义时,生成的英语文章一定要用提供的中文词义。并挑选一句复杂的句子和其中文翻译,放入difficultSentences。\
+英语文章,放入"englishArticle"中。中文翻译,放入"chineseArticle"中。最终文中使用到的单词id放入"usedMeanIds"中。\
+4个选择题,放入questions字段。questions结构下有4个选择题对象,其中trunk是[英语]问题文本,analysis是[中文]的问题分析,candidates是4个ABCD选项,内部有label是指选项序号A B C D ,text是[英语]选项文本,isRight是否正确答案1是正确0是错误。
+
+要求:
+1.必须用提供的这个词义的单词,其他单词使用{select_diffculty}的单词。{desc2}
+2.优先保证文章语句通顺,意思不要太生硬。不要为了使用特定的单词,造成文章语义前后不搭,允许不使用个别词义。
+3.文章中使用提供单词,一定要和提供单词的中文词义匹配,尤其是一词多义时,务必使用提供单词的词义。必须要用提供单词的词义。如果用到的词义与提供单词词义不一致,请不要使用这个单词。
+4.生成的文章要求{select_word_count}词左右,可以用{no_escape_code}字符分段,一般{select_paragraph_count}个段落左右。第一段是文章标题。
+5.生成文章优先使用[单词组1]的词义,其次可以挑选使用[单词组2]的词义。允许不使用[单词组1]的个别单词,优先保证文章整体意思通顺连贯和故事完整。
+6.回复紧凑无空格的json数据,示例:{json_model}
+
+提供[单词组1]:{core_words_meaning_str};
+提供[单词组2]:{extend_words_meaning_str};
+"""
+        try:
+            real_ip = self.real_ip_dict[task_id]
+            demo_name = self.demo_name[task_id]
+            r_json = json.loads(get_answer_from_gpt(q, temperature=1, json_resp=True, real_ip=real_ip, demo_name=demo_name,model='gpt-4.1',
+                                                    check_fucn=CheckArticleResult.get_article_1, max_tokens=8000, sys_prompt=sys_prompt))
+
+
+
+           
+            allWordAmount = 0
+            allWordAmount += len(split_text_to_word(r_json["englishArticle"]))
+            for i in r_json["questions"]:
+                count_trunk = len(split_text_to_word(i["trunk"]))
+                count_candidates = sum([len(split_text_to_word(ii["text"])) for ii in i["candidates"]])
+                allWordAmount += count_trunk
+                allWordAmount += count_candidates
+
+           
+            usedMeanIds: list = r_json['usedMeanIds'] 
+           
+            article_words = split_text_to_word(r_json['englishArticle'])
+           
+            for i in core_words + extend_words:
+                meaning_id = i.get('meaning_id', 0)
+                if not meaning_id:
+                    continue
+                word = i["spell"]
+                if meaning_id not in usedMeanIds and word in self.exchange_data: 
+                    words_exchanges_list = self.exchange_data[word]
+                    for exchange_word in words_exchanges_list:
+                        if exchange_word in article_words:
+                            usedMeanIds.append(meaning_id)
+                            break
+
+           
+            r_json["body"] = r_json.pop("englishArticle")
+            r_json["chinese"] = r_json.pop("chineseArticle")
+
+            return {**r_json, "allWordAmount": allWordAmount}
+        except json.decoder.JSONDecodeError:
+            logger.error("gpt生成文章回复json格式化错误")
+            raise
+        except Exception as e:
+            logger.error(f"gpt生成文章回复其他错误.{type(e).__name__} {e}")
+            raise
+
+   
+    def run_get_article_task(self, core_words, extend_words, task_id, take_count, student_stage) -> dict:
+        """
+        :param core_words: 核心单词数据,优先级1;可能为空
+        :param extend_words: 扩展单词数据,优先级2;可能为空
+        :param task_id: 任务id
+        :param take_count: 文章数量
+        :param student_stage: 学段标识,整型,123
+        :return:
+        """
+        futures = []
+        for i in range(take_count):
+            futures.append(pool_executor.submit(self.get_article, core_words, extend_words, student_stage, task_id, take_count))
+        wait(futures)
+        return_json = {"articles": []}
+        for t in futures:
+            return_json["articles"].append(t.result())
+        return return_json
+
+   
+    def run_task(self, core_words, extend_words, task_id, take_count, student_stage):
+        try:
+            outside_json = self.run_get_article_task(core_words, extend_words, task_id, take_count, student_stage)
+            logger.success(f"文章2任务完成。taskid:{task_id}\n{outside_json}")
+            return outside_json
+        except Exception as e:
+            logger.error(f"{type(e).__name__} {e}")
+            raise  # re-raise so submit_task reports the original error
+        finally:
+            self.real_ip_dict.pop(task_id)
+            self.demo_name.pop(task_id)
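A minimal, hypothetical driver for GetArticle.submit_task in gpt/get_article2.py (the meaning_id values and word items below are made up, and the MySQL and GPT back ends must of course be reachable for an end-to-end run):

# Hypothetical call sketch; word items follow the meaning_id / spell / meaning structure used above.
from gpt.get_article2 import GetArticle

core_words = [{"meaning_id": 101, "spell": "courage", "meaning": "勇气"}]
extend_words = [{"meaning_id": 202, "spell": "fever", "meaning": "发烧"}]

g = GetArticle()
result = g.submit_task(core_words, extend_words, take_count=2,
                       student_stage=2, real_ip="127.0.0.1", demo_name="demo")
# submit_task returns the articles dict on success, or an error-message string on failure
print(result["articles"][0]["body"] if isinstance(result, dict) else result)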

+ 103 - 0
gpt/gpt.py

@@ -0,0 +1,103 @@
+# -*- coding:utf-8 -*-
+if __name__ == '__main__':
+    import os
+    os.chdir("..")
+
+import requests
+import random
+import time
+from tools.loglog import logger,simple_logger
+from tools.new_mysql import MySQLUploader
+
+m = MySQLUploader()
+
+
+def insert_ip_token(ip,demo_name,gpt_content,prompt_tokens,completion_tokens,total_tokens):
+    sql = "insert into consumer_token (ip,demo_name,gpt_content,prompt_tokens,completion_tokens,total_tokens) values (%s,%s,%s,%s,%s,%s)"
+    m.execute_(sql,(ip,demo_name,str(gpt_content),prompt_tokens,completion_tokens,total_tokens))
+
+def get_answer_from_gpt(question,real_ip="localhost",demo_name="无",model="gpt-4o",max_tokens=3500,temperature:float=0,json_resp=False,n=1,sys_prompt=None):
+   
+   
+   
+
+    if "3.5" in model or "3.5-turbo" in model or "3.5turbo" in model:
+        model = "gpt-3.5-turbo"
+    elif "4o" in model or "gpt4o" in model:
+        model = "gpt-4o"
+    elif "4turbo" in model or "4-turbo" in model:
+        model = "gpt-4-turbo"
+
+   
+    d2 = {
+    "model": model,
+    "messages": [],
+    "max_tokens": max_tokens,
+    "temperature": temperature,
+    'n': n}
+
+    if sys_prompt:
+        d2['messages'].append({"role": "system", "content": sys_prompt})
+    d2['messages'].append({"role": "user", "content": question})
+
+
+    if json_resp is True:
+        d2["response_format"] = {"type": "json_object"}
+    elif json_resp is False:
+        pass
+    else:
+        d2["response_format"] = json_resp
+
+    for _ in range(3):
+        try:
+           
+            response = requests.post(f'http://170.106.108.95/v1/chat/completions', json=d2)
+            r_json = response.json()
+            if r2 := r_json.get("choices", None):
+                if n>1:
+                    gpt_res = []
+                    for i in r2:
+                        gpt_res.append(i["message"]["content"])
+                else:
+                    gpt_res= r2[0]["message"]["content"]
+
+               
+                gpt_content = str(gpt_res)
+                prompt_tokens = r_json["usage"]["prompt_tokens"]
+                completion_tokens = r_json["usage"]["completion_tokens"]
+                total_tokens = r_json["usage"]["total_tokens"]
+                insert_ip_token(real_ip,demo_name,gpt_content,prompt_tokens,completion_tokens,total_tokens)
+
+                simple_logger.info(f"问题日志:\n{question}\n回答日志:\n{gpt_res}")
+                return gpt_res
+            elif r_json.get("message") == "IP address blocked":
+                print("IP address blocked")
+                raise Exception("IP address blocked")
+            else:
+                print(f"小错误:{question[:10]}")
+                logger.error(response.text)
+        except Exception as e:
+            logger.info(f"小报错忽略{e}")
+        time.sleep(10)
+
+    logger.critical("get_answer_from_gpt 严重错误,3次后都失败了")
+
+
+
+def parse_gpt_phon_to_tuplelist(text:str) -> list:
+    """解析gpt返回的音标数据"""
+    result = []
+    if not text:
+        return []
+    for i in text.split("\n"):
+        ii = i.split("***")
+        if len(ii)>=3:
+            result.append((ii[0].strip(),ii[1].strip(),ii[2].strip()))
+    return result
+
+
+if __name__ == '__main__':
+    pass
+
+   
+   
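parse_gpt_phon_to_tuplelist expects one `word *** phonetic *** phonetic` entry per line (the two phonetic columns are presumably UK/US variants); a small illustration of that contract follows. Note that importing gpt.gpt also constructs a MySQLUploader at module level, so database configuration has to be readable:

from gpt.gpt import parse_gpt_phon_to_tuplelist

sample = "apple *** /ˈæpl/ *** /ˈæpl/\nbanana *** /bəˈnɑːnə/ *** /bəˈnænə/"
for word, phon_a, phon_b in parse_gpt_phon_to_tuplelist(sample):
    print(word, phon_a, phon_b)
# apple /ˈæpl/ /ˈæpl/
# banana /bəˈnɑːnə/ /bəˈnænə/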

+ 343 - 0
gpt/gpt_check.py

@@ -0,0 +1,343 @@
+# -*- coding:utf-8 -*-
+"""
+校验模块
+GPT回复的各个校验模块"""
+import json
+import re
+
+
+class CheckGptAnswer:
+    @staticmethod
+    def default_no_check(gpt_text: str):
+       
+        return True
+
+   
+    @staticmethod
+    def score_value(gpt_text: str):
+       
+        if gpt_text.count("【取值0】") > 1:
+            return False
+        return True if re.findall("【取值.+?】", gpt_text) else False
+
+   
+    @staticmethod
+    def original_modify(gpt_text: str):
+        split_text = gpt_text.split("\n")
+        for t in split_text:
+           
+            if "修改理由" in t and "错误" in t and len(t)<=25:
+                return False
+            elif "没有严重的语法错误" in t:
+                return False
+
+        if "【原句】" in gpt_text and "【修改后】" in gpt_text:
+            return True
+        else:
+            return False
+
+   
+    @staticmethod
+    def count_chinese_characters_50(s: str):
+        chinese_count = 0
+        for char in s:
+           
+            if '\u4e00' <= char <= '\u9fff':
+                chinese_count += 1
+        return True if s and chinese_count/len(s) >= 0.5 else False
+
+   
+    @staticmethod
+    def count_english_count_30(s: str,english_words_count=30):
+        words_count = len(re.findall(r"[a-zA-Z\']+",s))
+        return True if words_count >= english_words_count else False
+
+   
+    @staticmethod
+    def count_letter_percentages(s: str, letter_percentages=0.8):
+        """Return True when the share of alphabetic characters (str.isalpha) exceeds the threshold."""
+        if not s:
+            return False
+        count_letter = sum(1 for char in s if char.isalpha())
+        return round(count_letter / len(s), 2) > letter_percentages
+
+
+class CheckArticleResult:
+    @staticmethod
+    def default_no_check(gpt_text: str):
+       
+        return True
+
+    @staticmethod
+    def get_article_1(gpt_text: str):
+       
+        try:
+            json_object = json.loads(gpt_text)
+        except json.decoder.JSONDecodeError:
+            return False
+       
+        if not all(i in json_object for i in ["englishArticle","chineseArticle","difficultSentences","usedMeanIds","questions"]):
+            return False
+        return True
+
+if __name__ == '__main__':
+   
+   
+   
+   
+   
+
+    text = """{
+  "difficultSentences": [
+    {
+      "english": "However, even on his sick days, James carries a spirit of courage and never lets himself rest for too long.",
+      "chinese": "然而,即使在他生病的日子里,詹姆斯仍带着勇气,从未让自己放松太久。"
+    }
+  ],
+  "usedMeanIds": [749, 1945, 1597, 1953, 2038, 2162, 1625],
+  "englishArticle": "Overcoming Illness with Courage\n\nJames has always been an active person. Whether it is running in the park, playing basketball, or hiking in the mountains, he never likes to rest. However, last week, he suddenly felt a horrible headache. Alongside the headache, he experienced toothache and fever. The illness made him sick and he had to lie in bed. Not only was he unable to move around freely, but he also had to place an ice pack on his knee and neck to ease the pain.\n\nHis family was worried and thought it would be a serious matter. His mother sat beside him, asking if he needed a break or any help. James, showing his spirit of courage, assured her that he would recover soon. He understood that even when life presents challenges, they must be faced with bravery.\n\nHowever, even on his sick days, James carries a spirit of courage and never lets himself rest for too long. With each passing day, he begins to feel a bit better. The fever decreases, and his headaches become less frequent. Soon, he looks forward to the day he can return to his active lifestyle, encouraged by the fact that he fought his illness with bravery and determination.",
+  "chineseArticle": "勇敢地战胜疾病\n\n詹姆斯一直是个活跃的人。无论是在公园跑步,打篮球,还是爬山,他从不喜欢放松。然而,上周他突然感到头痛欲裂。除了头痛外,他还感到牙痛和发烧。这场病让他生病了,他不得不躺在床上。他不仅无法自由活动,还得在膝盖和颈部放冰袋来缓解疼痛。\n\n他的家人很担心,觉得这会是个严重的问题。他的母亲坐在他旁边,询问他是否需要间歇或者帮忙。詹姆斯展现了他的勇气,向她保证他会很快康复。他明白,即使生活给出挑战,也必须勇敢面对。\n\n然而,即使在他生病的日子里,詹姆斯仍带着勇气,从未让自己放松太久。随着每一天的过去,他开始感觉好一点了。发烧开始下降,头痛也减少了。他期待着能回到他积极的生活方式,鼓舞于他凭借勇气和决心战胜了疾病。",
+  "questions": [
+    {
+      "trunk": "What activities does James enjoy?",
+      "analysis": "根据文章,詹姆斯喜欢活跃的生活方式,比如跑步、打篮球和登山。",
+      "candidates": [
+        {
+          "label": "A",
+          "text": "Swimming",
+          "isRight": 0
+        },
+        {
+          "label": "B",
+          "text": "Running, basketball, and hiking",
+          "isRight": 1
+        },
+        {
+          "label": "C",
+          "text": "Reading books",
+          "isRight": 0
+        },
+        {
+          "label": "D",
+          "text": "Painting",
+          "isRight": 0
+        }
+      ]
+    },
+    {
+      "trunk": "How did James's family react to his illness?",
+      "analysis": "文章中提到,詹姆斯的家人担心他的健康情况。",
+      "candidates": [
+        {
+          "label": "A",
+          "text": "They were indifferent",
+          "isRight": 0
+        },
+        {
+          "label": "B",
+          "text": "They were worried",
+          "isRight": 1
+        },
+        {
+          "label": "C",
+          "text": "They laughed",
+          "isRight": 0
+        },
+        {
+          "label": "D",
+          "text": "They scolded him",
+          "isRight": 0
+        }
+      ]
+    },
+    {
+      "trunk": "What symptoms did James experience?",
+      "analysis": "根据文章,詹姆斯有头痛、牙痛和发烧等症状。",
+      "candidates": [
+        {
+          "label": "A",
+          "text": "Coughing and sneezing",
+          "isRight": 0
+        },
+        {
+          "label": "B",
+          "text": "Headache, toothache, and fever",
+          "isRight": 1
+        },
+        {
+          "label": "C",
+          "text": "Stomach ache",
+          "isRight": 0
+        },
+        {
+          "label": "D",
+          "text": "Sore throat",
+          "isRight": 0
+        }
+      ]
+    },
+    {
+      "trunk": "How did James approach his recovery?",
+      "analysis": "詹姆斯通过保持勇气和决心来对待他的康复,并没有让自己放松太久。",
+      "candidates": [
+        {
+          "label": "A",
+          "text": "With defeat",
+          "isRight": 0
+        },
+        {
+          "label": "B",
+          "text": "With carelessness",
+          "isRight": 0
+        },
+        {
+          "label": "C",
+          "text": "With courage and determination",
+          "isRight": 1
+        },
+        {
+          "label": "D",
+          "text": "With anger",
+          "isRight": 0
+        }
+      ]
+    }
+  ]
+}"""
+    text2 = r"""{
+    "difficultSentences": [
+        {
+            "english": "Even when she was feeling sick, she showed great spirit and continued her project with the same dedication.",
+            "chinese": "即使她感觉生病了,她仍然展示出极大的勇气,继续以同样的奉献精神进行她的项目。"
+        }
+    ],
+    "usedMeanIds": [
+        1945,
+        1953,
+        1816,
+        544,
+        1453,
+        2038,
+        1597,
+        1625
+    ],
+    "englishArticle": "A Day with Unexpected Challenges\n\nIt was a regular Friday morning when Emily woke up with a headache. The thought of going to school was discomforting, but she decided to push through. Emily dressed slowly, feeling each movement in her knee as she descended the stairs for breakfast. Her mother noticed her pale face and asked if she was alright. 'I have a bad headache and a sore neck,' Emily replied. Her mother suggested she might be coming down with a fever and checked her temperature.\n\nDespite feeling under the weather, Emily showed great spirit and chose to attend school due to an important math test. When she reached the classroom, her friends greeted her with smiles, but Emily felt too sick to smile back. During the test, every question seemed to be a major problem. Her vision blurred as the headache intensified, and she wondered if she could finish the test. She reminded herself to stay focused, even as she felt increasingly feverish.\n\nAfter the test, Emily took a break and went to the nurse's office to rest. The nurse noticed her flushed skin and lethargic demeanor. 'You need to rest and possibly see a doctor,' the nurse advised, noting that a toothache could also be contributing to her discomfort. As Emily lay on the cot, she felt thankful for the brief respite from the classroom chaos. She knew that her health mattered more than anything else. The day, though challenging, taught Emily the importance of listening to her body and asking for help if necessary.",
+    "chineseArticle": "一个意想不到挑战的一天\n\n这是一个普通的星期五早晨,艾米莉醒来时头痛。想到要去学校就让人难受,但她决定继续坚持。艾米莉慢慢穿好衣服,感受到在她下楼吃早餐时每一步膝盖的酸痛。她的母亲注意到她脸色苍白,问她是不是不舒服。“我头很痛,脖子也痛,”艾米莉回答。她的母亲建议她可能是发烧了,并检查了她的体温。\n\n尽管身体不适,艾米莉展示了极大的勇气,选择去上学,因为那天有一场重要的数学考试。当她到达教室时,她的朋友们以微笑迎接她,但艾米莉觉得太不舒服了,无法回应微笑。在考试期间,每一个问题似乎都成了一个大问题。她的视线模糊,因为头痛加剧,她怀疑自己是否能完成考试。即使感到愈发发烧,她也提醒自己要保持专注。\n\n考试结束后,艾米莉休息片刻,去了校医室休息。校医注意到她泛红的皮肤和疲惫的样子。“你需要休息,可能需要看医生,”校医建议,并指出牙痛也可能是她不适的原因。当艾米莉躺在床上时,她感激不已,感谢能从课堂的混乱中得到片刻的喘息。她知道健康比什么都重要。这一天虽然充满挑战,却教会了艾米莉聆听自己身体的重要性,并在必要时寻求帮助。",
+    "questions": [
+        {
+            "trunk": "What was Emily's main challenge during the day?",
+            "analysis": "根据文章,Emily主要面对的挑战是身体不适。",
+            "candidates": [
+                {
+                    "label": "A",
+                    "text": "Completing her math test",
+                    "isRight": 0
+                },
+                {
+                    "label": "B",
+                    "text": "Feeling unwell with a headache and fever",
+                    "isRight": 1
+                },
+                {
+                    "label": "C",
+                    "text": "Greeting her friends",
+                    "isRight": 0
+                },
+                {
+                    "label": "D",
+                    "text": "Attending a birthday party",
+                    "isRight": 0
+                }
+            ]
+        },
+        {
+            "trunk": "Why did Emily decide to go to school?",
+            "analysis": "她去了学校是因为有重要的数学考试。",
+            "candidates": [
+                {
+                    "label": "A",
+                    "text": "To see her friends",
+                    "isRight": 0
+                },
+                {
+                    "label": "B",
+                    "text": "Because of an important math test",
+                    "isRight": 1
+                },
+                {
+                    "label": "C",
+                    "text": "To visit the nurse",
+                    "isRight": 0
+                },
+                {
+                    "label": "D",
+                    "text": "To join a sports event",
+                    "isRight": 0
+                }
+            ]
+        },
+        {
+            "trunk": "What did the nurse advise Emily to do?",
+            "analysis": "校医建议Emily应该休息并可能看医生。",
+            "candidates": [
+                {
+                    "label": "A",
+                    "text": "Return to class",
+                    "isRight": 0
+                },
+                {
+                    "label": "B",
+                    "text": "Rest and possibly see a doctor",
+                    "isRight": 1
+                },
+                {
+                    "label": "C",
+                    "text": "Take more classes",
+                    "isRight": 0
+                },
+                {
+                    "label": "D",
+                    "text": "Continue the test",
+                    "isRight": 0
+                }
+            ]
+        },
+        {
+            "trunk": "What lesson did Emily learn from her experience?",
+            "analysis": "文章最后提到Emily意识到健康的重要性。",
+            "candidates": [
+                {
+                    "label": "A",
+                    "text": "To finish tests quickly",
+                    "isRight": 0
+                },
+                {
+                    "label": "B",
+                    "text": "The importance of health and asking for help",
+                    "isRight": 1
+                },
+                {
+                    "label": "C",
+                    "text": "To study more often",
+                    "isRight": 0
+                },
+                {
+                    "label": "D",
+                    "text": "How to greet friends properly",
+                    "isRight": 0
+                }
+            ]
+        }
+    ]
+}"""
+    json_text = json.loads(text2)
+    print(json_text)
+   
+   
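get_article2.py passes CheckArticleResult.get_article_1 as check_fucn to gpt.chatgpt.get_answer_from_gpt; as a stand-alone illustration of what that validator accepts and rejects (no GPT call involved):

# get_article_1 returns True only for valid JSON containing all five required keys.
from gpt.gpt_check import CheckArticleResult

good = '{"difficultSentences": [], "usedMeanIds": [], "englishArticle": "x", "chineseArticle": "y", "questions": []}'
bad = '{"englishArticle": "x"}'
print(CheckArticleResult.get_article_1(good))        # True  - all required keys present
print(CheckArticleResult.get_article_1(bad))         # False - missing keys
print(CheckArticleResult.get_article_1("not json"))  # False - not valid JSON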

+ 31 - 0
gpt/query_oss_file.py

@@ -0,0 +1,31 @@
+# -*- coding: UTF-8 -*-
+if __name__ == '__main__':
+    import os
+    os.chdir("..")
+
+from tools.loglog import logger
+import oss2
+from oss2.credentials import EnvironmentVariableCredentialsProvider
+import json
+
+def query_file_content(key):
+    """
+    :param key: OSS的键;key = 'study/article/14138566'
+    :return: 文章的json
+    """
+    auth = oss2.ProviderAuth(EnvironmentVariableCredentialsProvider())
+    bucket = oss2.Bucket(auth, 'oss-cn-hangzhou.aliyuncs.com', 'qingti-private')
+    try:
+        object_stream = bucket.get_object(key)
+        content = b''.join(object_stream)
+       
+        text_content = content.decode('utf-8')
+        json_content = json.loads(text_content)
+        return json_content
+    except oss2.exceptions.NoSuchKey as e:
+        return 0
+    except Exception as e:
+        logger.error(f"{type(e).__name__}: {e}")
+
+if __name__ == '__main__':
+    print(query_file_content('study/article/10613145')["id"])
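query_file_content relies on oss2's EnvironmentVariableCredentialsProvider, which per the oss2 documentation reads OSS_ACCESS_KEY_ID and OSS_ACCESS_KEY_SECRET from the environment; a minimal setup sketch (placeholder values, and note the function returns 0 when the key does not exist):

# The OSS credentials must be exported before query_file_content is called.
import os
os.environ.setdefault("OSS_ACCESS_KEY_ID", "<your-access-key-id>")          # placeholder
os.environ.setdefault("OSS_ACCESS_KEY_SECRET", "<your-access-key-secret>")  # placeholder

from gpt.query_oss_file import query_file_content
print(query_file_content("study/article/14138566") or "key not found")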

+ 68 - 0
main.py

@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+import time
+from threading import Thread
+
+from fastapi import FastAPI,Request
+from fastapi.responses import PlainTextResponse
+from typing import Callable
+from core.api_get_article import router as r1
+from core.api_get_audio import router as r2
+from core.api_get_article2 import router as r3
+from core.api_get_word import router as r4
+from core.api_get_spoken_language import router as r5
+from core.api_get_article3 import router as r6
+
+from tools.loglog import logger
+from tools.del_expire_file import run_del_normal
+from core.respone_format import *
+
+app = FastAPI(title="AI相关功能接口", version="1.1")
+
+app.include_router(r2, tags=["用户管理"])
+app.include_router(r1, tags=["青提文章管理"])
+app.include_router(r2, tags=["音频管理"])
+app.include_router(r3, tags=["春笋文章管理"])
+app.include_router(r4, tags=["生成word文档"])
+app.include_router(r5, tags=["口语评测"])
+app.include_router(r6, tags=["deepseek文章"])
+
+@app.middleware("http")
+async def add_process_time_header(request: Request, call_next: Callable):
+    start_time = time.time()
+    real_ip = request.headers.get("X-Real-IP")
+    params = request.query_params
+    path = request.url.path
+    try:
+        body = await request.json() if request.method in ["POST", "PUT", "PATCH"] else ""
+    except Exception:
+        body = ""
+    logger.info(f"\n正式接口请求:{real_ip} {request.method} {path}\n查询参数:{params}\n携带参数:{body}")
+
+    try:
+        response = await call_next(request)
+    except Exception as e:
+        logger.error(f"{type(e).__name__},{e}")
+        return resp_500(message=f"{type(e).__name__},{e}")
+
+    process_time = str(round(time.time() - start_time, 2))
+    response.headers["X-Process-Time"] = process_time
+
+   
+    if path not in ['/','/tts']:
+        with open('log/time_log.txt', encoding='utf-8', mode='a')as f:
+            t = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+            f.write(f"{t}  路径:{path} - 用时:{process_time}\n")
+    return response
+
+@app.get("/")
+@app.post("/")
+def hello():
+    return PlainTextResponse("hello world")
+
+del_file_thread = Thread(target=run_del_normal, daemon=True)
+del_file_thread.start()
+
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run("main:app",host="0.0.0.0", port=8000)

+ 68 - 0
main_9000.py

@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+import time
+from threading import Thread
+
+from fastapi import FastAPI,Request
+from fastapi.responses import PlainTextResponse
+from typing import Callable
+from core.api_get_article import router as r1
+from core.api_get_audio import router as r2
+from core.api_get_article2 import router as r3
+from core.api_get_word import router as r4
+from core.api_get_spoken_language import router as r5
+from core.api_get_article3 import router as r6
+
+from tools.loglog import logger
+from tools.del_expire_file import run_del_normal
+from core.respone_format import *
+
+app = FastAPI(title="AI相关功能接口", version="1.1")
+
+app.include_router(r2, tags=["用户管理"])
+app.include_router(r1, tags=["青提文章管理"])
+app.include_router(r2, tags=["音频管理"])
+app.include_router(r3, tags=["春笋文章管理"])
+app.include_router(r4, tags=["生成word文档"])
+app.include_router(r5, tags=["口语评测"])
+app.include_router(r6, tags=["deepseek文章"])
+
+@app.middleware("http")
+async def add_process_time_header(request: Request, call_next: Callable):
+    start_time = time.time()
+    real_ip = request.headers.get("X-Real-IP")
+    params = request.query_params
+    path = request.url.path
+    try:
+        body = await request.json() if request.method in ["POST", "PUT", "PATCH"] else ""
+    except Exception:
+        body = ""
+    logger.info(f"\n测试接口请求:{real_ip} {request.method} {path}\n查询参数:{params}\n携带参数:{body}")
+
+    try:
+        response = await call_next(request)
+    except Exception as e:
+        logger.error(f"{type(e).__name__},{e}")
+        return resp_500(message=f"{type(e).__name__},{e}")
+
+    process_time = str(round(time.time() - start_time,2))
+    response.headers["X-Process-Time"] = process_time
+
+   
+    if path not in ['/','/tts']:
+        with open('log/time_log.txt', encoding='utf-8', mode='a')as f:
+            t = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+            f.write(f"{t}  路径:{path} - 用时:{process_time}\n")
+    return response
+
+@app.get("/")
+@app.post("/")
+def hello():
+    return PlainTextResponse("hello world")
+
+del_file_thread = Thread(target=run_del_normal, daemon=True)
+del_file_thread.start()
+
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run("main_9000:app", port=9000)

File diff suppressed because it is too large
+ 16 - 0
make_docx_demo/check_test_table/aaaaaaaaaa.py


+ 39 - 0
make_docx_demo/check_test_table/baidu_ocr.py

@@ -0,0 +1,39 @@
+# -*- coding:utf-8 -*-
+import base64
+import requests
+import time
+
+access_token = None
+token_time = 0
+
+
+def high_ocr_location(pic_path):
+    """
+    通用文字识别(高精度含位置版)
+    """
+    global access_token, token_time
+    with open(pic_path, 'rb') as f:
+        img = base64.b64encode(f.read())
+
+    if time.time()-token_time>3600*8:
+        print("获取token啦")
+        url_token = 'https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=BaL3yDflxe7Z5001vF8rAzKu&client_secret=xs40HshFLDDyWgCCfgnz86zWhQ8X1s5f'
+        token = requests.post(url_token).json()
+       
+        access_token = token['access_token']
+        token_time = time.time()
+
+    request_url = "https://aip.baidubce.com/rest/2.0/ocr/v1/accurate"
+
+    params = {"image": img,"recognize_granularity":"small"}
+    request_url = request_url + "?access_token=" + access_token
+    headers = {'content-type': 'application/x-www-form-urlencoded'}
+    response = requests.post(request_url, data=params, headers=headers)
+    if response:
+        r_json = response.json()
+       
+        return r_json
+
+
+if __name__ == '__main__':
+    print(high_ocr_location(r"C:\Users\86131\Desktop\4.jpg"))
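image_preprocess.py only reads words_result → words / location / chars[i].char / chars[i].location from the OCR reply, so a stub in that shape is enough to exercise the downstream parsing without calling the Baidu API (field values below are invented):

# Minimal stub matching the structure image_preprocess.py expects from high_ocr_location.
fake_ocr = {
    "words_result": [
        {
            "words": "1.apple",
            "location": {"left": 122, "top": 80, "width": 90, "height": 28},
            "chars": [
                {"char": "1", "location": {"left": 122, "top": 80, "width": 10, "height": 28}},
                {"char": ".", "location": {"left": 132, "top": 80, "width": 6, "height": 28}},
                {"char": "a", "location": {"left": 138, "top": 80, "width": 12, "height": 28}},
            ],
        }
    ]
}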

+ 4 - 0
make_docx_demo/check_test_table/check_table.py

@@ -0,0 +1,4 @@
+# -*- coding:utf-8 -*-
+"""
+筛查表检测模块
+"""

+ 388 - 0
make_docx_demo/check_test_table/image_preprocess.py

@@ -0,0 +1,388 @@
+# -*- coding:utf-8 -*-
+"""
+需要增加,2个上下单词的黑点,靠近哪一边的算法,从而解决上下错位的问题
+
+"""
+import re
+import time
+
+from PIL import Image, ImageFilter
+import numpy as np
+import cv2
+import json
+from pathlib import Path
+from baidu_ocr import high_ocr_location
+
+
+def test_log(text: str):
+    if type(text) == dict:
+        text = json.dumps(text, ensure_ascii=False)
+    with open("log.txt", "w", encoding="utf-8") as f:
+        f.write(str(text))
+
+
+class PreprocessImage:
+    def __init__(self, image_path):
+        self.image_path = image_path 
+        self.template_image_path = "template.jpg" 
+
+        self.image = cv2.imread(image_path) 
+        self.template_image = cv2.imread(self.template_image_path)
+        self.temp_h, self.temp_w = self.template_image.shape[:2] 
+
+    def correct_image(self, point_tuple,image_path='sharpen_image.jpg'):
+        """图像矫正
+        point_tuple:传过来的4个点坐标的元组"""
+        sharpen_image = cv2.imread(image_path)
+
+        src_points = np.float32(point_tuple)
+
+       
+       
+        dst_points = np.float32([[122, 78], [1070, 78], [122, 2730], [1070, 2730]]) 
+
+        M = cv2.getPerspectiveTransform(src_points, dst_points)
+       
+        transformed_image = cv2.warpPerspective(sharpen_image, M, (self.temp_w, self.temp_h))
+
+       
+        gray = cv2.cvtColor(transformed_image, cv2.COLOR_BGR2GRAY)
+
+       
+        blurred = cv2.GaussianBlur(gray, (5, 5), 0)
+
+       
+       
+
+       
+       
+       
+       
+       
+       
+       
+       
+
+       
+        # "blurred" is single-channel here, so GRAY2RGB is the correct conversion
+        # (BGR2RGB on a 1-channel image raises an OpenCV channel-count error)
+        image_rgb = cv2.cvtColor(blurred, cv2.COLOR_GRAY2RGB)
+       
+        cv2.imwrite('transformed_image.jpg', image_rgb)
+
+    def sharpen_image(self):
+       
+        img = Image.open(self.image_path)
+        sharpened_img = img.filter(ImageFilter.SHARPEN)
+        sharpened_img.save('sharpen_image.jpg') 
+
+    @staticmethod
+    def parser_ocr(ocr_data):
+        p1, p2, p3, p4 = None, None, None, None
+        for word_item in ocr_data['words_result']:
+            text: str = word_item['words']
+            if text.startswith("1."):
+                left_char_location = word_item['chars'][0]['location']
+                p1 = (left_char_location['left'], left_char_location['top']) 
+            elif text.startswith("51."):
+                left_char_location = word_item['chars'][0]['location']
+                p2 = (left_char_location['left'], left_char_location['top']) 
+            elif text.startswith("50."):
+                left_char_location = word_item['chars'][0]['location']
+                p3 = (left_char_location['left'], left_char_location['top']) 
+            elif text.startswith("100."):
+                left_char_location = word_item['chars'][0]['location']
+                p4 = (left_char_location['left'], left_char_location['top']) 
+
+           
+           
+           
+           
+           
+           
+           
+           
+           
+           
+
+        if any([not p1, not p2, not p3, not p4]):
+            print([p1, p2, p3, p4])
+            raise Exception("矫正坐标不对")
+
+        return [p1, p2, p3, p4]
+
+    def run(self):
+       
+        self.sharpen_image() 
+        ocr_data = high_ocr_location(self.image_path)
+        point_tuple = self.parser_ocr(ocr_data)
+        self.correct_image(point_tuple)
+
+
+class ComparisonAlgorithm:
+    """比较算法核心"""
+
+    def __init__(self, transformed_image, ocr_data):
+        self.transformed_image = cv2.imread(transformed_image) 
+        self.ocr_data = ocr_data 
+        self.order_ocr_data = {} 
+        self.already_find_index = set() 
+
+        self.image = Image.open(transformed_image) 
+
+    @staticmethod
+    def separate_numbers_and_letters(text):
+        """正则提取数字和字母"""
+        numbers = "".join(re.findall(r'\d+', text)) 
+        letters = "".join(re.findall(r'[a-zA-Z]+', text)) 
+        return numbers, letters
+
+    def is_line_word(self, x, y):
+        """判断点的颜色是否符合标准; cv2取点速度没有pillow快
+        指定要查询的点的坐标 (x, y)"""
+
+       
+       
+       
+
+        rgb_color = self.image.getpixel((x, y))
+        r, g, b = rgb_color
+
+        if all([r < 130, g < 130, b < 130]):
+            return 1
+        return 0
+
+    def __make_order_ocr_data(self):
+        for word_item in self.ocr_data['words_result']:
+            word = word_item['words']
+            if word[0].isdigit() and len(word) >= 2: 
+               
+                word_text = word_item['words']
+                location = word_item['location'] 
+                first_char_location = word_item['chars'][0]['location'] 
+                end_char_location = word_item['chars'][-1]['location'] 
+                chars_location = word_item['chars'] 
+
+                numbers, letters = self.separate_numbers_and_letters(word_text)
+                if numbers not in self.order_ocr_data: 
+                    self.order_ocr_data[numbers] = {"word": letters, "location": location, "chars_location": chars_location,
+                                                    "first_char_location": first_char_location, "end_char_location": end_char_location}
+
+    def color_algorithm_1(self, int_index, word_location, first_char_location, word):
+        """
+        颜色算法1,正常单词
+        int_index:整数序号
+        word_location:这个序号的单词的整行位置;对应 self.order_ocr_data[current_index]['location']
+        first_char_location: 第一个字母的位置;对应 self.order_ocr_data[current_index]['first_char_location']
+        word:具体序号的单词,标识用
+        """
+        next_index = str(int_index + 1) 
+        black_count_1 = 0 
+
+        for x in range(word_location['left'], word_location['left'] + word_location['width']):
+
+           
+            b_top, b_height = first_char_location['top'], int(first_char_location['height'])
+            bottom_location_y = b_top + b_height 
+
+            if int_index == 50 or int_index == 100:
+                next_word_top_location = bottom_location_y + b_height * 2
+           
+            elif next_index in self.order_ocr_data and (
+                    self.order_ocr_data[next_index]['first_char_location']['top'] - bottom_location_y) < b_height:
+                next_word_location = self.order_ocr_data[next_index]['first_char_location']
+                next_word_top_location = next_word_location['top'] + int(next_word_location['height'] / 8)
+            else:
+               
+               
+                next_word_top_location = bottom_location_y + int(b_height * 0.5)
+
+            for y in range(bottom_location_y, next_word_top_location):
+                result = self.is_line_word(x, y)
+                if result:
+                    black_count_1 += 1
+                    break
+
+        black_count_per = black_count_1 / (word_location['width'])
+        if black_count_per > 0.8: 
+            print(f"{int_index}正常划线{black_count_per:.2f}", word)
+            self.already_find_index.add(int_index)
+            return int_index 
+       
+       
+
+    def color_algorithm_2(self, int_index, word_location, word):
+        """颜色算法2,单词自身中间的黑点率
+        int_index:整数序号
+        word_location:这个序号的单词的整行位置;对应 self.order_ocr_data[current_index]['location']
+        word:具体序号的单词,标识用
+        """
+        black_count_2 = 0 
+        for x in range(word_location['left'], word_location['left'] + word_location['width']):
+
+            mid = word_location['top'] + int(word_location['height'] / 2)
+            bottom = word_location['top'] + int(word_location['height']) + 5 
+
+            for y in range(mid, bottom):
+                result = self.is_line_word(x, y)
+                if result:
+                    black_count_2 += 1
+                    break
+
+        black_count_per = black_count_2 / (word_location['width'])
+        if black_count_per > 0.92: 
+            print(f"{int_index}中间划线{black_count_per:.2f}", word)
+            self.already_find_index.add(int_index)
+            return int_index 
+       
+       
+
+    def color_algorithm_3(self, int_index, word_location, end_char_location, word):
+        """
+        颜色算法3,正常单词的左右各推20个像素点,判断下黑点率
+        int_index:整数序号
+        word_location:这个序号的单词的整行位置;对应 self.order_ocr_data[current_index]['location']
+        end_char_location: 最后一个字母的位置;对应 self.order_ocr_data[current_index]['end_char_location']
+        word:具体序号的单词,标识用
+        """
+        next_index = str(int_index + 1) 
+        black_count_1 = 0 
+        moving_distance = 20
+
+        """这是在获取所有需要的横向左右x坐标"""
+        all_x = [] 
+        for i in range(word_location['left'] - moving_distance, word_location['left']):
+            all_x.append(i)
+        word_right_loca = word_location['left'] + word_location['width'] + 2 
+        for i in range(word_right_loca, word_right_loca + moving_distance):
+            all_x.append(i)
+
+        b_top, b_height = word_location['top'], int(word_location['height'])
+        bottom_location_y = b_top + b_height 
+       
+        bottom_location_y_half = end_char_location['top'] + int(end_char_location['height'] * 0.8) 
+
+        for x in all_x:
+            if int_index == 50 or int_index == 100:
+                next_word_top_location = bottom_location_y + b_height * 2
+           
+            elif next_index in self.order_ocr_data and (
+                    self.order_ocr_data[next_index]['first_char_location']['top'] - bottom_location_y) < b_height:
+                next_word_location = self.order_ocr_data[next_index]['first_char_location']
+                next_word_top_location = next_word_location['top'] + 3 
+            else:
+               
+               
+                next_word_top_location = bottom_location_y + int(b_height * 0.3)
+
+            for y in range(bottom_location_y_half, next_word_top_location):
+                result = self.is_line_word(x, y)
+                if result:
+                    black_count_1 += 1
+                    break
+
+        black_count_per = black_count_1 / len(all_x)
+        if black_count_per > 0.4: 
+            print(f"{int_index}前后双边划线{black_count_per:.2f}", word)
+            self.already_find_index.add(int_index)
+            return int_index 
+       
+       
+
+    def color_algorithm_4(self, int_index, word_location, chars_location, word):
+        """灰度图极差算法"""
+       
+       
+
+        for char_index, char_dict in enumerate(chars_location):
+            if char_dict['char'] == '.' or char_dict['char'] == ',':
+                point_location, point_char_index = char_dict['location'], char_index
+                break
+        else: 
+            char_index = 2
+            point_location, point_char_index = chars_location[char_index]['location'], char_index
+
+        white_block = 0
+        point_location_half = point_location['top'] + point_location['height']//2
+        y1, y2 = point_location_half, point_location_half + point_location['height']
+        for x in range(point_location['left'], point_location['left'] + point_location['width']):
+            roi_image = self.transformed_image[y1:y2, x:x + 1]
+            min_val = np.min(roi_image)
+            max_val = np.max(roi_image)
+            range_value = max_val - min_val
+            if min_val>110 or range_value < 90:
+                white_block +=1
+
+        if white_block/point_location['width'] < 0.1:
+            print(f"{int_index}极差算法 {word},左{point_location['left']},宽{point_location['width']},高{point_location['height']},{y1},{y2}")
+            self.already_find_index.add(int_index)
+            return int_index
+
+       
+        white_block = 0
+        end_char_location = chars_location[-2]['location']
+        bottom = end_char_location['top'] + end_char_location['height']
+        y1, y2 = bottom+2, bottom + end_char_location['height']-10
+        for x in range(end_char_location['left'], end_char_location['left'] + point_location['width']):
+            roi_image = self.transformed_image[y1:y2, x:x + 1]
+            min_val = np.min(roi_image)
+            max_val = np.max(roi_image)
+            range_value = max_val - min_val
+            if min_val>110 or range_value < 90:
+                white_block +=1
+
+        if white_block/point_location['width'] < 0.1:
+            print(f"{int_index}极差算法二 {word},左{point_location['left']},宽{point_location['width']},高{point_location['height']},{y1},{y2}")
+            self.already_find_index.add(int_index)
+            return int_index
+
+
+
+   
+    def core_algorithm(self):
+        self.__make_order_ocr_data()
+
+        for int_index in range(1, 101):
+            current_index = str(int_index)
+            if current_index not in self.order_ocr_data:
+                continue
+
+            current_dict = self.order_ocr_data[current_index]
+            word = current_dict['word'] 
+            word_location = current_dict['location'] 
+            first_char_location = current_dict['first_char_location'] 
+            end_char_location = current_dict['end_char_location'] 
+            chars_location = current_dict['chars_location']
+
+            if self.color_algorithm_1(int_index=int_index, word_location=word_location, first_char_location=first_char_location, word=word):
+                continue
+
+            if self.color_algorithm_2(int_index=int_index, word_location=word_location, word=word):
+                continue
+
+            if self.color_algorithm_3(int_index=int_index, word_location=word_location, end_char_location=end_char_location, word=word):
+                continue
+
+            if self.color_algorithm_4(int_index=int_index, word_location=word_location, chars_location=chars_location, word=word):
+                continue
+
+
+if __name__ == '__main__':
+   
+    image_path = r"C:\Users\86131\Desktop\4.jpg"
+
+   
+    script_path = Path(__file__).resolve()
+   
+    script_directory = script_path.parent
+   
+    transformed_image_path = str(Path(script_directory, r"transformed_image.jpg"))
+
+   
+    pi = PreprocessImage(image_path)
+    pi.run()
+
+    transformed_image_ocr_data = high_ocr_location(transformed_image_path) 
+    test_log(transformed_image_ocr_data) 
+
+    ca = ComparisonAlgorithm(transformed_image=transformed_image_path, ocr_data=transformed_image_ocr_data)
+    ca.core_algorithm()
+

+ 363 - 0
make_docx_demo/check_test_table/image_preprocess2.py

@@ -0,0 +1,363 @@
+# -*- coding:utf-8 -*-
+"""
+20250114 在单词上划线,分别有斜杠、反斜杠、横着划线三种方式;找到它们的位置
+
+"""
+import re
+import time
+
+from PIL import Image, ImageFilter
+import numpy as np
+import cv2
+import json
+from pathlib import Path
+from baidu_ocr import high_ocr_location
+
+
+def test_log(text: str):
+    if type(text) == dict:
+        text = json.dumps(text, ensure_ascii=False)
+    with open("log.txt", "w", encoding="utf-8") as f:
+        f.write(str(text))
+
+
+class PreprocessImage:
+    def __init__(self, image_path):
+        self.image_path = image_path 
+        self.template_image_path = "template.jpg" 
+
+        self.image = cv2.imread(image_path) 
+        self.template_image = cv2.imread(self.template_image_path)
+        self.temp_h, self.temp_w = self.template_image.shape[:2] 
+
+    def correct_image(self, point_tuple,image_path='sharpen_image.jpg'):
+        """图像矫正
+        point_tuple:传过来的4个点坐标的元组"""
+        sharpen_image = cv2.imread(image_path)
+
+        src_points = np.float32(point_tuple)
+
+       
+       
+        dst_points = np.float32([[122, 78], [1070, 78], [122, 2730], [1070, 2730]]) 
+
+        M = cv2.getPerspectiveTransform(src_points, dst_points)
+       
+        transformed_image = cv2.warpPerspective(sharpen_image, M, (self.temp_w, self.temp_h))
+
+       
+        gray = cv2.cvtColor(transformed_image, cv2.COLOR_BGR2GRAY)
+
+       
+        blurred = cv2.GaussianBlur(gray, (5, 5), 0)
+
+       
+       
+
+       
+       
+       
+       
+       
+       
+       
+       
+
+       
+        # "blurred" is single-channel here, so GRAY2RGB is the correct conversion
+        # (BGR2RGB on a 1-channel image raises an OpenCV channel-count error)
+        image_rgb = cv2.cvtColor(blurred, cv2.COLOR_GRAY2RGB)
+       
+        cv2.imwrite('transformed_image.jpg', image_rgb)
+
+    def sharpen_image(self):
+       
+        img = Image.open(self.image_path)
+        sharpened_img = img.filter(ImageFilter.SHARPEN)
+        sharpened_img.save('sharpen_image.jpg') 
+
+    @staticmethod
+    def parser_ocr(ocr_data):
+        for word_item in ocr_data['words_result']:
+           
+            for char_item in word_item['chars']:
+
+                pass
+
+
+
+    def run(self):
+       
+        self.sharpen_image() 
+        ocr_data = high_ocr_location(self.image_path)
+        point_tuple = self.parser_ocr(ocr_data)
+        self.correct_image(point_tuple)
+
+
+class ComparisonAlgorithm:
+    """比较算法核心"""
+
+    def __init__(self, transformed_image, ocr_data):
+        self.transformed_image = cv2.imread(transformed_image) 
+        self.ocr_data = ocr_data 
+        self.order_ocr_data = {} 
+        self.already_find_index = set() 
+
+        self.image = Image.open(transformed_image) 
+
+    @staticmethod
+    def separate_numbers_and_letters(text):
+        """正则提取数字和字母"""
+        numbers = "".join(re.findall(r'\d+', text)) 
+        letters = "".join(re.findall(r'[a-zA-Z]+', text)) 
+        return numbers, letters
+
+    def is_line_word(self, x, y):
+        """判断点的颜色是否符合标准; cv2取点速度没有pillow快
+        指定要查询的点的坐标 (x, y)"""
+
+       
+       
+       
+
+        rgb_color = self.image.getpixel((x, y))
+        r, g, b = rgb_color
+
+        if all([r < 130, g < 130, b < 130]):
+            return 1
+        return 0
+
+    def __make_order_ocr_data(self):
+        for word_item in self.ocr_data['words_result']:
+            word = word_item['words']
+            if word[0].isdigit() and len(word) >= 2: 
+               
+                word_text = word_item['words']
+                location = word_item['location'] 
+                first_char_location = word_item['chars'][0]['location'] 
+                end_char_location = word_item['chars'][-1]['location'] 
+                chars_location = word_item['chars'] 
+
+                numbers, letters = self.separate_numbers_and_letters(word_text)
+                if numbers not in self.order_ocr_data: 
+                    self.order_ocr_data[numbers] = {"word": letters, "location": location, "chars_location": chars_location,
+                                                    "first_char_location": first_char_location, "end_char_location": end_char_location}
+
+    def color_algorithm_1(self, int_index, word_location, first_char_location, word):
+        """
+        颜色算法1,正常单词
+        int_index:整数序号
+        word_location:这个序号的单词的整行位置;对应 self.order_ocr_data[current_index]['location']
+        first_char_location: 第一个字母的位置;对应 self.order_ocr_data[current_index]['first_char_location']
+        word:具体序号的单词,标识用
+        """
+        next_index = str(int_index + 1) 
+        black_count_1 = 0 
+
+        for x in range(word_location['left'], word_location['left'] + word_location['width']):
+
+           
+            b_top, b_height = first_char_location['top'], int(first_char_location['height'])
+            bottom_location_y = b_top + b_height 
+
+            if int_index == 50 or int_index == 100:
+                next_word_top_location = bottom_location_y + b_height * 2
+           
+            elif next_index in self.order_ocr_data and (
+                    self.order_ocr_data[next_index]['first_char_location']['top'] - bottom_location_y) < b_height:
+                next_word_location = self.order_ocr_data[next_index]['first_char_location']
+                next_word_top_location = next_word_location['top'] + int(next_word_location['height'] / 8)
+            else:
+               
+               
+                next_word_top_location = bottom_location_y + int(b_height * 0.5)
+
+            for y in range(bottom_location_y, next_word_top_location):
+                result = self.is_line_word(x, y)
+                if result:
+                    black_count_1 += 1
+                    break
+
+        black_count_per = black_count_1 / (word_location['width'])
+        if black_count_per > 0.8: 
+            print(f"{int_index}正常划线{black_count_per:.2f}", word)
+            self.already_find_index.add(int_index)
+            return int_index 
+       
+       
+
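As a concrete illustration of the 0.8 threshold above (numbers invented): for a word box 120 px wide, at least 0.8 × 120 = 96 of the scanned columns must contain a dark pixel somewhere between the bottom edge of the first character and the top of the next numbered entry before the word is reported as underlined.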
+    def color_algorithm_2(self, int_index, word_location, word):
+        """颜色算法2,单词自身中间的黑点率
+        int_index:整数序号
+        word_location:这个序号的单词的整行位置;对应 self.order_ocr_data[current_index]['location']
+        word:具体序号的单词,标识用
+        """
+        black_count_2 = 0 
+        for x in range(word_location['left'], word_location['left'] + word_location['width']):
+
+            mid = word_location['top'] + int(word_location['height'] / 2)
+            bottom = word_location['top'] + int(word_location['height']) + 5 
+
+            for y in range(mid, bottom):
+                result = self.is_line_word(x, y)
+                if result:
+                    black_count_2 += 1
+                    break
+
+        black_count_per = black_count_2 / (word_location['width'])
+        if black_count_per > 0.92: 
+            print(f"{int_index}中间划线{black_count_per:.2f}", word)
+            self.already_find_index.add(int_index)
+            return int_index 
+       
+       
+
+    def color_algorithm_3(self, int_index, word_location, end_char_location, word):
+        """
+        Color algorithm 3: push 20 pixels out on each side of the word and check the dark-pixel ratio there.
+        int_index: integer sequence number of the entry
+        word_location: location of the whole line for this entry; corresponds to self.order_ocr_data[current_index]['location']
+        end_char_location: location of the last letter; corresponds to self.order_ocr_data[current_index]['end_char_location']
+        word: the word at this index, used only for labelling the output
+        """
+        next_index = str(int_index + 1) 
+        black_count_1 = 0 
+        moving_distance = 20
+
+        """这是在获取所有需要的横向左右x坐标"""
+        all_x = [] 
+        for i in range(word_location['left'] - moving_distance, word_location['left']):
+            all_x.append(i)
+        word_right_loca = word_location['left'] + word_location['width'] + 2 
+        for i in range(word_right_loca, word_right_loca + moving_distance):
+            all_x.append(i)
+
+        b_top, b_height = word_location['top'], int(word_location['height'])
+        bottom_location_y = b_top + b_height 
+       
+        bottom_location_y_half = end_char_location['top'] + int(end_char_location['height'] * 0.8) 
+
+        for x in all_x:
+            if int_index == 50 or int_index == 100:
+                next_word_top_location = bottom_location_y + b_height * 2
+           
+            elif next_index in self.order_ocr_data and (
+                    self.order_ocr_data[next_index]['first_char_location']['top'] - bottom_location_y) < b_height:
+                next_word_location = self.order_ocr_data[next_index]['first_char_location']
+                next_word_top_location = next_word_location['top'] + 3 
+            else:
+               
+               
+                next_word_top_location = bottom_location_y + int(b_height * 0.3)
+
+            for y in range(bottom_location_y_half, next_word_top_location):
+                result = self.is_line_word(x, y)
+                if result:
+                    black_count_1 += 1
+                    break
+
+        black_count_per = black_count_1 / len(all_x)
+        if black_count_per > 0.4: 
+            print(f"{int_index}前后双边划线{black_count_per:.2f}", word)
+            self.already_find_index.add(int_index)
+            return int_index 
+       
+       
+
+    def color_algorithm_4(self, int_index, word_location, chars_location, word):
+        """灰度图极差算法"""
+       
+       
+
+        for char_index, char_dict in enumerate(chars_location):
+            if char_dict['char'] == '.' or char_dict['char'] == ',':
+                point_location, point_char_index = char_dict['location'], char_index
+                break
+        else: 
+            char_index = 2
+            point_location, point_char_index = chars_location[char_index]['location'], char_index
+
+        white_block = 0
+        point_location_half = point_location['top'] + point_location['height']//2
+        y1, y2 = point_location_half, point_location_half + point_location['height']
+        for x in range(point_location['left'], point_location['left'] + point_location['width']):
+            roi_image = self.transformed_image[y1:y2, x:x + 1]
+            min_val = np.min(roi_image)
+            max_val = np.max(roi_image)
+            range_value = max_val - min_val
+            if min_val>110 or range_value < 90:
+                white_block +=1
+
+        if white_block/point_location['width'] < 0.1:
+            print(f"{int_index}极差算法 {word},左{point_location['left']},宽{point_location['width']},高{point_location['height']},{y1},{y2}")
+            self.already_find_index.add(int_index)
+            return int_index
+
+       
+        white_block = 0
+        end_char_location = chars_location[-2]['location']
+        bottom = end_char_location['top'] + end_char_location['height']
+        y1, y2 = bottom+2, bottom + end_char_location['height']-10
+        for x in range(end_char_location['left'], end_char_location['left'] + point_location['width']):
+            roi_image = self.transformed_image[y1:y2, x:x + 1]
+            min_val = np.min(roi_image)
+            max_val = np.max(roi_image)
+            range_value = max_val - min_val
+            if min_val>110 or range_value < 90:
+                white_block +=1
+
+        if white_block/point_location['width'] < 0.1:
+            print(f"{int_index}极差算法二 {word},左{point_location['left']},宽{point_location['width']},高{point_location['height']},{y1},{y2}")
+            self.already_find_index.add(int_index)
+            return int_index
+
+
+
+   
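A standalone sketch of the per-column test used by the range algorithm above, with the thresholds copied from the code; a 1-pixel-wide column counts as blank when it is uniformly bright or has almost no contrast:

    import numpy as np

    def column_is_blank(column, bright_floor=110, contrast_floor=90):
        # column: a (h, 1, 3) slice of the BGR image at a single x position
        lo, hi = int(np.min(column)), int(np.max(column))
        return lo > bright_floor or (hi - lo) < contrast_floor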
+    def core_algorithm(self):
+        self.__make_order_ocr_data()
+
+        for int_index in range(1, 101):
+            current_index = str(int_index)
+            if current_index not in self.order_ocr_data:
+                continue
+
+            current_dict = self.order_ocr_data[current_index]
+            word = current_dict['word'] 
+            word_location = current_dict['location'] 
+            first_char_location = current_dict['first_char_location'] 
+            end_char_location = current_dict['end_char_location'] 
+            chars_location = current_dict['chars_location']
+
+            if self.color_algorithm_1(int_index=int_index, word_location=word_location, first_char_location=first_char_location, word=word):
+                continue
+
+            if self.color_algorithm_2(int_index=int_index, word_location=word_location, word=word):
+                continue
+
+            if self.color_algorithm_3(int_index=int_index, word_location=word_location, end_char_location=end_char_location, word=word):
+                continue
+
+            if self.color_algorithm_4(int_index=int_index, word_location=word_location, chars_location=chars_location, word=word):
+                continue
+
+
+if __name__ == '__main__':
+   
+    image_path = r"C:\Users\86131\Desktop\4.jpg"
+
+   
+    script_path = Path(__file__).resolve()
+   
+    script_directory = script_path.parent
+   
+    transformed_image_path = str(Path(script_directory, r"transformed_image.jpg"))
+
+   
+    pi = PreprocessImage(image_path)
+    pi.run()
+
+    transformed_image_ocr_data = high_ocr_location(transformed_image_path) 
+    test_log(transformed_image_ocr_data) 
+
+    ca = ComparisonAlgorithm(transformed_image=transformed_image_path, ocr_data=transformed_image_ocr_data)
+    ca.core_algorithm()
+

File diff suppressed because it is too large
+ 0 - 0
make_docx_demo/check_test_table/log.txt


+ 42 - 0
make_docx_demo/check_test_table/mark_ocr_loca.py

@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+"""测试ocr的位置,与预期是否一致"""
+from PIL import Image, ImageDraw
+import json
+from pathlib import Path
+
+
+def draw_rectangles_on_image(image_path, rectangles, output_path):
+   
+    image = Image.open(image_path)
+    draw = ImageDraw.Draw(image)
+
+   
+    for rectangle in rectangles:
+        top_left = (rectangle['left'], rectangle['top'])
+        bottom_right = (rectangle['left'] + rectangle['width'], rectangle['top'] + rectangle['height'])
+        draw.rectangle([top_left, bottom_right], outline='red', width=2)
+
+   
+    image.save(output_path)
+
+
+rectangles = [
+   
+   
+   
+]
+
+with open("log.txt", "r", encoding="utf-8") as f:
+    try:
+        ocr_data = json.loads(f.read())
+    except json.decoder.JSONDecodeError:
+        print("json格式化错误")
+
+for i in ocr_data['words_result']:
+    for char_loca in i['chars']:
+        rectangles.append(char_loca['location'])
+
+script_path = Path(__file__).resolve()
+script_directory = script_path.parent
+transformed_image_path = str(Path(script_directory, r"transformed_image.jpg"))
+draw_rectangles_on_image(transformed_image_path, rectangles, 'output_with_rectangles.jpg')
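If word-level boxes are also worth eyeballing, a small variant of the loop above could collect them too (hypothetical addition, reusing the variables already defined in this script):

    word_rectangles = [item['location'] for item in ocr_data['words_result']]
    draw_rectangles_on_image(transformed_image_path, word_rectangles, 'output_with_word_boxes.jpg')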

BIN
make_docx_demo/check_test_table/output_with_rectangles.jpg


BIN
make_docx_demo/check_test_table/sharpen_image.jpg


BIN
make_docx_demo/check_test_table/template.jpg


BIN
make_docx_demo/check_test_table/transformed_image.jpg


File diff suppressed because it is too large
+ 4 - 0
make_docx_demo/data.py


+ 146 - 0
make_docx_demo/docx_other_func.py

@@ -0,0 +1,146 @@
+# -*- coding: utf-8 -*-
+from functools import wraps
+import time
+import io
+import qrcode
+from docx.shared import RGBColor
+from base64 import b64decode
+import datetime
+
+import matplotlib.pyplot as plt
+plt.switch_backend('Agg')
+from io import BytesIO
+from tools.loglog import logger, log_err_e
+from docx import Document
+from docx.shared import Inches,Cm
+from threading import Lock
+from config.read_config import address
+
+lock = Lock()
+
+width_cm, height_cm = 5.4, 3
+width_in = width_cm
+height_in = height_cm
+
+plt.figure(figsize=(width_in, height_in)) 
+
+
+def hex_to_rgb(hex_color:str):
+    hex_color = hex_color.lstrip('#') 
+    return RGBColor(int(hex_color[0:2], 16), int(hex_color[2:4], 16), int(hex_color[4:6], 16))
+
+def rgb_to_hex(r, g, b):
+    return '{:02x}{:02x}{:02x}'.format(r, g, b)
+
+def is_base64(text):
+    try:
+       
+        image_bytes =b64decode(text)
+        return image_bytes
+    except Exception:
+       
+        return False
+
+
+def time_use(fn):
+    @wraps(fn)
+    def cc(*args,**kwargs): 
+        f_time = time.time()
+        res = fn(*args,**kwargs)
+
+        cha = round(time.time()-f_time,3)
+        if cha > 0.3:
+            print(f'函数:{fn.__name__} 一共用时',cha,'秒')
+        return res 
+    return cc 
+
+
+def qrcode_maker(id_text=None,full_url=None) -> BytesIO:
+    """
+    :param id_text: an id; the QR code then points to the Chunsun screening-sheet scan page, e.g. http://dcjxb.yunzhixue.cn/link?type=scanpage&id=999
+    :param full_url: if provided, this text is used directly as the target address of the QR code
+    :return: a BytesIO object holding the QR image; remember to close it
+    """
+    text = f"{address}/link?type=scanpage&id={id_text}"
+    if full_url:
+        text = full_url
+    qr = qrcode.QRCode(
+        version=1,
+        error_correction=qrcode.constants.ERROR_CORRECT_L,
+        box_size=12,
+        border=4,
+    )
+    qr.add_data(text)
+    qr.make(fit=True)
+
+   
+    img = qr.make_image(fill_color="black", back_color="white")
+    img_byte_arr = io.BytesIO()
+    img.save(img_byte_arr, format='PNG')
+    img_byte_arr.seek(0)
+   
+    return img_byte_arr
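Typical use of the helper above (illustrative only; id 999 mirrors the example URL in the docstring, and the BytesIO is closed as the docstring asks):

    buf = qrcode_maker(id_text=999)   # QR for {address}/link?type=scanpage&id=999
    try:
        with open("scan_qr.png", "wb") as f:
            f.write(buf.read())
    finally:
        buf.close()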
+
+
+
+def get_weekday():
+    today = datetime.date.today()
+    weekday_index = today.weekday()
+    weekdays_chinese = ['星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日']
+    weekday_chinese = weekdays_chinese[weekday_index]
+    return weekday_chinese
+
+
+def make_chart(x_axis_data,y_axis_datas,title,sub_title_list,x_axis_label=None,y_axis_label=None):
+    """
+    :param sub_title_list: list of sub-titles placed in the top-right corner, one label per y series
+    :param y_axis_label: y-axis caption
+    :param x_axis_label: x-axis caption
+    :param title: chart title
+    :param x_axis_data: x data, a single flat list, usually dates
+    :param y_axis_datas: list of y data lists; nested because there may be several y lines
+    :return: a BytesIO holding the rendered PNG, or None on failure
+    """
+    x_len = len(x_axis_data)
+
+   
+    image_io = BytesIO()
+
+    font1 = {'family': 'SimSun', 'weight': 'normal', 'size': 14}
+    plt.rc('font', **font1)
+    plt.rcParams["axes.unicode_minus"] = False
+    try:
+        for y in y_axis_datas:
+            if len(y) != x_len:
+                logger.error("x轴的y轴的数据个数不一致")
+            plt.plot(x_axis_data, y, marker='o',label="zxs") 
+
+        plt.title(title) 
+        if x_axis_label:
+            plt.xlabel(x_axis_label) 
+        if y_axis_label:
+            plt.ylabel(y_axis_label) 
+        plt.grid(True) 
+
+        for index,sub_title in enumerate(sub_title_list):
+            plt.text(0.95, 0.9-index*0.15, sub_title, transform=plt.gca().transAxes, fontsize=10, va='top', ha='right', backgroundcolor='w')
+        with lock:
+            plt.savefig(image_io, format='png', bbox_inches='tight') 
+            image_io.seek(0) 
+
+        return image_io
+    except Exception as e:
+        log_err_e(e,"折线图生成错误")
+        image_io.close()
+        return None
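A minimal call of make_chart with the parameters documented above (all values invented); the result is checked because None is returned on failure:

    chart_io = make_chart(x_axis_data=["09-01", "09-08", "09-15"],
                          y_axis_datas=[[120, 135, 150]],
                          title="Reading speed",
                          sub_title_list=["self"],
                          x_axis_label="date",
                          y_axis_label="words/min")
    if chart_io:
        with open("chart_demo.png", "wb") as f:
            f.write(chart_io.read())
        chart_io.close()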
+
+
+if __name__ == '__main__':
+   
+   
+
+    t= time.time()
+    io = qrcode_maker('',"http://111.231.167.191:8001/mp3")
+    with open("1.jpg",'wb') as f:
+        f.write(io.read())
+    print(time.time()-t)

+ 30 - 0
make_docx_demo/get_standard_data.py

@@ -0,0 +1,30 @@
+# -*- coding:utf-8 -*-
+"""获取学段标准数据"""
+from cachetools import TTLCache,cached
+import requests
+from tools.loglog import logger, log_err_e
+
+
+cache = TTLCache(maxsize=100, ttl=86400)
+
+
+def get_standard_data(student_stage:int):
+    if student_stage in cache:
+        return cache[student_stage]
+
+    url = "https://dcjxb.yunzhixue.cn/api-dev/standard/study"
+    params = {"stage":student_stage}
+    response = requests.get(url,params=params)
+    if response.status_code == 200:
+        data_obj = response.json()['data'] 
+       
+        return_data = data_obj['totalVocabulary'],data_obj['readingAccuracy'],data_obj['readingLevel'],data_obj['readingSpeed']
+
+        cache[student_stage] = return_data 
+        return return_data
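Since cachetools.cached is imported above but never used, an equivalent variant could let the decorator manage the TTL cache instead of the manual membership check; this is a hypothetical rewrite, and the timeout argument is an addition not present in the original:

    @cached(cache)
    def get_standard_data_cached(student_stage: int):
        response = requests.get("https://dcjxb.yunzhixue.cn/api-dev/standard/study",
                                params={"stage": student_stage}, timeout=10)
        response.raise_for_status()
        data_obj = response.json()['data']
        return (data_obj['totalVocabulary'], data_obj['readingAccuracy'],
                data_obj['readingLevel'], data_obj['readingSpeed'])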
+
+
+if __name__ == '__main__':
+    print(get_standard_data(3))
+    print(cache)
+    print(1 in cache,2 in cache,3 in cache)

+ 1166 - 0
make_docx_demo/main_word.py

@@ -0,0 +1,1166 @@
+# -*- coding: UTF-8 -*-
+import time
+import re
+import math
+from io import BytesIO
+from random import randint, shuffle
+from threading import Thread
+
+from docx_base import Word, Table, hex_to_rgb, rgb_to_hex, ParagraphBase
+from docx.shared import Pt, Inches, Cm, RGBColor
+from docx.enum.text import WD_COLOR_INDEX
+from make_docx_demo.data import *
+from make_docx_demo.docx_other_func import time_use, qrcode_maker, get_weekday, make_chart
+from tools.loglog import logger, log_err_e
+from make_docx_demo.word2pdf import convert_word_to_pdf
+
+num_dict = {1: "❶", 2: "❷", 3: "❸", 4: "❹", 5: "❺", 6: "❻", 7: "❼", 8: "❽", 9: "❾",
+            10: "❿", 11: "⓫", 12: "⓬", 13: "⓭", 14: "⓮", 15: "⓯", 16: "⓰", 17: "⓱", 18: "⓲", 19: "⓳", 20: "⓴"}
+
+
+
+@time_use
+def header_maker(docx: Word, json_data):
+    exercise_id = str(json_data.get("ExerciseId", "")).rjust(11, "0") 
+    exercise_title = json_data.get("ExerciseTitle", "") 
+    exercise_level = json_data['StudentInfo']['StudentStudy']['ReadingLevel'] 
+
+    student_name = json_data.get("StudentInfo").get("StudentName", '')
+    class_name = json_data.get("StudentInfo").get("ClassName", '')
+    t_date = time.strftime("%Y-%m-%d", time.localtime())
+    t_weekday = get_weekday()
+    t_time = time.strftime("%H:%M:%S", time.localtime())
+
+    for i in range(2, len(docx.doc.sections) - 1):
+        tb_header = docx.add_header_table(rows=1, cols=5, section_index=i, tb_name="页眉表格")
+
+        tb_header.set_cell_text(0, 0, "春笋英语", bold=True, size=16, color=(220, 220, 220), border=False, chinese_font_name="黑体")
+        tb_header.set_cell_text(0, 1, f"{class_name}\n{student_name}", size=8, border=False, color=(220, 220, 220))
+        tb_header.set_cell_text(0, 2, f"{exercise_title}\n{exercise_level}级", size=8, border=False, color=(220, 220, 220))
+        tb_header.set_cell_text(0, 3, f"{exercise_id}", bold=True, size=24, border=False, color=(220, 220, 220))
+        tb_header.set_cell_text(0, 4, f"{t_date}\n{t_weekday}\n{t_time}", size=8, border=False, color=(220, 220, 220))
+
+        tb_header.set_tb_colum_width(width=[100, 70, 70, 150, 80])
+       
+
+    target_section = docx.doc.sections[-1] 
+    target_section.header.is_linked_to_previous = False
+   
+    for paragraph in target_section.header.paragraphs:
+        paragraph.clear() 
+   
+   
+
+    target_section.header_distance = 0
+    target_section.footer_distance = 280000
+
+
+@time_use
+def sub_title_maker(docx: Word, main_title, sub_title_name1, sub_title_name2='春笋学习,高效学习专家'):
+    p = docx.add_blank_paragraph()
+    line_width = 205
+    main_rect_x = line_width + 10
+    main_rect_width = 150 
+
+    right_line_x = main_rect_x + main_rect_width + 10 
+
+    p.add_rectangle(main_title, x=main_rect_x, y=4, fill_color="000000", width=main_rect_width, height=48, font_color="ffffff",
+                    font_size=18)
+    p.add_rectangle("", x=0, y=50, boder_color="808080", width=line_width, height=2)
+    p.add_rectangle("", x=right_line_x, y=50, boder_color="808080", width=line_width, height=2)
+
+    p.add_rectangle(f"【{sub_title_name1}】", x=0, y=20, width=line_width, height=40, font_size=8, chinese_font="宋体")
+    p.add_rectangle(sub_title_name2, x=right_line_x, y=20, width=line_width, height=40, font_color="808080", font_size=8,
+                    chinese_font="宋体")
+    docx.add_blank_paragraph()
+    docx.add_blank_paragraph()
+    docx.add_blank_paragraph()
+
+
+@time_use
+def section_1(docx: Word, json_data, *args, **kwargs):
+    exercise_id = str(json_data.get("ExerciseId", "")).rjust(11, "0")
+    student_name = json_data.get("StudentInfo").get("StudentName", '')
+    t_date_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+    reading_speed = json_data['StudentInfo']['StudentStudy']['ReadingSpeed'] 
+    reading_accuracy = json_data['StudentInfo']['StudentStudy']['ReadingAccuracy'] 
+    reading_level = json_data['StudentInfo']['StudentStudy']['ReadingLevel'] 
+
+   
+    chart_width = 5.4
+    all_chart = json_data['StudentInfo']['StudentStudy']['ChartData']
+    reading_speed_chart = all_chart["ReadingSpeed"] 
+    reading_accuracy_chart = all_chart["ReadingAccuracy"] 
+    reading_difficult_chart = all_chart["ReadingDifficulties"] 
+
+    reading_speed_x_data = reading_speed_chart['XAxis'] 
+    reading_speed_sub_title = reading_speed_chart['Legend'] 
+    reading_speed_y_datas = [i['Data'] for i in reading_speed_chart['Series']] 
+
+    reading_accuracy_x_data = reading_accuracy_chart['XAxis'] 
+    reading_accuracy_sub_title = reading_accuracy_chart['Legend'] 
+    reading_accuracy_y_datas = [i['Data'] for i in reading_accuracy_chart['Series']] 
+
+    reading_difficult_x_data = reading_difficult_chart['XAxis'] 
+    reading_difficult_sub_title = reading_difficult_chart['Legend'] 
+    reading_difficult_y_datas = [i['Data'] for i in reading_difficult_chart['Series']] 
+
+    "开始版面-------------------------------------------------"
+
+    docx.add_paragraph("春笋英语     减负增效", size=24, bold=True, align="center", dq=20, dh=10)
+    docx.add_paragraph("个性化学案", size=42, bold=True, chinese_font_name="黑体", align="center", dq=10, dh=20)
+
+    p = docx.add_blank_paragraph(align="center")
+    p.add_pic("make_docx_demo/static/logo.png", width=3)
+
+    tb1 = Table(docx, rows=1, cols=3)
+    tb1.set_cell_text(row=0, column=1, cell_text=f"姓名:{student_name} 编号:{exercise_id}", set_all_cell=False, color="CCCCCC")
+    tb1.set_tb_colum_width(width=[100, 301, 100])
+
+    tb2 = Table(docx, rows=1, cols=3, tb_name="三张图表")
+    tb2.set_cell_text(0, 0, f"阅读速度\n{reading_speed}", bold=True, dq=10, dh=10)
+    tb2.set_cell_text(0, 1, f"阅读准确率%\n{reading_accuracy}", bold=True, dq=10, dh=10)
+    tb2.set_cell_text(0, 2, f"阅读等级\n{reading_level}", bold=True, dq=10, dh=10)
+    tb2.set_tb_colum_width(width=[167, 167, 167])
+
+    docx.add_blank_paragraph()
+
+    tb3 = Table(docx, rows=1, cols=3, border=True)
+    tb3.set_tb_colum_width(width=[167, 167, 167])
+
+    p1 = tb3.get_cell_paragraph(0, 0, dq=15, dh=15)
+    run1 = ParagraphBase(p1)
+    chart1_io = make_chart(x_axis_data=reading_speed_x_data, y_axis_datas=reading_speed_y_datas, title="阅读速度",
+                           sub_title_list=reading_speed_sub_title)
+    run1.add_pic(chart1_io, width=chart_width)
+    chart1_io.close() 
+
+    p2 = tb3.get_cell_paragraph(0, 1, dq=15, dh=15)
+    run2 = ParagraphBase(p2)
+    chart2_io = make_chart(x_axis_data=reading_accuracy_x_data, y_axis_datas=reading_accuracy_y_datas, title="阅读准确率",
+                           sub_title_list=reading_accuracy_sub_title)
+    run2.add_pic(chart2_io, width=chart_width)
+    chart2_io.close() 
+
+    p3 = tb3.get_cell_paragraph(0, 2, dq=15, dh=15)
+    run3 = ParagraphBase(p3)
+    chart3_io = make_chart(x_axis_data=reading_difficult_x_data, y_axis_datas=reading_difficult_y_datas, title="阅读难度",
+                           sub_title_list=reading_difficult_sub_title)
+    run3.add_pic(chart3_io, width=chart_width)
+    chart3_io.close() 
+
+    docx.add_blank_paragraph()
+
+    tb4 = Table(docx, rows=5, cols=5, border=True, tb_name="自主复习记录")
+   
+    tb4.set_table_width_xml([2000, 3000, 2000, 2000, 2000])
+
+    first_cell = tb4.get_cell(0, 0)
+    last_cell = tb4.get_cell(0, 4)
+    first_cell.merge(last_cell)
+    first_cell = tb4.get_cell(4, 0)
+    last_cell = tb4.get_cell(4, 4)
+    first_cell.merge(last_cell)
+
+    tb4.set_cell_text(0, 0, "自主复习记录", size=14, bold=True, color=(230, 230, 230), set_all_cell=False)
+
+    tb4.set_cell_text(1, 0, "模块", size=14, bold=True, color=(244, 244, 244), set_all_cell=False)
+    tb4.set_cell_text(1, 1, "复习建议", size=14, bold=True, color=(244, 244, 244), set_all_cell=False)
+    tb4.set_cell_text(1, 2, "10.1", size=14, bold=True, color=(244, 244, 244), set_all_cell=False)
+    tb4.set_cell_text(1, 3, "10.4", size=14, bold=True, color=(244, 244, 244), set_all_cell=False)
+    tb4.set_cell_text(1, 4, "10.7", size=14, bold=True, color=(244, 244, 244), set_all_cell=False)
+
+    tb4.set_cell_text(2, 0, "词汇学习", size=14, bold=True, set_all_cell=False)
+    tb4.set_cell_text(2, 1, "将单词、词义、例句朗读2遍", size=10, set_all_cell=False, align="left")
+
+    tb4.set_cell_text(3, 0, "阅读理解", size=14, bold=True, set_all_cell=False)
+    tb4.set_cell_text(3, 1, "1.文章朗读一遍\n2.口头翻译黑体词所在的句子", size=10, align="left")
+
+    tb4.set_cell_text(4, 0, "备注:完成相应复习后在对应格子中打√", size=10, bold=True, align="right")
+
+    docx.add_paragraph(f"生成日期:  {t_date_time}", align="center", dq=20)
+    docx.add_page_section()
+
+
+@time_use
+def section_2(docx: Word, json_data, *args, **kwargs):
+    docx.add_paragraph("致同学,家长们的一封信", bold=True, chinese_font_name="黑体", size=16, align="center", dq=5, dh=15)
+    docx.add_paragraph("各位同学、家长,你们好!", bold=True, size=12)
+    p1 = docx.add_blank_paragraph(dq=10, dh=10)
+    p1.add_run_to_p("关于我们:", chinese_font_name="黑体", bold=True, size=10)
+    p1.add_run_to_p("北京云知学科技有限公司是专注于", size=10)
+    p1.add_run_to_p("英语课堂信息化研究的高新技术企业", underline=True, size=10)
+    p1.add_run_to_p(",是教育部“数字校园综合解决方案”推荐单位。旗下品牌春笋英语是全国首家纸面化智能英语教学系统,合作学校超1000家,助力超50万名学生提升英语成绩。"
+                    "春笋英语个性化学案,落实双减政策,根据每个学生的实际水平,一对一定制学案内容,实现减负、增效。",
+                    size=10)
+
+    docx.add_blank_paragraph()
+
+    p2 = docx.add_blank_paragraph(dq=10, dh=5)
+    p2.add_pic(pic_path="make_docx_demo/static/pen.png", width=0.5, underline=True)
+    p2.add_run_to_p(" 为什么学好英语,而不是改学日语等小语种:", underline=True, size=10)
+    p3 = docx.add_blank_paragraph(dq=5, dh=5)
+    p3.add_run_to_p("1.专业限制:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("学习小语种在高考志愿填报时会有专业限制。例如,军校、警校和多数科技类专业通常只招收考英语的学生。\n", size=10)
+    p3.add_run_to_p("2.利于学业发展:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("大学毕业及考研都需要英语,考小语种进大学后会更吃力。\n", size=10)
+    p3.add_run_to_p("3.就业面窄:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("大多数就业机会以及公务员考试往往都有英语要求。\n", size=10)
+    p3.add_run_to_p("4.英语并不难:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("只要方法得当,半年时间就可以显著提高。\n", size=10)
+
+    p2 = docx.add_blank_paragraph(dq=10, dh=5)
+    p2.add_pic(pic_path="make_docx_demo/static/pen.png", width=0.5, underline=True)
+    p2.add_run_to_p(" 为什么学不好英语:方法不对", underline=True, size=10)
+    p3 = docx.add_blank_paragraph(dq=5, dh=5)
+    p3.add_run_to_p("1.单词+语法≠英语:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("单纯的记忆单词和语法规则并不足以理解英语文章,更不足以写出流畅的作文。\n", size=10)
+    p3.add_run_to_p("2.钻研英语考试≠学英语:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("考试技巧并不能提高英语水平,只能造成得分虚高的假象,使英语学习越来越缺乏后劲,分数越学越低。\n", size=10)
+    p3.add_run_to_p("3.英语是技能,用的多了就会了:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("就像每个英国人都会说英语一样,每个中国人也都可以学好英语。\n", size=10)
+
+    p2 = docx.add_blank_paragraph(dq=10, dh=5)
+    p2.add_pic(pic_path="make_docx_demo/static/pen.png", width=0.5, underline=True)
+    p2.add_run_to_p(" 怎样学好英语:", underline=True, size=10)
+    p3 = docx.add_blank_paragraph(dq=5, dh=5)
+    p3.add_run_to_p("1.大量阅读:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("足量的阅读是掌握英语“技能”的必由之路。\n", size=10)
+    p3.add_run_to_p("2.大量难度适宜的阅读:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p(
+        "多数同学看不懂文章只能猜答案,即使做了阅读题也没有真正读文章,所以阅读材料难度适宜非常关键,“看得懂”才能真正实现“大量的阅读”。\n",
+        size=10)
+    p3.add_run_to_p("3.在阅读过程中自动掌握单词,领悟语法:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("阅读量足够大,单词就自然掌握了,语法即使不知道概念也能理解其含义。\n", size=10)
+    p3.add_run_to_p("4.四个月为一个周期:", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("几十万的春笋英语学员的经历表明,只要听话照做,平均四个月就能", size=10)
+    p3.add_run_to_p("提升 20-30分", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("。\n", size=10)
+
+    p2 = docx.add_blank_paragraph(dq=10, dh=5)
+    p2.add_pic(pic_path="make_docx_demo/static/pen.png", width=0.5, underline=True)
+    p2.add_run_to_p(" 春笋英语践行的理念:", underline=True, size=10)
+    p3 = docx.add_blank_paragraph(dq=5, dh=5)
+    p3.add_run_to_p("要抓紧,别着急,不走捷径,踏实看懂每句话,提分就是水到渠成。\n", size=10)
+    p3.add_run_to_p("树立信心,有方法,有效果,有效率,100天脱胎换骨。", size=10)
+
+    docx.add_blank_paragraph(dq=2, dh=2)
+    docx.add_paragraph("北京云知学科技有限公司", align="right", size=10)
+   
+    docx.add_page_section()
+
+
+@time_use
+def section_3(docx: Word, json_data, *args, **kwargs):
+    docx.add_paragraph("春笋英语学案学习流程", chinese_font_name="黑体", bold=True, align="center", size=16)
+
+    p1 = docx.add_blank_paragraph(dq=10)
+    p1.add_run_to_p("1.认读\n", bold=True, size=10, chinese_font_name="黑体")
+    p1.add_run_to_p("按顺序朗读生词表两遍。\n", size=10)
+    p1.add_run_to_p("(1)用红笔在不会的单词序号上打星号,增加记忆。\n", size=10)
+    p1.add_run_to_p("(2)朗读例句,不认识的部分参照译文理解。\n", size=10)
+    p1.add_run_to_p("(3)不会念的单词拼读音标;音标不会拼读的,申请音标课,约5个小时即可。\n", size=10)
+
+    p2 = docx.add_blank_paragraph()
+    p2.add_run_to_p("2.速记\n", bold=True, size=10, chinese_font_name="黑体")
+    p2.add_run_to_p("(1)按顺序抄写,用黑笔完成,会的单词可以直接默写,不会的单词对照生词表进行抄写。提高效率,节约时间,不必犹豫纠结。\n", size=10)
+    p2.add_run_to_p("(2)抄写过程中尽量朗读或默读,不必额外增加抄写次数,切勿死记硬背。\n", size=10)
+
+    p3 = docx.add_blank_paragraph()
+    p3.add_run_to_p("3.例句练习\n", bold=True, size=10, chinese_font_name="黑体")
+    p3.add_run_to_p("翻译例句,可参考学生词表例句译文,在括号内写出单词汉语意思。\n", size=10)
+
+    p4 = docx.add_blank_paragraph()
+    p4.add_run_to_p("4.阅读\n", bold=True, size=10, chinese_font_name="黑体")
+    p4.add_run_to_p("(1)认清阅读与做题的关系:阅读是为了提高文本敏感度,做题只是为了检验阅读质量。\n", size=10)
+    p4.add_run_to_p(
+        "不要为加快答题而跳读略读,不要先读题目再回原文查找答案,逐字逐句读完全文再做题,日常学案训练不要利用答题技巧,作答时要回到原文确认内容,养成良好答题习惯。\n",
+        bold=True, size=10, chinese_font_name="黑体")
+    p4.add_run_to_p("(2)读轻松愉快的文章才是难度适当的,如果阅读吃力,直接申请调整难度(", size=10)
+    p4.add_run_to_p("可在筛查反馈表贴便贴留言", bold=True, size=10, chinese_font_name="黑体")
+    p4.add_run_to_p("),不要勉强,提高时间利用率。\n", size=10)
+    p4.add_run_to_p("(3)遇到生词,划线标记,事后查字典,把意思写在右侧标注区,不要写在原文上;生词太多的文章可以放弃,不要让查字典成为负担。\n",
+                    size=10)
+    p4.add_run_to_p("(4)黑体字、斜体字不认识,可以当场查阅生词表和标注。\n", size=10)
+    p4.add_run_to_p("(5)必须将做题开始和结束时间填写上,精确到秒。\n", size=10)
+    p4.add_run_to_p("(6)做题过程中,自己对于答案不理解、不确定的题目要标注出来。\n", size=10)
+
+    p5 = docx.add_blank_paragraph()
+    p5.add_run_to_p("5.核对答案\n", bold=True, size=10, chinese_font_name="黑体")
+    p5.add_run_to_p("红笔批改。正常应该全对,允许因疏忽偶尔错一题。\n", size=10)
+
+    p6 = docx.add_blank_paragraph()
+    p6.add_run_to_p("6.划筛查表、报告阅读数据\n", bold=True, size=10, chinese_font_name="黑体")
+    p6.add_run_to_p("和摸底一样的流程,", size=10)
+    p6.add_run_to_p("筛查表很重要,务必严格对待,实事求是,否则下一份学案学习内容会有偏差;", bold=True, size=10, chinese_font_name="黑体")
+    p6.add_run_to_p("务必填涂阅读用时和答对题数。\n", size=10)
+
+    p7 = docx.add_blank_paragraph()
+    p7.add_run_to_p("7.交卷\n", bold=True, size=10, chinese_font_name="黑体")
+    p7.add_run_to_p("学案做完后,将学案的最后一页筛查反馈表反馈给老师。\n", size=10)
+
+    p8 = docx.add_blank_paragraph()
+    p8.add_run_to_p("8.回顾\n", bold=True, size=10, chinese_font_name="黑体")
+    p8.add_run_to_p("回到学案第一页封面,在规定的时间内回顾学案内容并在自主复习记录表中登记(自主复习表在封面位置)。", size=10)
+
+    docx.add_page_section()
+
+
+@time_use
+def section_4(docx: Word, json_data, *args, **kwargs):
+    student_name = json_data.get("StudentInfo").get("StudentName", '')
+    title_info = "\n".join(json_data.get("Title"))
+
+    docx.add_paragraph(f"{student_name}  同学:", align="center", bold=True, dq=5, dh=5)
+
+    p1 = docx.add_blank_paragraph()
+    p1.add_run_to_p(title_info, size=10)
+
+    sub_title_maker(docx, "词汇精准学", "智能定制专属词汇,轻松学习")
+
+    tb = Table(docx, 1, 1, border=True, tb_name="词汇精准学")
+    tb.set_tb_colum_width(0, 460)
+    tb.set_cell_text(0, 0, "按顺序朗读生词表两遍。\n(1)用红笔在不会的单词序号上打星号,增加记忆。    (2)朗读例句,不认识的部分参照译文理解。",
+                     align="left", size=10, dq=10, dh=10)
+    docx.add_blank_paragraph()
+
+
+@time_use
+def section_4_1(docx: Word, json_data, *args, **kwargs):
+    def insert_content(row, col, data, qrcode_result: dict):
+
+        cell_outside = tb_outside.get_cell(row, col, delete_default_para=True)
+        tb_inside = Table(cell_outside, rows=5, cols=3, tb_name="内部内容")
+
+        tb_inside.merge_cell(0, 0, 0, 1)
+        tb_inside.merge_cell(1, 0, 1, 1) 
+        tb_inside.merge_cell(0, 2, 1, 2) 
+        tb_inside.merge_cell(2, 0, 2, 2) 
+        tb_inside.merge_cell(3, 0, 3, 2) 
+        tb_inside.merge_cell(4, 0, 4, 2) 
+
+       
+
+       
+        num_calculate = 2 * row + 1 if col == 0 else 2 * row + 2
+        p = ParagraphBase(tb_inside.get_cell_paragraph(0, 0, align="left"))
+        p.add_run_to_p(num_dict[num_calculate], bold=True, size=22, font_name="MS Gothic")
+        p.add_run_to_p(' ' + data[0], bold=True, size=20)
+        tb_inside.set_cell_text(row=1, column=0, cell_text=data[1] + "  " + data[2], border=False, size=10, align="left",
+                                bk_color=(240, 240, 240))
+
+       
+        image_io:BytesIO = qrcode_result.get(data[9], "") 
+        if image_io:
+            cell_p = tb_inside.get_cell_paragraph(0, 2, dq=5)
+            p_base = ParagraphBase(cell_p)
+            p_base.add_pic(image_io, width=1.5)
+            image_io.close()
+
+       
+        cell_p = tb_inside.get_cell_paragraph(2, 0, align="left")
+        cell_p_1 = ParagraphBase(cell_p)
+        cell_p_1.add_run_to_p(data[3], size=10, bold=True) 
+        cell_p_1.add_run_to_p("   " + data[4], size=8) 
+
+        cell_p = tb_inside.get_cell_paragraph(3, 0, align="left")
+        cell_p_1 = ParagraphBase(cell_p)
+        cell_p_1.add_run_to_p(data[5], size=10, bold=True)
+        cell_p_1.add_run_to_p(" " + data[6], size=8)
+
+        cell_p = tb_inside.get_cell_paragraph(4, 0, align="left")
+        cell_p_1 = ParagraphBase(cell_p)
+        cell_p_1.add_run_to_p(data[7], size=10, bold=True)
+        cell_p_1.add_run_to_p(" " + data[8], size=8)
+
+    properties_chinese_map = {"adj": "形容词", "n": "名词", "interj": "感叹词", "conj": "连词", "num": "数字", "art": "冠词",
+                              "pron": "代词", "adv": "副词", "prep": "介词", "v": "动词"}
+    strange_words_data = [] 
+    strange_words = json_data.get('StrangeWords')
+    qrcode_thread = [] 
+    qrcode_result = {}
+
+    for item in strange_words:
+        spell = item['Spell'] 
+        word_id = item['WordId'] 
+        en = "" if not item.get("SymbolsEn", "") else item.get("SymbolsEn")
+        am = "" if not item.get("SymbolsAm", "") else item.get("SymbolsAm")
+
+        symbols_en = "英" + f'[{en}]' 
+        symbols_am = "美" + f'[{am}]' 
+
+       
+        tts_url = f"https://dcjxb.yunzhixue.cn/exercise/word?id={word_id}"
+        # qrcode_maker only returns a BytesIO, so wrap it to store the result keyed by the URL
+        t = Thread(target=lambda url=tts_url: qrcode_result.update({url: qrcode_maker(full_url=url)}))
+        qrcode_thread.append(t)
+        t.start()
+
+        word_properties = " ".join([properties_chinese_map.get(i, "") for i in item['WordProperties']]) 
+        word_meanings = item.get('Meaning', "") 
+        word_changes = ";".join([s["Type"] + ":" + s["Spell"] for s in item["WordChanges"]])
+
+        if item['Sentences']:
+            sentences = item['Sentences'][0]['English'] + '\n' + item['Sentences'][0]['Chinese']
+        else:
+            sentences = ""
+       
+        single_word_tuple = (spell, symbols_en, symbols_am, word_properties, word_meanings,
+                             "词汇变形", word_changes, "例句", sentences, tts_url)
+        strange_words_data.append(single_word_tuple)
+
+    rows = math.ceil(len(strange_words_data) / 2) 
+    tb_outside = Table(docx, rows=rows, cols=2, tb_name="外层框架")
+    tb_outside.set_tb_colum_width(width=[230, 230])
+
+    for t in qrcode_thread:
+        t.join()
+
+    for row in range(rows):
+        for col in range(2):
+            try:
+                data_item = strange_words_data.pop(0)
+                insert_content(row, col, data_item, qrcode_result)
+            except IndexError:
+                break
+
+    docx.add_page_section()
+
+
+@time_use
+def section_5(docx: Word, json_data, *args, **kwargs):
+   
+    copy_word_list = [i['Meaning'] for i in json_data.get('StrangeWords')]
+    random_copy_word_list = copy_word_list * 3 
+    shuffle(random_copy_word_list)
+
+   
+    first_copy_word_list = copy_word_list.copy()
+    copy_word_list_add_num = [f"{i} ({idx})" for idx, i in enumerate(first_copy_word_list, start=1)]
+    shuffle(copy_word_list_add_num)
+    total_copy_word_list = copy_word_list_add_num + random_copy_word_list
+
+    sub_title_maker(docx, "单词高效记", "会读会写才算真学会")
+    tb = Table(docx, 1, 1, tb_name="SuperMemo 速记", border=True)
+    tb.set_tb_colum_width(0, 460)
+    text = ["请先按照浅色水印显示的助记音节小声朗读,并在浅色水印上摹写,音节分隔点不用抄写。\n",
+            "请在横线上写下对应单词,每格写一遍,尽量默写,默写不出的,可查阅生词表;\n",
+            "书写时保持工整;每写完一个单词小声念一遍词义与单词。\n"]
+    cell_p = tb.get_cell_paragraph(0, 0, align="left")
+    p = ParagraphBase(cell_p)
+    p.add_run_to_p("                SuperMemo 速记\n", size=16, bold=True, )
+    for t in text:
+        p.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+        p.add_run_to_p(t, size=10)
+    docx.add_blank_paragraph()
+
+    total_count = len(total_copy_word_list)
+    half_count = int(total_count / 2)
+    tb2 = Table(docx, half_count + 1, 4, tb_name="SuperMemo 速记下面的单词")
+    for row in range(total_count):
+        data = total_copy_word_list[row]
+        if row < half_count:
+            tb2.set_cell_text(row, 0, data, size=9, align="right", border=False, dq=2.5, dh=2)
+            tb2.set_cell_text(row, 1, str(row + 1) + "." + "_" * 20, size=9, align="left", border=False, dq=2.5, dh=2)
+        else:
+            tb2.set_cell_text(row - half_count, 2, data, size=9, align="right", border=False, dq=2.5, dh=2)
+            tb2.set_cell_text(row - half_count, 3, str(row + 1) + "." + "_" * 20, size=9, align="left", border=False, dq=2.5, dh=2)
+
+    tb2.set_tb_colum_width(width=[120, 110] * 2)
+
+    docx.add_page_section()
+
+
+@time_use
+def section_6(docx: Word, json_data, *args, **kwargs):
+    example_sentence = [f"{index}. {i['Sentences'][0]['English']}  ({i['Spell']})" for index, i in
+                        enumerate(json_data['StrangeWords'], start=1) if i['Sentences']]
+    sub_title_maker(docx, "例句填填看", "记词义,练拼写,学的快")
+    tb = Table(docx, 1, 1, tb_name="例句填填看", border=True)
+    tb.set_tb_colum_width(0, 460)
+    text = ["请在横线上写下单词在例句中的词义,若想不起来,可随时到例句答案表中查看。\n",
+            "参阅过答案的例句,请在句前的“□”中标记问号,以便复习回顾。\n",
+            "单词有多个意思的,应只填写适合语境的意思。"]
+    cell_p = tb.get_cell_paragraph(0, 0, align="left", dq=10, dh=10)
+    p = ParagraphBase(cell_p)
+    for t in text:
+        p.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+        p.add_run_to_p(t, size=10)
+
+   
+
+    data = ["1. I have no chance to go sightseeing this summer.	(chance)",
+            "2. And with that, we conclude the third and final example.	(third)",
+            "3. He lives a healthy and normal life and has a strong body.	(healthy)",
+            "4. Popular music evolved from folk songs	(song)",
+            "5. The town is famous for its name.	(for)",
+            "6. Let's call room service, I need a bottle of wine.	(wine)",
+            "7. That was the beginning of my life's work.	(life)",
+            "8. The student wants to learn something.	(learn)",
+            "9. How long did the movie last?	(last)",
+            "10. I don't want to marry while I am studying.	(marry)",
+            "11. Each paragraph begins on a new line.	(paragraph)",
+            "12. Mike studies harder, so his grades are getting better and better.	(better)",
+            "13. Tutors are better and more helpful than school teachers in any fields a student respondent said.	(helpful)",
+            "14. Our team won a great victory and won the prize.	(prize)",
+            "15. His performance at the concert last night proved that he is in the top of international pianists.	(concert)"]
+
+    for i in example_sentence:
+        p = docx.add_blank_paragraph(dq=4,dh=4)
+        p.add_run_to_p("□  ", size=12,font_name="宋体")
+        p.add_run_to_p(i + "___________")
+
+    docx.add_page_section()
+
+
+@time_use
+def section_7(docx: Word, json_data, *args, **kwargs):
+   
+    def wanxing(index, article_single):
+        article_id = article_single['Id']
+        article_length = article_single['AllWordAmount']
+       
+        strange_words_ids = [i['MeanId'] for i in json_data['StrangeWords']]
+       
+        explanatory_words_ids = [i['MeaningId'] for i in article_single['ExplanatoryWords']]
+
+       
+        select_text = []
+        for ques_index, candidates in enumerate(article_single['Questions'], start=1):
+            single_select_text = ''
+            for s in candidates['Candidates']:
+                single_select_text += s['Label'] + '. '
+                participle = s['Participle'] 
+                if participle:
+                    single_select_text += participle + ' \n'
+                else:
+                    text = s['Text']
+                    single_select_text += text + ' \n'
+
+            select_text.append(f"{ques_index}. {single_select_text}")
+
+       
+        all_select_text = "\n".join(select_text)
+
+       
+        article_main: str = article_single['English'] + "\n\n郑重提示:认真看完全文再看问题。\n\n" + all_select_text
+        article_main_list = article_main.split(" ")
+
+       
+        explanatory_words = "\n\n".join(
+            [f"{index}. {i['Spell']} {i['SymbolsEn']} {i['SymbolsAm']} {i['Meaning']}" for index, i in
+             enumerate(article_single['ExplanatoryWords'], start=1)])
+
+        sub_title_maker(docx, "真题强化练", "智能匹配难度,轻松提升阅读")
+        tb = Table(docx, 1, 1, tb_name="真题强化练", border=True)
+        tb.set_tb_colum_width(0, 460)
+        text = ["练习中不认识的单词,尽量猜测词义,并用红笔加以标记。\n",
+                "答题完毕后,可查字典,并注释在右侧批注区,不要在原文上注释。复习时不必通读全文,结合上下文能回忆起标记词的词义即可,想不起的再对照批注区。\n",
+                "完形填空是优秀的测验题型,却不适合用于训练阅读能力和提升词汇量,所以建议在阅读能力(理解度、速度、难度)达标后再做完形填空题型练习。\n",
+                "阅读能力达标的同学,按三遍法做完形填空,基本可以达到满分。三遍法要求如下:\n",
+                "第一遍(理解):结合选项通读全文,以求理解文章主旨,但不动笔,以免形成成见。\n",
+                "第二遍(填空):通读全文,从候选词中选出适宜项目,将完整的单词填入空格,使文章连贯。\n",
+                "第三遍(核验):通读填空后的全文,确认上下文无矛盾之处。\n",
+                "三遍通读均应记录起讫时间,并将速度纳入能力考核项目。能力合格者,考试中也应有充裕时间完成以上 3 遍通读。\n",
+                "阅读计时从此处开始,请按顺序完成阅读,并注意记录时间。\n",
+                "\n"]
+
+        text2 = [f"全题长度(含问题及选项):{article_length}; 编号:{article_id};\n",
+                 "第一遍(理解)开始时间:_________________     第二遍(填空)开始时间:_________________\n",
+                 "第三遍(核验)开始时间:_________________"]
+        cell_p = tb.get_cell_paragraph(0, 0, align="left", dq=10, dh=10)
+        p = ParagraphBase(cell_p)
+        for t in text:
+            p.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+            p.add_run_to_p(t, size=10)
+        for t2 in text2:
+            p.add_run_to_p(t2, size=10)
+
+        docx.add_blank_paragraph()
+
+        tb1 = Table(docx, 1, 3)
+        tb1.set_tb_colum_width(width=[90, 370, 5])
+
+        tb1_p = ParagraphBase(tb1.get_cell_paragraph(0, 0, align="left"))
+        tb1_p.add_pic("make_docx_demo/static/lianxi1.jpg", width=2.5)
+        tb1.set_cell_text(0, 1, f"本篇编号:{article_id},篇幅(含问题选项):{article_length} 词	阅读开始时间:  点	 分	 秒",
+                          size=9.5, underline=True, border=False, align="left")
+
+        tb2 = Table(docx, rows=1, cols=2, border=True, tb_name="完形填空")
+        tb2.set_tb_colum_width(width=[320, 140])
+
+       
+       
+        tb2_p = ParagraphBase(tb2.get_cell_paragraph(0, 0, align="left"))
+        for w in article_main_list:
+            word = re.search(r"\[(\d+)]", w)
+            if word:
+                w = w[:w.find('[')]
+                meaning_id = int(word.group(1))
+                if meaning_id in strange_words_ids:
+                    tb2_p.add_run_to_p(w + ' ', size=10.5, bold=True)
+                elif meaning_id in explanatory_words_ids:
+                    tb2_p.add_run_to_p(w + ' ', size=10.5, italic=True)
+                else: 
+                    tb2_p.add_run_to_p(w + ' ', size=10.5)
+            else:
+                tb2_p.add_run_to_p(w + ' ', size=10.5)
+
+        tb2.set_cell_text(0, 1, explanatory_words, size=10.5, font_color=(80, 80, 80), align="left")
+
+        docx.add_blank_paragraph()
+        tail_zhushi = """第一遍(理解)结束时间:__________用时:____秒
+第二遍(填空)结束时间:__________用时:____秒
+第三遍(核验)结束时间:__________用时:____秒
+总计用时:____分____秒
+
+阅读计时在此结束。
+今日总计阅读量 392 词,用时____秒,整份学案共有____个题目答对。答题用时、答对题数两项数据请抄写到筛查反馈表页脚。"""
+        docx.add_paragraph(tail_zhushi, size=10.5)
+        docx.add_blank_paragraph()
+
+   
+    def reading(index, article_single):
+       
+        all_article_length = 0
+
+        def single_yuedu(index, a):
+            article_id = a['Id']
+            article_length = a['AllWordAmount']
+            nonlocal all_article_length
+            all_article_length += article_length
+           
+            strange_words_ids = [i['MeanId'] for i in json_data['StrangeWords']]
+           
+            explanatory_words_ids = [i['MeaningId'] for i in a['ExplanatoryWords']]
+
+           
+            select_text = []
+            for ques_index, candidates in enumerate(a['Questions'], start=1):
+                single_select_text = ''
+               
+                subject = candidates['Subject'] + '\n' 
+                for s in candidates['Candidates']:
+                    single_select_text += s['Label'] + '. ' 
+                    participle = s['Participle'] 
+                    if participle:
+                        single_select_text += participle + ' \n'
+                    else:
+                        text = s['Text']
+                        single_select_text += text + ' \n'
+                select_text.append(str(ques_index) + ". " + subject + single_select_text)
+
+           
+            all_select_text = "\n".join(select_text)
+
+           
+            article_main: str = a['English'] + "\n\n郑重提示:认真看完全文再看问题。\n\n" + all_select_text
+            article_main_list = article_main.split(" ")
+
+           
+            explanatory_words = "\n\n".join(
+                [f"{index}. {i['Spell']} {i['SymbolsEn']} {i['SymbolsAm']} {i['Meaning']}" for index, i in
+                 enumerate(a['ExplanatoryWords'], start=1)])
+
+            tb1 = Table(docx, 1, 3, tb_name="图片小标题")
+            tb1.set_tb_colum_width(width=[90, 370, 5])
+
+            tb1_p = ParagraphBase(tb1.get_cell_paragraph(0, 0, align="left"))
+            tb1_p.add_pic(f"make_docx_demo/static/lianxi{index}.jpg", width=2.5)
+            tb1.set_cell_text(0, 1, f"本篇编号:{article_id},篇幅(含问题选项):{article_length} 词	阅读开始时间:  点	 分	 秒",
+                              size=9.5, underline=True, border=False, align="left")
+
+            tb2 = Table(docx, rows=1, cols=2, border=True, tb_name="七选五")
+            tb2.set_tb_colum_width(width=[320, 140])
+
+           
+            tb2_p = ParagraphBase(tb2.get_cell_paragraph(0, 0, align="left"))
+            for w in article_main_list:
+                word = re.search(r"\[(\d+)]", w)
+                if word:
+                    w = w[:w.find('[')]
+                    meaning_id = int(word.group(1))
+                    if meaning_id in strange_words_ids:
+                        tb2_p.add_run_to_p(w + ' ', size=10.5, bold=True)
+                    elif meaning_id in explanatory_words_ids:
+                        tb2_p.add_run_to_p(w + ' ', size=10.5, italic=True)
+                    else: 
+                        tb2_p.add_run_to_p(w + ' ', size=10.5)
+                else:
+                    tb2_p.add_run_to_p(w + ' ', size=10.5)
+
+            tb2.set_cell_text(0, 1, explanatory_words, size=10.5, font_color=(80, 80, 80), align="left", centre=False)
+
+            docx.add_blank_paragraph()
+            tail_zhushi = """完成时间:_____点_____分_____秒,本篇用时:_____秒。"""
+            docx.add_paragraph(tail_zhushi, size=10.5)
+            docx.add_blank_paragraph()
+
+        "---------------------开始单篇运行---------------------"
+        if index == 1: 
+            sub_title_maker(docx, "阅读提升练", "智能匹配难度,轻松提升阅读", "春笋智学, 高效学习专家")
+           
+            tb = Table(docx, 1, 1, tb_name="真题强化练", border=True)
+            tb.set_tb_colum_width(0, 460)
+            text = ["阅读中不认识的单词,尽量猜测词义,并用红笔加以标记,以便日后快速回顾。\n",
+                    "读完全文后,可查字典,并抄在右侧批注区,不要在原文上注释。复习时不必通读全文,结合上下文能回忆起标记词的词义即可,想不起的再对照批注区。\n",
+                    "阅读训练的目的是提高对英语词、句、篇的敏感度,答题只是检验学习成果的手段,所以切勿为了快速做题而跳读、略读。阅读速度是很重要的训练指标,请在确实理解词句的基础上尽量提高阅读速度。只要平时扎实阅读,考试中不会没有时间认真读题。\n",
+                    "阅读计时从此处开始,请按顺序完成阅读,并注意记录时间。\n",
+                    ]
+            cell_p = tb.get_cell_paragraph(0, 0, align="left", dq=10, dh=10)
+            p = ParagraphBase(cell_p)
+            for t in text:
+                p.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+                p.add_run_to_p(t, size=10)
+
+            docx.add_blank_paragraph()
+
+        single_yuedu(index, article_single)
+
+        tail_zhushi = f"""阅读计时在此结束。
+今日总计阅读量 {all_article_length} 词,用时________秒,整份学案共有_______个题目答对。
+答题用时、答对题数两项数据请抄写到筛查反馈表页脚。
+请翻译并熟读下列句子。熟读时停顿要合理,并做到大声、清晰、流畅、快速;口音可逐步调整,不必立刻追求十分标准。"""
+        docx.add_paragraph(tail_zhushi, size=10.5)
+        docx.add_blank_paragraph()
+
+   
+    def seven_to_five(index, article_single):
+        article_id = article_single['Id']
+        article_length = article_single['AllWordAmount']
+       
+        strange_words_ids = [i['MeanId'] for i in json_data['StrangeWords']]
+       
+        explanatory_words_ids = [i['MeaningId'] for i in article_single['ExplanatoryWords']]
+
+       
+        select_text = []
+        for ques_index, s_candidates in enumerate(article_single['Candidates'], start=1):
+            single_select_text = ''
+            single_select_text += s_candidates['Label'] + '. '
+            participle = s_candidates['Participle'] 
+            if participle:
+                single_select_text += participle
+            else:
+                text = s_candidates['Text']
+                single_select_text += text
+
+            select_text.append(f"{single_select_text}")
+
+       
+        all_select_text = "\n".join(select_text)
+
+       
+        article_main: str = article_single['English'] + "\n\n郑重提示:认真看完全文再看问题。\n\n" + all_select_text
+        article_main_list = article_main.split(" ")
+
+       
+        explanatory_words = "\n\n".join(
+            [f"{index}. {i['Spell']} {i['SymbolsEn']} {i['SymbolsAm']} {i['Meaning']}" for index, i in
+             enumerate(article_single['ExplanatoryWords'], start=1)])
+
+        sub_title_maker(docx, "阅读提升练", "智能匹配难度,轻松提升阅读", "春笋智学, 高效学习专家")
+        tb = Table(docx, 1, 1, tb_name="真题强化练", border=True)
+        tb.set_tb_colum_width(0, 460)
+        text = ["阅读中不认识的单词,尽量猜测词义,并用红笔加以标记。\n",
+                "读完全文后,可查字典,并抄在右侧批注区,不要在原文上注释。复习时不必通读全文,结合上下文能回忆起标记词的词义即可,想不起的再对照批注区。\n",
+                "7 选 5 题型是测试学生对文章理解程度的好题型,但因打破里文章的连贯性,故不是训练阅读能力的好素材。建议学生在阅读基本能力(理解度、速度、难度)达标后再开展 7 选 5 题型训练。若不能胜任本练习,请回到词汇与阅读训练,先打好基础。\n",
+                "阅读计时从此处开始,请按顺序完成阅读,并注意记录时间。\n",
+                ]
+        cell_p = tb.get_cell_paragraph(0, 0, align="left", dq=10, dh=10)
+        p = ParagraphBase(cell_p)
+        for t in text:
+            p.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+            p.add_run_to_p(t, size=10)
+
+        docx.add_blank_paragraph()
+
+        tb1 = Table(docx, 1, 3, tb_name="图片小标题")
+        tb1.set_tb_colum_width(width=[90, 370, 5])
+
+        tb1_p = ParagraphBase(tb1.get_cell_paragraph(0, 0, align="left"))
+        tb1_p.add_pic("make_docx_demo/static/lianxi1.jpg", width=2.5)
+        tb1.set_cell_text(0, 1, f"本篇编号:{article_id},篇幅(含问题选项):{article_length} 词	阅读开始时间:  点	 分	 秒",
+                          size=9.5, underline=True, border=False, align="left")
+
+        tb2 = Table(docx, rows=1, cols=2, border=True, tb_name="七选五")
+        tb2.set_tb_colum_width(width=[320, 140])
+
+       
+        tb2_p = ParagraphBase(tb2.get_cell_paragraph(0, 0, align="left"))
+        for w in article_main_list:
+            word = re.search(r"\[(\d+)]", w)
+            if word:
+                w = w[:w.find('[')]
+                meaning_id = int(word.group(1))
+                if meaning_id in strange_words_ids:
+                    tb2_p.add_run_to_p(w + ' ', size=10.5, bold=True)
+                elif meaning_id in explanatory_words_ids:
+                    tb2_p.add_run_to_p(w + ' ', size=10.5, italic=True)
+                else: 
+                    tb2_p.add_run_to_p(w + ' ', size=10.5)
+            else:
+                tb2_p.add_run_to_p(w + ' ', size=10.5)
+
+        tb2.set_cell_text(0, 1, explanatory_words, size=10.5, font_color=(80, 80, 80), align="left")
+
+        docx.add_blank_paragraph()
+        tail_zhushi = """完成时间:_____点_____分_____秒,用时:_____秒
+阅读计时在此结束。
+今日总计阅读量 292 词,用时________秒,整份学案共有_______个题目答对。
+答题用时、答对题数两项数据请抄写到筛查反馈表页脚。"""
+        docx.add_paragraph(tail_zhushi, size=10.5)
+        docx.add_blank_paragraph()
+
+    "判断题型;根据题型选择----------------------------"
+    for index, article_single in enumerate(json_data['Articles'], start=1):
+        article_type = article_single['Category'] 
+
+        article_type_select = {1: reading, 2: wanxing, 3: seven_to_five}
+
+        assert article_type in article_type_select
+        article_type_select[article_type](index, article_single) 
+
+    docx.add_page_section()
+
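The wanxing, reading and seven_to_five renderers above repeat the same token loop: each word may end with a [meaning-id] tag that is stripped and then used to pick bold (a studied word), italic (an annotated word) or plain formatting. A factored-out sketch of that loop, as a hypothetical helper that is not part of the commit:

    import re

    def add_tagged_word(p, w, strange_words_ids, explanatory_words_ids, size=10.5):
        # Strip a trailing [id] tag from the token and render it bold / italic / plain.
        m = re.search(r"\[(\d+)]", w)
        if m:
            w = w[:w.find('[')]
            meaning_id = int(m.group(1))
            if meaning_id in strange_words_ids:
                p.add_run_to_p(w + ' ', size=size, bold=True)
                return
            if meaning_id in explanatory_words_ids:
                p.add_run_to_p(w + ' ', size=size, italic=True)
                return
        p.add_run_to_p(w + ' ', size=size)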
+
+@time_use
+def section_8(docx: Word, json_data, *args, **kwargs):
+   
+    sub_title_maker(docx, "单词趣味填", "趣味练习,多维提升和巩固")
+    docx.add_pic_single_paragraph("make_docx_demo/static/happy_word.jpg", align="center", width=14.58)
+    docx.add_page_section()
+
+
+@time_use
+def section_9(docx: Word, json_data, *args, **kwargs):
+    def wanxing(index, article_single):
+        chinese_article = article_single['Chinese']
+        all_analysis = '' 
+
+        docx.add_paragraph("答案和解析", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True)
+        text = "做阅读题的目的是锻炼理解英语文本的能力,答题只是检验理解程度的手段。请尽量根据所给题眼理解解题依据。若需要看汉语解析才能明白,你需要回到词汇与阅读训练,并从较低难度入手,以便打好基础。"
+        docx.add_paragraph(text, size=9)
+
+       
+        for ques_index, question_item in enumerate(article_single['Questions'], start=1):
+            analysis = question_item['Analysis'].strip() 
+            abcd_label = '' 
+
+            candidates = question_item['Candidates']
+            for abcd_selected in candidates:
+                if abcd_selected['IsRight']: 
+                    abcd_label += abcd_selected['Label'].strip()
+
+            all_analysis += f"{ques_index}.\n{abcd_label}  {analysis}\n" 
+
+        docx.add_paragraph(all_analysis, size=9)
+        docx.add_paragraph("全文参考译文", chinese_font_name="微软雅黑", dq=15, dh=5, bold=True)
+        docx.add_paragraph(chinese_article, size=9, dq=5, dh=5, line_spacing=300)
+
+   
+    def reading(index, article_single):
+        all_analysis = '' 
+        all_difficult_sentences = [] 
+
+        chinese_article = article_single['Chinese']
+
+       
+        for ques_index, question_item in enumerate(article_single['Questions'], start=1):
+            analysis = question_item['Analysis'].strip("\n") 
+            abcd_label = '' 
+
+            candidates = question_item['Candidates']
+            for abcd_selected in candidates:
+                if abcd_selected['IsRight']: 
+                    abcd_label += abcd_selected['Label'].strip("\n")
+
+            all_analysis += f"{ques_index}.{abcd_label}  {analysis}\n"
+
+       
+        all_analysis += '\n'
+
+       
+        for difficult_sentence_item in article_single['DifficultSentences']:
+            all_difficult_sentences.append(difficult_sentence_item['Chinese'])
+
+        docx.add_paragraph("答案和解析", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True)
+        text = "做阅读题的目的是锻炼理解英语文本的能力,答题只是检验理解程度的手段。请尽量根据所给题眼理解解题依据。若需要看汉语解析才能明白,你可能需要暂时调低阅读难度。"
+        docx.add_paragraph(text, size=9)
+        docx.add_paragraph(all_analysis, size=9)
+
+        docx.add_paragraph("难句翻译参考译文", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True)
+        for index_, difficult_sentence in enumerate(all_difficult_sentences, start=1):
+            docx.add_paragraph(f'{index_}. {difficult_sentence}', size=9)
+
+        docx.add_paragraph("全文参考译文", chinese_font_name="微软雅黑", dq=15, dh=5, bold=True)
+        docx.add_paragraph(f"Passage {index}", chinese_font_name="微软雅黑", dq=15, dh=5, bold=True)
+        docx.add_paragraph(chinese_article, size=9)
+
+    def seven_to_five(index, article_single):
+        chinese_article = article_single['Chinese']
+        all_analysis = '' 
+
+        docx.add_paragraph("答案和解析", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True)
+        text = "做阅读题的目的是锻炼理解英语文本的能力,答题只是检验理解程度的手段。请尽量根据所给题眼理解解题依据。若需要看汉语解析才能明白,你需要回到词汇与阅读训练,并从较低难度入手,以便打好基础。"
+        docx.add_paragraph(text, size=9)
+       
+        for q_index, question_item in enumerate(article_single['Questions'], start=1):
+            analysis = question_item['Analysis'] 
+            abcd_label = '' 
+
+            candidates = question_item['Candidates']
+            for abcd_selected in candidates:
+                if abcd_selected['IsRight']: 
+                    abcd_label += abcd_selected['Label']
+            all_analysis += f"{q_index}.{abcd_label}  {analysis}\n"
+
+        docx.add_paragraph(all_analysis, size=9)
+        docx.add_paragraph("全文参考译文", chinese_font_name="微软雅黑", dq=15, dh=5, bold=True)
+        docx.add_paragraph("Passage 1", chinese_font_name="微软雅黑", dq=15, dh=5, bold=True)
+        docx.add_paragraph(chinese_article, size=9, dq=5, dh=5, line_spacing=300)
+
+    "判断题型;根据题型选择----------------------------"
+    sub_title_maker(docx, "解题自主纠", "自主学习,逐步养成良好学习习惯")
+    for index, article_single in enumerate(json_data['Articles'], start=1):
+        article_type = article_single['Category'] 
+        article_type_select = {1: reading, 2: wanxing, 3: seven_to_five}
+        assert article_type in article_type_select
+        article_type_select[article_type](index, article_single) 
+        docx.add_blank_paragraph()
+
+    docx.add_docx_component("make_docx_demo/word_component/blank.docx")
+    docx.add_page_section()
+
+
+@time_use
+def section_10(docx: Word, json_data, scanpage_format, **kwargs):
+    docx.add_paragraph("☆ 请写出词义,再对照筛査表批改。词义顺序可互换;答案意思相符即可,不要求一字不差。批改结果眷抄到筛査表。", size=9, dq=2,
+                       dh=2)
+    tb = Table(docx, 50, 4, tb_name="写出词义")
+    tb.set_tb_colum_width(width=[110, 120, 110, 120])
+
+    for row in range(50):
+        tb.set_cell_text(row, 0, str(row + 1) + " " + "rich", size=8.5, dq=1, dh=1, border=False)
+        tb.set_cell_text(row, 1, "□________________", size=10, dq=0, dh=0, border=False)
+        tb.set_cell_text(row, 2, str(row + 51) + " " + "rich", size=8.5, dq=1, dh=1, border=False)
+        tb.set_cell_text(row, 3, "□________________", size=10, dq=0, dh=0, border=False)
+    tb.set_row_height(13)
+    docx.add_page_break()
+
+    docx.add_paragraph("☆ 请在需要加强学习的词义前方框中划线,两头各超出 1 毫米为宜(示例:□☑52.example);请保持本表整洁并交回。", size=9, dq=2,
+                       dh=2)
+    tb2 = Table(docx, 25, 8, tb_name="划线表")
+    tb2.set_tb_colum_width(width=[57.5] * 8)
+
+    docx.add_blank_paragraph(dq=5, dh=5)
+
+    for row in range(25):
+        tb2.set_cell_text(row, 0, "[01] Tuesday", size=8.5, border=False, dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 1, "星期二", size=8.5, border="right", dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 2, "[01] Tuesday", size=8.5, border=False, dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 3, "星期二", size=8.5, border="right", dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 4, "[01] Tuesday", size=8.5, border=False, dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 5, "星期二", size=8.5, border="right", dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 6, "[01] Tuesday", size=8.5, border=False, dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 7, "星期二", size=8.5, border=False, dq=1.2, dh=1.2)
+
+    docx.set_page_column(5)
+    docx.add_docx_component("make_docx_demo/word_component/component.docx")
+    docx.end_page_column()
+
+    if scanpage_format == 3:
+        docx.add_page_section()
+
+
+def two_check_page(docx: Word, json_data, **kwargs):
+   
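+    # Each scan page is rendered as an optional blank fill-in sheet (empty_filter_page)
+    # followed by the check table that carries the QR-coded page id (filter_table_page).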
+    def empty_filter_page(class_name, student_name, page_title, page_sub_title, t_datetime, word_data_list):
+        if len(word_data_list) % 2 != 0:
+            word_data_list.append("") 
+
+        tb = Table(docx, 1, 3, tb_name="头部三元素")
+        tb.set_tb_colum_width(width=[40, 100, 100])
+
+        p_cell = tb.get_cell_paragraph(0, 0, dq=10)
+        p = ParagraphBase(p_cell)
+        p.add_pic("make_docx_demo/static/logo2.png", width=Inches(1.2)) 
+
+        tb.set_cell_text(0, 1, f"{class_name}\n{student_name}", border=False, size=8, dh=2)
+        tb.set_cell_text(0, 2, f"{page_title}\n{page_sub_title}", border=False, size=8, dh=2)
+
+        docx.add_paragraph("请写出词义,再对照筛查表批改。词义顺序可互换;答案意思相符即可,不要求一字不差。批改结果誊抄到筛查表。", size=9)
+
+        tb = Table(docx, rows=0, cols=4, tb_name="第一页筛查表")
+
+        tb.set_all_border_fastly(xml=True, outside_side_border=True, outside_side_border_size=5)
+
+        half_count = int(len(word_data_list) / 2) 
+        for index,row in enumerate(range(half_count)):
+            first_word, second_word = word_data_list[row],word_data_list[row + half_count]
+            cell3 = f"{index + 1 + half_count}. {second_word}" if second_word else ""
+            cell4 = "□ ___________________________" if second_word else ""
+
+            data = [f"{index + 1}. {first_word}", "□ ___________________________", cell3, cell4]
+            tb.add_table_row_data_xml_fastly(data, font_size=[10.5, 9, 10.5, 9])
+        tb.set_row_height(13.8)
+        tb.set_table_width_xml([2124, 3257, 2140, 3257])
+        blank_count = " " * 80
+        p = docx.add_blank_paragraph(dq=5)
+        p.add_run_to_p(f"{t_datetime} {page_title}-{page_sub_title}{blank_count}", size=8, chinese_font_name="仿宋", font_name="仿宋")
+        docx.add_page_break() 
+
+   
+    def filter_table_page(page_id, class_name, student_name, page_title, page_sub_title, t_datetime,
+                          foot_description, foot_description2, article_type, word_data_list2):
+        if len(word_data_list2) % 2 != 0:
+            word_data_list2.append(["", ""]) 
+
+        tb = Table(docx, 1, 5, tb_name="头部五元素")
+        tb.set_tb_colum_width(width=[40, 130, 130, 150, 70])
+
+        p_cell = tb.get_cell_paragraph(0, 0, dq=10)
+        p = ParagraphBase(p_cell)
+        p.add_pic("make_docx_demo/static/logo2.png", width=Inches(1.2))
+
+        tb.set_cell_text(0, 1, f"{class_name}\n{student_name}", border=False, size=8)
+        tb.set_cell_text(0, 2, f"{page_id}", border=False, size=16, dh=2, bold=True, font_name="黑体")
+        tb.set_cell_text(0, 3, f"{page_title}\n{page_sub_title}", border=False, size=8)
+
+        p_cell = tb.get_cell_paragraph(0, 4)
+        p = ParagraphBase(p_cell)
+        p.add_pic(qrcode_maker(f"{page_id}"), width=Inches(0.5))
+
+        pp = docx.add_blank_paragraph()
+        p_base = ParagraphBase(pp)
+        p_base.p.add_run_to_p("请在需要加强学习的词义前方框中划线,两头各超出1毫米为宜(示例:", size=9)
+        p_base.p.add_pic("make_docx_demo/static/line_example.png", width=Inches(0.8))
+        p_base.p.add_run_to_p(" );请保持本表整洁并交回。", size=9)
+
+        tb = Table(docx, rows=0, cols=4, tb_name="第二页筛查表")
+
+        ## Row-major ("1 2 / 3 4" across) layout removed; the column-major layout below is used instead.
+
+        ## Column-major layout: entries 1..N/2 fill the left pair of columns, N/2+1..N the right pair.
+        total_row = int(len(word_data_list2) / 2)
+        for row in range(total_row):
+            spell1, meaning1 = word_data_list2[row]
+            spell2, meaning2 = word_data_list2[total_row + row]
+
+            cell3 = f"{total_row + row + 1}. {spell2}" if spell2 else ""
+            cell4 = f"□ {meaning2}" if meaning2 else ""
+
+            data = [f"{row + 1}. {spell1}", f"□ {meaning1}", cell3, cell4] 
+            tb.add_table_row_data_xml_fastly(data, font_size=[10.5, 9, 10.5, 9])
+
+        tb.set_all_border_fastly(xml=True, outside_side_border=True, outside_side_border_size=5)
+        tb.set_row_height(13.6)
+        tb.set_table_width_xml([2124, 3257, 2140, 3257])
+        if article_type == 1: 
+            docx.add_paragraph(f"{t_datetime} {page_title}-{page_sub_title}{foot_description}", size=8, chinese_font_name="仿宋",
+                               font_name="仿宋", dq=5)
+            docx.add_paragraph(foot_description2, align="right", size=8, chinese_font_name="仿宋")
+        else:
+            docx.add_paragraph(f"{t_datetime} {page_title}-{page_sub_title}{foot_description}", size=8, chinese_font_name="仿宋",
+                               font_name="仿宋", dq=5)
+
+
+   
+    student_name = json_data.get("StudentInfo").get("StudentName", '') 
+    class_name = json_data.get("StudentInfo").get("ClassName", '') 
+    t_datetime = time.strftime("%Y-%m-%d %H:%M", time.localtime()) 
+    article_type = json_data['Articles'][0]['Category'] 
+    is_add_empty_filter_page = json_data['Config']['AddEmptyFilterPage'] 
+
+    """---------------------------------------------------------------------------------"""
+    for index, page in enumerate(json_data['ScreeningScanPages'], start=1):
+        page_id = str(page['PageId']).rjust(11, "0")
+
+       
+        if index >= 2:
+            docx.add_page_break()
+
+        page_title = page['Title'] 
+        page_sub_title = page['SubTitle'] 
+        foot_description = page['FootDescription'] 
+        foot_description2 = page['FootDescription2'] 
+
+        word_data_list1 = []
+        word_data_list2 = []
+        for i in page['FilterTable']['Items']: 
+            word_data_list1.append(i['Spell'])
+            word_data_list2.append([i['Spell'], i['Meaning']])
+
+       
+        if is_add_empty_filter_page:
+            empty_filter_page(class_name, student_name, page_title, page_sub_title, t_datetime, word_data_list1)
+
+       
+        filter_table_page(page_id, class_name, student_name, page_title, page_sub_title, t_datetime,
+                          foot_description, foot_description2, article_type, word_data_list2)
+
+
+@time_use
+def other(docx, json_data, **kwargs):
+   
+    sections = docx.doc.sections
+    for section in sections[:-1]:
+        section.top_margin = Inches(0.3)
+        section.bottom_margin = Inches(0.3)
+        section.left_margin = Inches(0.8)
+        section.right_margin = Inches(0.8)
+
+    sections[-1].top_margin = Inches(0.1)
+    sections[-1].bottom_margin = Inches(0.1)
+    sections[-1].left_margin = Inches(0.5)
+    sections[-1].right_margin = Inches(0.5)
+
+    header_maker(docx, json_data)
+
+
+def start_make_word(json_data, document_format, scanpage_format):
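+    # document_format: 1 -> return the .docx, otherwise convert to .pdf.
+    # scanpage_format: 1 -> append two_check_page, 2 -> append section_10,
+    # 3 -> append both (mirrors the branches below).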
+    try:
+        docx = Word(save_file_name="develop.docx", start_template_name="make_docx_demo/word_component/start_template.docx")
+        menu = [section_1, section_2, section_3, section_4, section_4_1,
+                section_5, section_6, section_7, section_8, section_9, ]
+        if scanpage_format == 1:
+            menu.append(two_check_page)
+        elif scanpage_format == 2:
+            menu.append(section_10)
+        elif scanpage_format == 3:
+            menu.append(section_10)
+            menu.append(two_check_page)
+        menu.append(other)
+
+        for s in menu:
+            s(docx=docx, json_data=json_data, scanpage_format=scanpage_format)
+
+       
+        docx.save_docx()
+        if document_format == 1:
+            return "develop.docx"
+        else:
+            convert_word_to_pdf("develop")
+            return "develop.pdf"
+
+    except Exception as e:
+        log_err_e(e)
+
+
+if __name__ == '__main__':
+    import os
+
+    t = time.time()
+    os.chdir("..")
+
+   
+    start_make_word(test_json5, 1, 1)
+    print(time.time() - t)

+ 1162 - 0
make_docx_demo/main_word_applet.py

@@ -0,0 +1,1162 @@
+# -*- coding: UTF-8 -*-
+"""专为鲍利提分小程序,制作的word文档;apifox接口在-单词教学宝-词汇突击学案文档生成接口"""
+
+import time
+import re
+import os
+import math
+import yaml
+from random import randint, shuffle
+
+from docx.shared import Pt, Inches, Cm, RGBColor
+from docx.enum.text import WD_COLOR_INDEX
+from make_docx_demo.data import *
+from docx_base import Word, Table, hex_to_rgb, rgb_to_hex, ParagraphBase
+from make_docx_demo.docx_other_func import time_use, qrcode_maker, get_weekday
+from tools.loglog import logger, log_err_e
+from make_docx_demo.word2pdf import convert_word_to_pdf
+from make_docx_demo.get_standard_data import get_standard_data
+from common.split_text import split_text_to_word_punctuation
+from config.read_config import address
+
+num_dict = {1: "❶", 2: "❷", 3: "❸", 4: "❹", 5: "❺", 6: "❻", 7: "❼", 8: "❽", 9: "❾",
+            10: "❿", 11: "⓫", 12: "⓬", 13: "⓭", 14: "⓮", 15: "⓯", 16: "⓰", 17: "⓱", 18: "⓲", 19: "⓳", 20: "⓴"}
+
+
+
+@time_use
+def header_maker(docx: Word, json_data):
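+    # Build a five-column header table in light-grey text for every section except the first
+    # (cover page) and the last; the last section gets an emptied, unlinked header instead.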
+    exercise_id = str(json_data.get("ExerciseId", "")).rjust(11, "0") 
+    exercise_title = json_data.get("ExerciseTitle", "") 
+    exercise_level = json_data['StudentInfo']['StudentStudy']['ReadingLevel'] 
+
+    student_name = json_data.get("StudentInfo").get("StudentName", '')
+    class_name = json_data.get("StudentInfo").get("ClassName", '').replace("词汇突击", "")
+    t_date = time.strftime("%Y-%m-%d", time.localtime())
+    t_weekday = get_weekday()
+    t_time = time.strftime("%H:%M:%S", time.localtime())
+
+    for i in range(1, len(docx.doc.sections) - 1):
+        tb_header = docx.add_header_table(rows=1, cols=5, section_index=i, tb_name="页眉表格")
+
+        tb_header.set_cell_text(0, 0, "鲍利提分", bold=True, size=16, color=(220, 220, 220), border=False, chinese_font_name="黑体")
+        tb_header.set_cell_text(0, 1, f"{class_name}\n{student_name}", size=8, border=False, color=(220, 220, 220))
+        tb_header.set_cell_text(0, 2, f"词汇训练\n{exercise_level}级", size=8, border=False, color=(220, 220, 220))
+        tb_header.set_cell_text(0, 3, f"{exercise_id}", bold=True, size=24, border=False, color=(220, 220, 220))
+        tb_header.set_cell_text(0, 4, f"{t_date}\n{t_weekday}\n{t_time}", size=8, border=False, color=(220, 220, 220))
+
+        tb_header.set_tb_colum_width(width=[100, 70, 70, 150, 80])
+       
+
+    target_section = docx.doc.sections[-1] 
+    target_section.header.is_linked_to_previous = False
+   
+    for paragraph in target_section.header.paragraphs:
+        paragraph.clear() 
+   
+   
+
+    target_section.header_distance = 0
+    target_section.footer_distance = 280000
+
+
+@time_use
+def sub_title_maker(docx: Word, main_title, sub_title_name1, sub_title_name2='鲍利提分,你的智能教练'):
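+    # Draw a section banner: a black title block flanked by two grey rule lines;
+    # the two sub-titles are drawn over the left and right rule segments.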
+    p = docx.add_blank_paragraph()
+    line_width = 200
+    main_rect_x = line_width + 10
+    main_rect_width = 150 
+
+    right_line_x = main_rect_x + main_rect_width + 10 
+
+    p.add_rectangle(main_title, x=main_rect_x, y=4, fill_color="000000", width=main_rect_width, height=48, font_color="ffffff",
+                    font_size=18)
+    p.add_rectangle("", x=0, y=50, boder_color="808080", width=line_width, height=2)
+    p.add_rectangle("", x=right_line_x, y=50, boder_color="808080", width=line_width, height=2)
+
+    p.add_rectangle(f"【{sub_title_name1}】", x=0, y=20, width=line_width, height=40, font_size=8, chinese_font="宋体")
+    p.add_rectangle(sub_title_name2, x=right_line_x, y=20, width=line_width, height=40, font_color="808080", font_size=8,
+                    chinese_font="宋体")
+    docx.add_blank_paragraph()
+    docx.add_blank_paragraph()
+    docx.add_blank_paragraph()
+
+
+@time_use
+def section_1(docx: Word, json_data, *args, **kwargs):
+   
+    exercise_id_int = json_data.get("ExerciseId", "") 
+    student_name = json_data.get("StudentInfo").get("StudentName", '') 
+    student_stage = json_data.get("StudentInfo").get("StudentStage") 
+    grade_name = {1: "小学", 2: "初中", 3: "高中"}.get(student_stage)
+    t_date_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) 
+
+   
+    totalVocabulary, readingAccuracy, readingLevel, readingSpeed = get_standard_data(student_stage)
+
+    FirstVocabulary = json_data['StudentInfo']['StudentStudy']['FirstVocabulary'] 
+    Vocabulary = json_data['StudentInfo']['StudentStudy']['Vocabulary'] 
+    ReadingVolume = json_data['StudentInfo']['StudentStudy']['ReadingVolume'] 
+
+   
+   
+   
+    r6 = json_data['StudentInfo']['StudentStudy']['ReadingLevel'] 
+
+   
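+    # r7: total strange-word count across all exercises; r8: current reading level;
+    # difficulty_value: mean article Score in the first exercise (0 when there are no articles).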
+    r7 = len([strange_words for exercise in json_data['WordAndArticleContents'] for strange_words in exercise['StrangeWords']]) 
+    r8 = r6 
+    multi_article_difficulty = [article_obj['Score'] for article_obj in json_data['WordAndArticleContents'][0]['Articles']] 
+    difficulty_value = sum(multi_article_difficulty) // len(multi_article_difficulty) if multi_article_difficulty else 0 
+
+    "开始版面-------------------------------------------------"
+
+    docx.add_paragraph(text="鲍利提分个性化学案", size=20, align="center", bold=True)
+    docx.add_paragraph(text="AI解码英语基因,智能重组高分密码", size=14, align="center")
+
+    docx.add_blank_paragraph()
+    docx.add_paragraph(text="学生基本情况", size=16, align="left", bold=True, dq=10, dh=5)
+    t1 = Table(docx, 0, 3, border=True, tb_name="学生基本情况")
+    t1.add_table_row_data_xml_fastly(["姓名", "年级", "初始词汇量"])
+    t1.add_table_row_data_xml_fastly([student_name, grade_name, FirstVocabulary])
+    t1.add_table_row_data_xml_fastly(["当前词汇量", "学段总词汇量", "累计阅读量"])
+    t1.add_table_row_data_xml_fastly([Vocabulary, totalVocabulary, ReadingVolume])
+    t1.set_all_border_fastly(xml=True)
+    t1.set_ALIGN_VERTICAL()
+    t1.set_row_height(row_height=20)
+
+    docx.add_blank_paragraph()
+    docx.add_paragraph(text="本次学案难度情况", size=16, align="left", bold=True, dq=10, dh=5)
+    t3 = Table(docx, 0, 4, border=False, tb_name="本次学案难度情况")
+    t3.add_table_row_data_xml_fastly(["指标", "生词数量", "阅读难度等级", "文章词汇难度值"])
+    t3.add_table_row_data_xml_fastly(["本次内容", f"{r7}个", r8, difficulty_value])
+    t3.set_all_border_fastly(xml=True)
+    t3.set_ALIGN_VERTICAL()
+    t3.set_row_height(row_height=20)
+
+    docx.add_blank_paragraph()
+
+    docx.add_paragraph(text="练习提醒Tips", size=16, align="left", bold=True, dq=10, dh=5)
+    t4 = Table(docx, 0, 1, border=False, tb_name="练习提醒Tips")
+    text = "请认真阅读,不可急于求成,要确保能够理解每一句话,不要满足于略知概要,不要跳读略读,不要猜答案,加油!"
+    t4.add_table_row_data_xml_fastly([text], )
+    t4.set_all_border_fastly(xml=True)
+    t4.set_ALIGN_VERTICAL()
+    t4.set_row_height(row_height=50)
+    t4.set_tb_colum_width(0, 500)
+
+   
+    docx.add_paragraph(text="多媒体辅助", size=16, align="left", bold=True, dq=10, dh=5)
+    docx.add_paragraph(text="需要示范的的学员,扫以下二维码获取音频、视频示范:", size=12, align="left", dq=5, dh=5)
+    p = docx.add_blank_paragraph()
+
+   
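+    # The QR code points to this exercise's audio/video demo page; `address` is read from config.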
+    img_io = qrcode_maker(full_url=f"{address}/link?type=exercise&id={exercise_id_int}&from=bltf")
+    p.add_pic(img_io, width=2)
+    img_io.close()
+
+    docx.add_paragraph(text=f"生成时间: {t_date_time}", size=12, align="left", dq=10)
+
+    docx.add_page_section()
+
+
+@time_use
+def section_4(docx: Word, json_data, *args, **kwargs):
+    student_name = json_data.get("StudentInfo").get("StudentName", '')
+    title_info = "\n".join(json_data.get("Title"))
+
+    if title_info:
+        docx.add_paragraph(f"{student_name}  同学:", align="center", bold=True, dq=5, dh=5)
+        p1 = docx.add_blank_paragraph()
+        p1.add_run_to_p(title_info, size=10)
+
+    sub_title_maker(docx, "词汇精准学", "智能定制你的专属英语DNA图谱")
+
+    tb = Table(docx, 1, 1, border=True, tb_name="词汇精准学")
+    tb.set_tb_colum_width(0, 460)
+    tb.set_cell_text(0, 0, "按顺序朗读生词表两遍。\n(1)用红笔在不会的单词序号上打星号,增加记忆。\n(2)朗读例句,不认识的部分参照译文理解。",
+                     align="left", size=10, dq=10, dh=10)
+    docx.add_blank_paragraph()
+
+
+@time_use
+def section_4_1(docx: Word, json_data, *args, **kwargs):
+    def insert_content(row, col, data, qrcode_result: dict):
+
+        cell_outside = tb_outside.get_cell(row, col, delete_default_para=True)
+        tb_inside = Table(cell_outside, rows=5, cols=3, tb_name="内部内容")
+
+       
+        tb_inside.merge_cell(0, 0, 0, 2)
+        tb_inside.merge_cell(1, 0, 1, 2) 
+       
+        tb_inside.merge_cell(2, 0, 2, 2) 
+        tb_inside.merge_cell(3, 0, 3, 2) 
+        tb_inside.merge_cell(4, 0, 4, 2) 
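+        # Inner table layout after the merges (one cell per row):
+        #   row 0: circled number + headword       row 1: EN/AM phonetics on a grey band
+        #   row 2: part of speech + meanings       row 3: "词汇变形" + inflected forms
+        #   row 4: "例句" + example sentence with its translation
+        # Numbering runs across the outer columns: left column odd, right column even.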
+
+       
+
+       
+        num_calucate = 2 * row + 1 if col == 0 else 2 * row + 2
+        p = ParagraphBase(tb_inside.get_cell_paragraph(0, 0, align="left"))
+        p.add_run_to_p(num_dict[num_calucate], bold=True, size=22, font_name="MS Gothic")
+        p.add_run_to_p(' ' + data[0], bold=True, size=20)
+        tb_inside.set_cell_text(row=1, column=0, cell_text=data[1] + "  " + data[2], border=False, size=10, align="left",
+                                bk_color=(240, 240, 240))
+
+       
+       
+       
+       
+       
+       
+       
+
+       
+        cell_p = tb_inside.get_cell_paragraph(2, 0, align="left")
+        cell_p_1 = ParagraphBase(cell_p)
+        cell_p_1.add_run_to_p(data[3], size=10, bold=True) 
+        cell_p_1.add_run_to_p("   " + data[4], size=8) 
+
+        cell_p = tb_inside.get_cell_paragraph(3, 0, align="left")
+        cell_p_1 = ParagraphBase(cell_p)
+        cell_p_1.add_run_to_p(data[5], size=10, bold=True)
+        cell_p_1.add_run_to_p(" " + data[6], size=8)
+
+        cell_p = tb_inside.get_cell_paragraph(4, 0, align="left")
+        cell_p_1 = ParagraphBase(cell_p)
+        cell_p_1.add_run_to_p(data[7], size=10, bold=True)
+        cell_p_1.add_run_to_p(" " + data[8], size=8)
+
+    properties_chinese_map = {"adj": "形容词", "n": "名词", "interj": "感叹词", "conj": "连词", "num": "数字", "art": "冠词",
+                              "pron": "代词", "adv": "副词", "prep": "介词", "v": "动词"}
+    strange_words_data = [] 
+    strange_words = json_data.get('StrangeWords')
+    qrcode_thread = [] 
+    qrcode_result = {}
+
+    for item in strange_words:
+        spell = item['Spell'] 
+        word_id = item['WordId'] 
+        en = "" if not item.get("SymbolsEn", "") else item.get("SymbolsEn")
+        am = "" if not item.get("SymbolsAm", "") else item.get("SymbolsAm")
+
+        symbols_en = "英" + f'[{en}]' 
+        symbols_am = "美" + f'[{am}]' 
+
+       
+       
+       
+       
+       
+
+        word_properties = " ".join([properties_chinese_map.get(i, "") for i in item['WordProperties']]) 
+        word_meanings = item.get('Meaning', "") 
+        word_changes_list = []
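+        # Skip inflections whose Type marks the base form ("原型") or a case-only variant
+        # ("大小写"); list the remaining forms one per line.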
+        for idx, s in enumerate(item["WordChanges"],start=1):
+            s_type,s_spell = s['Type'], s['Spell']
+            if "原型" in s_type or "大小写" in s_type:
+                continue
+            tail = '\n' if idx != len(item["WordChanges"]) else ''
+            word_changes_list.append(f"{s_spell}  {s_type}{tail}")
+        word_changes = "".join(word_changes_list)
+
+        if item['Sentences']:
+            sentences = item['Sentences'][0]['English'] + '\n' + item['Sentences'][0]['Chinese']
+        else:
+            sentences = ""
+       
+        single_word_tuple = (spell, symbols_en, symbols_am, word_properties, word_meanings,
+                             "词汇变形", word_changes, "例句", sentences)
+        strange_words_data.append(single_word_tuple)
+
+    rows = math.ceil(len(strange_words_data) / 2) 
+    tb_outside = Table(docx, rows=rows, cols=2, tb_name="外层框架")
+    tb_outside.set_tb_colum_width(width=[230, 230])
+
+    for t in qrcode_thread:
+        t.join()
+
+    for row in range(rows):
+        for col in range(2):
+            try:
+                data_item = strange_words_data.pop(0)
+                insert_content(row, col, data_item, qrcode_result)
+            except IndexError:
+                break
+
+    docx.add_page_section()
+
+
+@time_use
+def section_5(docx: Word, json_data, *args, **kwargs):
+   
+    copy_word_list = [i['Meaning'] for i in json_data.get('StrangeWords')]
+    random_copy_word_list = copy_word_list * 3 
+    shuffle(random_copy_word_list)
+
+   
+    first_copy_word_list = copy_word_list.copy()
+    copy_word_list_add_num = [f"{i} ({idx})" for idx, i in enumerate(first_copy_word_list, start=1)]
+    shuffle(copy_word_list_add_num)
+    total_copy_word_list = copy_word_list_add_num + random_copy_word_list
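+    # Each meaning therefore appears four times in the copy list: once numbered (shuffled)
+    # plus three shuffled, unnumbered repetitions.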
+
+    sub_title_maker(docx, "单词高效记", "会读会写才算真学会")
+    tb = Table(docx, 1, 1, tb_name="高效速记", border=True)
+    tb.set_tb_colum_width(0, 460)
+    text = ["请在横线上写下对应单词,每格写一遍,尽量默写,默写不出的,可查阅生词表;\n",
+            "书写时保持工整;每写完一个单词小声念一遍词义与单词。\n"]
+    cell_p = tb.get_cell_paragraph(0, 0, align="left")
+    p = ParagraphBase(cell_p)
+    p.add_run_to_p("                        高效速记\n", size=16, bold=True, )
+    for t in text:
+        p.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+        p.add_run_to_p(t, size=10)
+    docx.add_blank_paragraph()
+
+    total_count = len(total_copy_word_list)
+    half_count = int(total_count / 2)
+    tb2 = Table(docx, half_count + 1, 4, tb_name="高效速记下面的单词")
+    for row in range(total_count):
+        data = total_copy_word_list[row]
+        if row < half_count:
+            tb2.set_cell_text(row, 0, data, size=9, align="right", border=False, dq=2.5, dh=2)
+            tb2.set_cell_text(row, 1, str(row + 1) + "." + "_" * 20, size=9, align="left", border=False, dq=2.5, dh=2)
+        else:
+            tb2.set_cell_text(row - half_count, 2, data, size=9, align="right", border=False, dq=2.5, dh=2)
+            tb2.set_cell_text(row - half_count, 3, str(row + 1) + "." + "_" * 20, size=9, align="left", border=False, dq=2.5, dh=2)
+
+    tb2.set_tb_colum_width(width=[120, 110] * 2)
+
+    docx.add_page_section()
+
+
+@time_use
+def section_6(docx: Word, json_data, *args, **kwargs):
+    example_sentence = [f"{index}. {i['Sentences'][0]['English']}  ({i['Spell']})" for index, i in
+                        enumerate(json_data['StrangeWords'], start=1) if i['Sentences']]
+    sub_title_maker(docx, "例句填填看", "记词义,练拼写,学的快")
+    tb = Table(docx, 1, 1, tb_name="例句填填看", border=True)
+    tb.set_tb_colum_width(0, 460)
+    text = ["请在横线上写下单词在例句中的词义,若想不起来,可随时到例句答案表中查看。\n",
+            "参阅过答案的例句,请在句前的“□”中标记问号,以便复习回顾。\n",
+            "单词有多个意思的,应只填写适合语境的意思。\n",
+            "例句中有不熟悉的单词,请用斜线划掉,以便拍照报告给我们。"]
+    cell_p = tb.get_cell_paragraph(0, 0, align="left", dq=10, dh=10)
+    p = ParagraphBase(cell_p)
+    for t in text:
+        p.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+        p.add_run_to_p(t, size=10)
+
+    for i in example_sentence:
+        p = docx.add_blank_paragraph(dq=4, dh=4)
+        p.add_run_to_p("□  ", size=12, font_name="宋体")
+        p.add_run_to_p(i + "___________")
+
+    docx.add_page_section()
+
+
+@time_use
+def section_7(docx: Word, json_data, *args, **kwargs):
+   
+    def wanxing(index, article_single):
+        article_id = article_single['Id']
+        article_length = article_single['AllWordAmount']
+       
+        strange_words_ids = [i['MeanId'] for i in json_data['StrangeWords']]
+       
+        explanatory_words_ids = [i['MeaningId'] for i in article_single['ExplanatoryWords']]
+
+       
+        select_text = []
+        for ques_index, candidates in enumerate(article_single['Questions'], start=1):
+            single_select_text = ''
+            for s in candidates['Candidates']:
+                single_select_text += s['Label'] + '. '
+                participle = s['Participle'] 
+                if participle:
+                    single_select_text += participle + ' \n'
+                else:
+                    text = s['Text']
+                    single_select_text += text + ' \n'
+
+            select_text.append(f"{ques_index}. {single_select_text}")
+
+       
+        all_select_text = "\n".join(select_text)
+
+       
+        article_main: str = article_single['English'] + "\n\n郑重提示:认真看完全文再看问题。\n\n" + all_select_text
+        article_main_list = article_main.split(" ")
+
+       
+        explanatory_words = "\n\n".join(
+            [f"{index}. {i['Spell']} [{i['SymbolsEn']}] [{i['SymbolsAm']}] {i['Meaning']}" for index, i in
+             enumerate(article_single['ExplanatoryWords'], start=1)])
+
+        sub_title_maker(docx, "真题强化练", "智能匹配难度,轻松提升阅读")
+        tb = Table(docx, 1, 1, tb_name="真题强化练", border=True)
+        tb.set_tb_colum_width(0, 460)
+        text = ["练习中不认识的单词,尽量猜测词义,并用斜线划掉,以便拍照报告给我们。\n\n",
+                "答题完毕后,可查字典,并注释在右侧批注区,不要在原文上注释。复习时不必通读全文,结合上下文能回忆起标记词的词义即可,想不起的再对照批注区。\n",
+                "完形填空是优秀的测验题型,却不适合用于训练阅读能力和提升词汇量,所以建议在阅读能力(理解度、速度、难度)达标后再做完形填空题型练习。\n",
+                "阅读能力达标的同学,按三遍法做完形填空,基本可以达到满分。三遍法要求如下:\n",
+                "第一遍(理解):结合选项通读全文,以求理解文章主旨,但不动笔,以免形成成见。\n",
+                "第二遍(填空):通读全文,从候选词中选出适宜项目,将完整的单词填入空格,使文章连贯。\n",
+                "第三遍(核验):通读填空后的全文,确认上下文无矛盾之处。\n",
+                "三遍通读均应记录起讫时间,并将速度纳入能力考核项目。能力合格者,考试中也应有充裕时间完成以上 3 遍通读。\n",
+                "阅读计时从此处开始,请按顺序完成阅读,并注意记录时间。"]
+
+        text2 = [f"全题长度(含问题及选项):{article_length}; 编号:{article_id};\n",
+                 "第一遍(理解)开始时间:_________________     第二遍(填空)开始时间:_________________\n",
+                 "第三遍(核验)开始时间:_________________"]
+        cell_p = tb.get_cell_paragraph(0, 0, align="left", dq=10, dh=10)
+        p = ParagraphBase(cell_p)
+        for t in text:
+            p.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+            p.add_run_to_p(t, size=10)
+        for t2 in text2:
+            p.add_run_to_p(t2, size=10)
+
+        docx.add_blank_paragraph()
+
+        tb1 = Table(docx, 1, 3)
+        tb1.set_tb_colum_width(width=[90, 370, 5])
+
+        tb1_p = ParagraphBase(tb1.get_cell_paragraph(0, 0, align="left"))
+        tb1_p.add_pic("make_docx_demo/static/lianxi1.jpg", width=2.5)
+        tb1.set_cell_text(0, 1, f"篇幅(含问题选项):{article_length} 词	阅读开始时间:_____点_____分_____秒",
+                          size=9.5, border=False, align="left")
+
+        tb2 = Table(docx, rows=1, cols=2, border=True, tb_name="完形填空")
+        tb2.set_tb_colum_width(width=[320, 140])
+
+       
+       
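+        # Render the passage word by word: a trailing "[MeaningId]" marker selects bold
+        # (strange word) or italic (glossed word) and is then stripped from the text.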
+        tb2_p = ParagraphBase(tb2.get_cell_paragraph(0, 0, align="left"))
+        for w in article_main_list:
+            word = re.search(r"\[(\d+)]", w)
+            if word:
+                w = w[:w.find('[')]
+                meaning_id = int(word.group(1))
+                if meaning_id in strange_words_ids:
+                    tb2_p.add_run_to_p(w + ' ', size=10.5, bold=True)
+                elif meaning_id in explanatory_words_ids:
+                    tb2_p.add_run_to_p(w + ' ', size=10.5, italic=True)
+                else: 
+                    tb2_p.add_run_to_p(w + ' ', size=10.5)
+            else:
+                tb2_p.add_run_to_p(w + ' ', size=10.5)
+
+        tb2.set_cell_text(0, 1, explanatory_words, size=10.5, font_color=(80, 80, 80), align="left")
+
+        docx.add_blank_paragraph()
+        tail_zhushi = """第一遍(理解)结束时间:__________用时:____秒
+第二遍(填空)结束时间:__________用时:____秒
+第三遍(核验)结束时间:__________用时:____秒
+总计用时:____分____秒
+"""
+        docx.add_paragraph(tail_zhushi, size=10.5)
+        docx.add_blank_paragraph()
+
+   
+    def reading(index, article_single):
+
+        def single_yuedu(index, a):
+            article_id = a['Id']
+            article_length = a['AllWordAmount'] 
+
+            strange_words_ids = set() 
+            explanatory_words_ids = set() 
+            bold_word = set() 
+            italics_word = set() 
+            italics_index_dict = {} 
+
+            for i in json_data['StrangeWords']:
+                strange_words_ids.add(i['MeanId'])
+                bold_word.add(i['Spell'])
+                bold_word.update([change_word['Spell'] for change_word in i['WordChanges']])
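+            # Map each glossed word (and its inflections) to a "[n]" marker that matches
+            # the numbered entries in the right-hand gloss column.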
+            for italics_index,ii in enumerate(a['ExplanatoryWords'], start=1):
+                explanatory_words_ids.add(ii['MeaningId'])
+                italics_word.add(ii['Spell'])
+                if 'WordChanges' in ii:
+                    italics_word.update([change_word['Spell'] for change_word in ii['WordChanges']])
+                    italics_index_dict.update({change_word['Spell']:f"[{italics_index}]" for change_word in ii['WordChanges']})
+               
+                italics_index_dict[ii['MeaningId']] = f"[{italics_index}]"
+                italics_index_dict[ii['Spell']] = f"[{italics_index}]"
+
+           
+            select_text = []
+            for ques_index, candidates in enumerate(a['Questions'], start=1):
+                single_select_text = ''
+               
+                subject = candidates['Subject'] + '\n' 
+                for s in candidates['Candidates']:
+                    single_select_text += s['Label'] + '. ' 
+                    participle = s['Participle'] 
+                    if participle:
+                        single_select_text += participle + ' \n'
+                    else:
+                        text = s['Text']
+                        single_select_text += text + ' \n'
+                select_text.append(str(ques_index) + ". " + subject + single_select_text)
+
+           
+            all_select_text = "\n".join(select_text)
+
+           
+            article_main: str = a['English'] + "\n\n郑重提示:认真看完全文再看问题。\n" + all_select_text
+            article_main_list = split_text_to_word_punctuation(article_main)
+
+           
+            explanatory_words = "\n\n".join(
+                [f"{index}. {i['Spell']}\n [{i['SymbolsEn']}] [{i['SymbolsAm']}]\n {i['Meaning']}" for index, i in
+                 enumerate(a['ExplanatoryWords'], start=1)])
+
+            tb1 = Table(docx, 1, 3, tb_name="图片小标题")
+            tb1.set_tb_colum_width(width=[90, 370, 5])
+
+            tb1_p = ParagraphBase(tb1.get_cell_paragraph(0, 0, align="left"))
+            tb1_p.add_pic(f"make_docx_demo/static/lianxi{index}.jpg", width=2.5)
+            tb1.set_cell_text(0, 1, f"篇幅(含问题选项):{article_length} 词	阅读开始时间:_____点_____分_____秒",
+                              size=9.5, border=False, align="left")
+
+            tb2 = Table(docx, rows=1, cols=2, border=True, tb_name="阅读")
+            tb2.set_tb_colum_width(width=[320, 140])
+
+           
+            tb2_p = ParagraphBase(tb2.get_cell_paragraph(0, 0, align="left"))
+            for w in article_main_list:
+                word = re.search(r"\[(\d+)]", w)
+                if word:
+                    w = w[:w.find('[')]
+                    meaning_id = int(word.group(1))
+                    if meaning_id in strange_words_ids:
+                        tb2_p.add_run_to_p(w + ' ', size=10.5, bold=True)
+                    elif meaning_id in explanatory_words_ids:
+                        italics_index_str = italics_index_dict[meaning_id]
+                        tb2_p.add_run_to_p(w + f'{italics_index_str} ', size=10.5, italic=True)
+                    else:
+                        tb2_p.add_run_to_p(w + ' ', size=10.5)
+
+                else: 
+                    if w in bold_word:
+                        tb2_p.add_run_to_p(w + ' ', size=10.5, bold=True)
+                    elif w in italics_word:
+                        italics_index_str = italics_index_dict[w]
+                        tb2_p.add_run_to_p(w + f'{italics_index_str} ', size=10.5, italic=True)
+                    else:
+                        tb2_p.add_run_to_p(w + ' ', size=10.5)
+
+            tb2.set_cell_text(0, 1, explanatory_words, size=10.5, font_color=(80, 80, 80), align="left", centre=False,line_spacing=300)
+
+            docx.add_blank_paragraph()
+            tail_zhushi = """完成时间:_____点_____分_____秒,本篇用时:_____秒。"""
+            docx.add_paragraph(tail_zhushi, size=10.5)
+            docx.add_blank_paragraph()
+
+        def top_header():
+            sub_title_maker(docx, "阅读提升练", "智能匹配难度,轻松提升阅读", "鲍利提分, 高效学习专家")
+           
+            tb = Table(docx, 1, 1, tb_name="真题强化练", border=True)
+            tb.set_tb_colum_width(0, 460)
+            text = ["阅读中不认识的单词,尽量猜测词义,并用斜线划掉,以便拍照报告给我们。\n",
+                    "读完全文后,可查字典,并抄在右侧批注区,不要在原文上注释。复习时不必通读全文,结合上下文能回忆起标记词的词义即可,想不起的再对照批注区。\n",
+                    "阅读训练的目的是提高对英语词、句、篇的敏感度,答题只是检验学习成果的手段,所以切勿为了快速做题而跳读、略读。阅读速度是很重要的训练指标,请在确实理解词句的基础上尽量提高阅读速度。只要平时扎实阅读,考试中不会没有时间认真读题。\n",
+                    "阅读计时从此处开始,请按顺序完成阅读,并注意记录时间。\n\n",
+                    "生词划线示例:competitions she once attended. Incuding her years of experience"
+                    ]
+            cell_p = tb.get_cell_paragraph(0, 0, align="left", dq=10, dh=10)
+            pp = ParagraphBase(cell_p)
+            for index_t, t in enumerate(text):
+                if index_t == len(text) - 1:
+                    pp.add_run_to_p(t, size=12)
+                    pp.add_rectangle('', x=115, y=170, width=55, height=25, boder_color='000000', shape_type='line')
+                    pp.add_rectangle('', x=298, y=170, width=55, height=25, boder_color='000000', shape_type='line')
+                else:
+                    pp.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+                    pp.add_run_to_p(t, size=10)
+
+            docx.add_blank_paragraph()
+        "---------------------开始单篇运行---------------------"
+        if index == 1: 
+            top_header()
+        single_yuedu(index, article_single)
+
+
+   
+    def seven_to_five(index, article_single):
+        article_id = article_single['Id']
+        article_length = article_single['AllWordAmount']
+       
+        strange_words_ids = [i['MeanId'] for i in json_data['StrangeWords']]
+       
+        explanatory_words_ids = [i['MeaningId'] for i in article_single['ExplanatoryWords']]
+
+       
+        select_text = []
+        for ques_index, s_candidates in enumerate(article_single['Candidates'], start=1):
+            single_select_text = ''
+            single_select_text += s_candidates['Label'] + '. '
+            participle = s_candidates['Participle'] 
+            if participle:
+                single_select_text += participle
+            else:
+                text = s_candidates['Text']
+                single_select_text += text
+
+            select_text.append(f"{single_select_text}")
+
+       
+        all_select_text = "\n".join(select_text)
+
+       
+        article_main: str = article_single['English'] + "\n\n郑重提示:认真看完全文再看问题。\n\n" + all_select_text
+        article_main_list = article_main.split(" ")
+
+       
+        explanatory_words = "\n\n".join(
+            [f"{index}. {i['Spell']} [{i['SymbolsEn']}] [{i['SymbolsAm']}] {i['Meaning']}" for index, i in
+             enumerate(article_single['ExplanatoryWords'], start=1)])
+
+        sub_title_maker(docx, "阅读提升练", "智能匹配难度,轻松提升阅读", "鲍利提分, 高效学习专家")
+        tb = Table(docx, 1, 1, tb_name="真题强化练", border=True)
+        tb.set_tb_colum_width(0, 460)
+        text = ["阅读中不认识的单词,尽量猜测词义,并用斜线划掉,以便拍照报告给我们。\n",
+                "读完全文后,可查字典,并抄在右侧批注区,不要在原文上注释。复习时不必通读全文,结合上下文能回忆起标记词的词义即可,想不起的再对照批注区。\n",
+                "7 选 5 题型是测试学生对文章理解程度的好题型,但因打破里文章的连贯性,故不是训练阅读能力的好素材。建议学生在阅读基本能力(理解度、速度、难度)达标后再开展 7 选 5 题型训练。若不能胜任本练习,请回到词汇与阅读训练,先打好基础。\n",
+                "阅读计时从此处开始,请按顺序完成阅读,并注意记录时间。"]
+        cell_p = tb.get_cell_paragraph(0, 0, align="left", dq=10, dh=10)
+        p = ParagraphBase(cell_p)
+        for t in text:
+            p.add_run_to_p("☆ ", size=10, font_name="MS Gothic")
+            p.add_run_to_p(t, size=10)
+
+        docx.add_blank_paragraph()
+
+        tb1 = Table(docx, 1, 3, tb_name="图片小标题")
+        tb1.set_tb_colum_width(width=[90, 370, 5])
+
+        tb1_p = ParagraphBase(tb1.get_cell_paragraph(0, 0, align="left"))
+        tb1_p.add_pic("make_docx_demo/static/lianxi1.jpg", width=2.5)
+        tb1.set_cell_text(0, 1, f"篇幅(含问题选项):{article_length} 词	阅读开始时间:_____点_____分_____秒",
+                          size=9.5, border=False, align="left")
+
+        tb2 = Table(docx, rows=1, cols=2, border=True, tb_name="七选五")
+        tb2.set_tb_colum_width(width=[320, 140])
+
+       
+        tb2_p = ParagraphBase(tb2.get_cell_paragraph(0, 0, align="left"))
+        for w in article_main_list:
+            word = re.search(r"\[(\d+)]", w)
+            if word:
+                w = w[:w.find('[')]
+                meaning_id = int(word.group(1))
+                if meaning_id in strange_words_ids:
+                    tb2_p.add_run_to_p(w + ' ', size=10.5, bold=True)
+                elif meaning_id in explanatory_words_ids:
+                    tb2_p.add_run_to_p(w + ' ', size=10.5, italic=True)
+                else: 
+                    tb2_p.add_run_to_p(w + ' ', size=10.5)
+            else:
+                tb2_p.add_run_to_p(w + ' ', size=10.5)
+
+        tb2.set_cell_text(0, 1, explanatory_words, size=10.5, font_color=(80, 80, 80), align="left")
+
+        docx.add_blank_paragraph()
+
+    "判断题型;根据题型选择----------------------------"
+   
+    all_article_length = 0
+
+    for index, article_single in enumerate(json_data['Articles'], start=1):
+        article_type = article_single['Category'] 
+
+        article_type_select = {1: reading, 2: wanxing, 3: seven_to_five}
+
+        assert article_type in article_type_select
+        article_type_select[article_type](index, article_single) 
+
+       
+        article_length = article_single['AllWordAmount']
+        all_article_length += article_length
+
+    tail_zhushi = f"""阅读计时在此结束。
+今日总计阅读量 {all_article_length} 词,用时________秒,整份学案共有_______个题目答对。"""
+    docx.add_paragraph(tail_zhushi, size=10.5)
+    docx.add_blank_paragraph()
+
+    docx.add_page_section()
+
+
+@time_use
+def section_9(docx: Word, json_data, *args, **kwargs):
+    def wanxing(index,article_count, article_single):
+        chinese_article = article_single['Chinese']
+        all_analysis = '' 
+        docx.add_paragraph("答案和解析", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True)
+
+       
+        for ques_index, question_item in enumerate(article_single['Questions'], start=1):
+            analysis = question_item['Analysis'].strip() 
+            abcd_label = '' 
+
+            candidates = question_item['Candidates']
+            for abcd_selected in candidates:
+                if abcd_selected['IsRight']: 
+                    abcd_label += abcd_selected['Label'].strip()
+
+            all_analysis += f"{ques_index}.\n{abcd_label}  {analysis}\n" 
+
+        docx.add_paragraph(all_analysis, size=9)
+        docx.add_paragraph("全文参考译文", chinese_font_name="微软雅黑", dq=15, dh=5, bold=True)
+        docx.add_paragraph(chinese_article, size=9, dq=5, dh=5, line_spacing=300)
+
+   
+    def reading(index,article_count, article_single):
+        """
+        index: passed in by the caller, starting at 1. If there is only ...
+        """
+        all_analysis = '' 
+        all_difficult_sentences = [] 
+
+        chinese_article = article_single['Chinese']
+
+       
+        questions = article_single['Questions']
+        for ques_index, question_item in enumerate(questions, start=1):
+            analysis = question_item['Analysis'].strip("\n") 
+            abcd_label = '' 
+
+            candidates = question_item['Candidates']
+            for abcd_selected in candidates:
+                if abcd_selected['IsRight']: 
+                    abcd_label += abcd_selected['Label'].strip("\n")
+
+            new_line = "" if ques_index==len(questions) else "\n"
+            all_analysis += f"{ques_index}.{abcd_label}  {analysis}{new_line}"
+
+       
+        if index!=article_count:
+            all_analysis += '\n'
+
+        docx.add_paragraph(f"Passage {index}", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True, size=16)
+        docx.add_paragraph("全文参考译文", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True)
+        docx.add_paragraph(chinese_article, size=9)
+
+        docx.add_paragraph("答案和解析", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True)
+        docx.add_paragraph(all_analysis, size=9)
+
+
+    def seven_to_five(index,article_count, article_single):
+        chinese_article = article_single['Chinese']
+        all_analysis = '' 
+
+        docx.add_paragraph("答案和解析", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True)
+       
+        for q_index, question_item in enumerate(article_single['Questions'], start=1):
+            analysis = question_item['Analysis'] 
+            abcd_label = '' 
+
+            candidates = question_item['Candidates']
+            for abcd_selected in candidates:
+                if abcd_selected['IsRight']: 
+                    abcd_label += abcd_selected['Label']
+            all_analysis += f"{q_index}.{abcd_label}  {analysis}\n"
+
+        docx.add_paragraph(all_analysis, size=9)
+        docx.add_paragraph("全文参考译文", chinese_font_name="微软雅黑", dq=15, dh=5, bold=True)
+        docx.add_paragraph("Passage 1", chinese_font_name="微软雅黑", dq=5, dh=5, bold=True)
+        docx.add_paragraph(chinese_article, size=9, dq=5, dh=5, line_spacing=300)
+
+    "判断题型;根据题型选择----------------------------"
+    sub_title_maker(docx, "解题自主纠", "自主学习,逐步养成良好学习习惯","鲍利提分,你的智能教练")
+    articles = json_data['Articles']
+    article_count = len(articles)
+    for index, article_single in enumerate(articles, start=1):
+        article_type = article_single['Category'] 
+        article_type_select = {1: reading, 2: wanxing, 3: seven_to_five}
+        assert article_type in article_type_select
+        article_type_select[article_type](index,article_count, article_single) 
+
+    docx.add_docx_component("make_docx_demo/word_component/blank.docx")
+    docx.add_page_section()
+
+
+@time_use
+def section_10(docx: Word, json_data, scanpage_format, *args, **kwargs):
+    docx.add_paragraph("☆ 请写出词义,再对照筛査表批改。词义顺序可互换;答案意思相符即可,不要求一字不差。批改结果眷抄到筛査表。", size=9, dq=2,
+                       dh=2)
+    tb = Table(docx, 50, 4, tb_name="写出词义")
+    tb.set_tb_colum_width(width=[110, 120, 110, 120])
+
+    for row in range(50):
+        tb.set_cell_text(row, 0, str(row + 1) + " " + "rich", size=8.5, dq=1, dh=1, border=False)
+        tb.set_cell_text(row, 1, "□________________", size=10, dq=0, dh=0, border=False)
+        tb.set_cell_text(row, 2, str(row + 51) + " " + "rich", size=8.5, dq=1, dh=1, border=False)
+        tb.set_cell_text(row, 3, "□________________", size=10, dq=0, dh=0, border=False)
+    tb.set_row_height(13)
+    docx.add_page_break()
+
+    docx.add_paragraph("☆ 请在需要加强学习的词义前方框中划线,两头各超出 1 毫米为宜(示例:□☑52.example);请保持本表整洁并交回。", size=9, dq=2,
+                       dh=2)
+    tb2 = Table(docx, 25, 8, tb_name="划线表")
+    tb2.set_tb_colum_width(width=[57.5] * 8)
+
+    docx.add_blank_paragraph(dq=5, dh=5)
+
+    for row in range(25):
+        tb2.set_cell_text(row, 0, "[01] Tuesday", size=8.5, border=False, dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 1, "星期二", size=8.5, border="right", dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 2, "[01] Tuesday", size=8.5, border=False, dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 3, "星期二", size=8.5, border="right", dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 4, "[01] Tuesday", size=8.5, border=False, dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 5, "星期二", size=8.5, border="right", dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 6, "[01] Tuesday", size=8.5, border=False, dq=1.2, dh=1.2)
+        tb2.set_cell_text(row, 7, "星期二", size=8.5, border=False, dq=1.2, dh=1.2)
+
+    docx.set_page_column(5)
+    docx.add_docx_component("make_docx_demo/word_component/component.docx")
+    docx.end_page_column()
+
+    if scanpage_format == 3:
+        docx.add_page_section()
+
+
+@time_use
+def two_check_page(docx: Word, json_data, *args, **kwargs):
+   
+    def empty_filter_page(class_name, student_name, page_title, page_sub_title, t_datetime, word_data_list):
+        page_sub_title = "词汇训练" 
+
+        if len(word_data_list) % 2 != 0:
+            word_data_list.append("") 
+
+        tb = Table(docx, 1, 3, tb_name="头部三元素")
+        tb.set_tb_colum_width(width=[40, 100, 100])
+
+       
+       
+       
+
+        tb.set_tb_colum_width(0, 100)
+        tb.set_cell_text(0, 0, f"鲍利提分", border=False, size=16, bold=True, chinese_font_name="黑体")
+        tb.set_cell_text(0, 1, f"{class_name}\n{student_name}", border=False, size=8, dh=2)
+        tb.set_cell_text(0, 2, f"{page_title}\n{page_sub_title}", border=False, size=8, dh=2)
+
+        docx.add_paragraph("请写出词义,再对照筛查表批改。词义顺序可互换;答案意思相符即可,不要求一字不差。批改结果誊抄到筛查表。", size=9)
+
+        tb = Table(docx, rows=0, cols=4, tb_name="第一页筛查表")
+
+        tb.set_all_border_fastly(xml=True, outside_side_border=True, outside_side_border_size=5)
+
+        half_count = int(len(word_data_list) / 2) 
+        for index, row in enumerate(range(half_count)):
+            first_word, second_word = word_data_list[row], word_data_list[row + half_count]
+            cell3 = f"{index + 1 + half_count}. {second_word}" if second_word else ""
+            cell4 = "□ ___________________________" if second_word else ""
+
+            data = [f"{index + 1}. {first_word}", "□ ___________________________", cell3, cell4]
+            tb.add_table_row_data_xml_fastly(data, font_size=[10.5, 9, 10.5, 9])
+        tb.set_row_height(13.8)
+        tb.set_table_width_xml([2124, 3257, 2140, 3257])
+        blank_count = " " * 80
+        p = docx.add_blank_paragraph(dq=5)
+        p.add_run_to_p(f"{t_datetime} {page_title}-{page_sub_title}{blank_count}", size=8, chinese_font_name="仿宋", font_name="仿宋")
+        docx.add_page_break() 
+
+   
+    def filter_table_page(page_id, class_name, student_name, page_title, page_sub_title, t_datetime,
+                          foot_description, foot_description2, article_type, word_data_list2):
+        page_sub_title = "词汇训练" 
+        if len(word_data_list2) % 2 != 0:
+            word_data_list2.append(["", ""]) 
+
+        tb = Table(docx, 1, 5, tb_name="头部五元素")
+        tb.set_tb_colum_width(width=[80, 100, 120, 150, 70])
+
+        tb.set_cell_text(0, 0, f"鲍利提分", border=False, size=16, bold=True, chinese_font_name="黑体")
+        tb.set_cell_text(0, 1, f"{class_name}\n{student_name}", border=False, size=8)
+        tb.set_cell_text(0, 2, f"{page_id}", border=False, size=16, dh=2, bold=True, font_name="黑体")
+        tb.set_cell_text(0, 3, f"{page_title}\n{page_sub_title}", border=False, size=8)
+
+        p_cell = tb.get_cell_paragraph(0, 4)
+        p = ParagraphBase(p_cell)
+        io_image = qrcode_maker(f"{page_id}")
+        p.add_pic(io_image, width=Inches(0.6))
+        io_image.close()
+
+        pp = docx.add_blank_paragraph()
+
+        pp.add_run_to_p("下述词汇相应的词义未掌握的请划掉,并将整个页面拍照给我们,以便记录词汇掌握数据。示例:comfort 4. 舒适,安逸", size=9)
+        pp.add_rectangle('', x=540, y=10, width=55, height=0, boder_color='000000', shape_type='line')
+
+        tb = Table(docx, rows=0, cols=4, tb_name="第二页筛查表")
+
+        ## Column-major layout: left pair of columns holds entries 1..N/2, right pair N/2+1..N.
+        total_row = int(len(word_data_list2) / 2)
+        for row in range(total_row):
+            spell1, meaning1 = word_data_list2[row]
+            spell2, meaning2 = word_data_list2[total_row + row]
+
+            cell3 = f"{spell2}" if spell2 else ""
+            cell4 = f"{total_row + row + 1}. {meaning2}" if meaning2 else ""
+
+            data = [f"{spell1}", f"{row + 1}. {meaning1}", cell3, cell4] 
+            tb.add_table_row_data_xml_fastly(data, font_size=[10.5, 9, 10.5, 9], alignment=['right', 'left', 'right', 'left'])
+
+       
+        tb.set_row_height(13.8)
+        tb.set_table_width_xml([2124, 3257, 2140, 3257])
+
+        docx.add_paragraph(f"{t_datetime} {page_title}-{page_sub_title}{foot_description}", size=8, chinese_font_name="仿宋",
+                           font_name="仿宋", dq=5)
+
+
+   
+    student_name = json_data.get("StudentInfo").get("StudentName", '') 
+    class_name = json_data.get("StudentInfo").get("ClassName", '').replace("词汇突击", "") 
+    t_datetime = time.strftime("%Y-%m-%d %H:%M", time.localtime()) 
+    article_type = 1
+    try:
+        article_type = json_data['WordAndArticleContents'][0]['Articles'][0]['Category'] 
+    except Exception as e:
+        log_err_e(e, "学案类型不存在就取1,词汇突击里面只有阅读理解")
+
+   
+
+    """---------------------------------------------------------------------------------"""
+    screening_scanPages = json_data['ScreeningScanPages']
+    for index, page in enumerate(screening_scanPages, start=1):
+        page_id = str(page['PageId']).rjust(11, "0")
+
+        page_title = page['Title'] 
+        page_sub_title = page['SubTitle'] 
+        foot_description = page['FootDescription'] 
+        foot_description2 = page['FootDescription2'] 
+       
+
+        word_data_list1 = []
+        word_data_list2 = []
+        for i in page['FilterTable']['Items']: 
+            word_data_list1.append(i['Spell'])
+            word_data_list2.append([i['Spell'], i['Meaning']])
+
+       
+       
+       
+
+       
+        filter_table_page(page_id, class_name, student_name, page_title, page_sub_title, t_datetime,
+                          foot_description, foot_description2, article_type, word_data_list2)
+       
+        docx.add_page_break()
+
+
+
+def old_two_check_page(docx: Word, json_data, **kwargs):
+   
+    def empty_filter_page(class_name, student_name, page_title, page_sub_title, t_datetime, word_data_list):
+        if len(word_data_list) % 2 != 0:
+            word_data_list.append("") 
+
+        tb = Table(docx, 1, 3, tb_name="头部三元素")
+        tb.set_tb_colum_width(width=[140, 100, 100])
+
+       
+       
+       
+
+        tb.set_cell_text(0, 0, f"鲍利提分", border=False, size=16, bold=True, chinese_font_name="黑体")
+        tb.set_cell_text(0, 1, f"{class_name}\n{student_name}", border=False, size=8, dh=2)
+        tb.set_cell_text(0, 2, f"{page_title}\n{page_sub_title}", border=False, size=8, dh=2)
+
+        docx.add_paragraph("请写出词义,再对照筛查表批改。词义顺序可互换;答案意思相符即可,不要求一字不差。批改结果誊抄到筛查表。", size=9)
+
+        tb = Table(docx, rows=0, cols=4, tb_name="第一页筛查表")
+
+        tb.set_all_border_fastly(xml=True, outside_side_border=True, outside_side_border_size=5)
+
+        half_count = int(len(word_data_list) / 2) 
+        for index, row in enumerate(range(half_count)):
+            first_word, second_word = word_data_list[row], word_data_list[row + half_count]
+            cell3 = f"{index + 1 + half_count}. {second_word}" if second_word else ""
+            cell4 = "□ ___________________________" if second_word else ""
+
+            data = [f"{index + 1}. {first_word}", "□ ___________________________", cell3, cell4]
+            tb.add_table_row_data_xml_fastly(data, font_size=[10.5, 9, 10.5, 9])
+        tb.set_row_height(13.8, first_row_h=6)
+        tb.set_table_width_xml([2124, 3257, 2140, 3257])
+        blank_count = " " * 80
+        p = docx.add_blank_paragraph(dq=5)
+        p.add_run_to_p(f"{t_datetime} {page_title}-{page_sub_title}{blank_count}", size=8, chinese_font_name="仿宋", font_name="仿宋")
+        docx.add_page_break() 
+
+   
+    def filter_table_page(page_id, class_name, student_name, page_title, page_sub_title, t_datetime,
+                          foot_description, foot_description2, article_type, word_data_list2):
+        if len(word_data_list2) % 2 != 0:
+            word_data_list2.append(["", ""]) 
+
+        tb = Table(docx, 1, 5, tb_name="头部五元素")
+        tb.set_tb_colum_width(width=[80, 100, 120, 150, 70])
+
+       
+       
+       
+
+        tb.set_cell_text(0, 0, f"鲍利提分", border=False, size=16, bold=True, chinese_font_name="黑体")
+        tb.set_cell_text(0, 1, f"{class_name}\n{student_name}", border=False, size=8)
+        tb.set_cell_text(0, 2, f"{page_id}", border=False, size=16, dh=2, bold=True, font_name="黑体")
+        tb.set_cell_text(0, 3, f"{page_title}\n{page_sub_title}", border=False, size=8)
+
+        p_cell = tb.get_cell_paragraph(0, 4)
+        p = ParagraphBase(p_cell)
+        page_id = int(page_id)
+        io_image = qrcode_maker(f"{page_id}")
+        p.add_pic(io_image, width=Inches(0.6))
+        io_image.close()
+
+        pp = docx.add_blank_paragraph()
+        p_base = ParagraphBase(pp)
+        p_base.p.add_run_to_p("请在需要加强学习的词义前方框中划线,两头各超出1毫米为宜(示例:", size=9)
+        p_base.p.add_pic("make_docx_demo/static/line_example.png", width=Inches(0.8))
+        p_base.p.add_run_to_p(" );请保持本表整洁并交回。", size=9)
+
+        tb = Table(docx, rows=0, cols=4, tb_name="第二页筛查表")
+
+        ## Column-major layout: left pair of columns holds entries 1..N/2, right pair N/2+1..N.
+        total_row = int(len(word_data_list2) / 2)
+        for row in range(total_row):
+            spell1, meaning1 = word_data_list2[row]
+            spell2, meaning2 = word_data_list2[total_row + row]
+
+            cell3 = f"{total_row + row + 1}. {spell2}" if spell2 else ""
+            cell4 = f"□ {meaning2}" if meaning2 else ""
+
+            data = [f"{row + 1}. {spell1}", f"□ {meaning1}", cell3, cell4] 
+            tb.add_table_row_data_xml_fastly(data, font_size=[10.5, 9, 10.5, 9])
+
+        tb.set_all_border_fastly(xml=True, outside_side_border=True, outside_side_border_size=5)
+        tb.set_row_height(13.6, first_row_h=6)
+        tb.set_table_width_xml([2124, 3257, 2140, 3257])
+        if article_type == 1: 
+            docx.add_paragraph(f"{t_datetime} {page_title}-{page_sub_title}{foot_description}", size=8, chinese_font_name="仿宋",
+                               font_name="仿宋", dq=5)
+            docx.add_paragraph(foot_description2, align="right", size=8, chinese_font_name="仿宋")
+        else:
+            docx.add_paragraph(f"{t_datetime} {page_title}-{page_sub_title}{foot_description}", size=8, chinese_font_name="仿宋",
+                               font_name="仿宋", dq=5)
+
+   
+    student_name = json_data.get("StudentInfo").get("StudentName", '') 
+    class_name = json_data.get("StudentInfo").get("ClassName", '') 
+    t_datetime = time.strftime("%Y-%m-%d %H:%M", time.localtime()) 
+    article_type = json_data['WordAndArticleContents'][0]['Articles'][0]['Category'] 
+    is_add_empty_filter_page = json_data['Config']['AddEmptyFilterPage'] 
+
+    """---------------------------------------------------------------------------------"""
+    for index, page in enumerate(json_data['ScreeningScanPages'], start=1):
+        page_id = str(page['PageId']).rjust(11, "0")
+
+       
+        if index >= 2:
+            docx.add_page_break()
+
+        page_title = page['Title'] 
+        page_sub_title = page['SubTitle'] 
+        foot_description = page['FootDescription'] 
+        foot_description2 = page['FootDescription2'] 
+
+        word_data_list1 = []
+        word_data_list2 = []
+        for i in page['FilterTable']['Items']: 
+            word_data_list1.append(i['Spell'])
+            word_data_list2.append([i['Spell'], i['Meaning']])
+
+       
+        if is_add_empty_filter_page:
+            empty_filter_page(class_name, student_name, page_title, page_sub_title, t_datetime, word_data_list1)
+
+       
+        filter_table_page(page_id, class_name, student_name, page_title, page_sub_title, t_datetime,
+                          foot_description, foot_description2, article_type, word_data_list2)
+
+
+@time_use
+def other(docx, json_data, *args, **kwargs):
+   
+    sections = docx.doc.sections
+    for section in sections[:-1]:
+        section.top_margin = Inches(0.3)
+        section.bottom_margin = Inches(0.4)
+        section.left_margin = Inches(0.8)
+        section.right_margin = Inches(0.8)
+        section.footer_distance = 180000
+
+    sections[-1].top_margin = Inches(0.1)
+    sections[-1].bottom_margin = Inches(0.1)
+    sections[-1].left_margin = Inches(0.5)
+    sections[-1].right_margin = Inches(0.5)
+
+    header_maker(docx, json_data)
+
+
+def start_make_word(json_data, document_format, scanpage_format):
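+    # document_format: 1 -> return the .docx path, otherwise convert and return the .pdf.
+    # scanpage_format: 1 -> check-sheet pages, 2 -> section_10, 3 -> both (see the branches below).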
+    parent_path = "make_docx_demo/file_result/" 
+    if not os.path.exists(parent_path):
+        os.makedirs(parent_path)
+    try:
+        exercise_id = json_data['ExerciseId'] 
+
+       
+        docx = Word(save_file_name=f"{parent_path}{exercise_id}.docx",
+                    start_template_name="make_docx_demo/word_component/start_template.docx")
+       
+        section_1(docx=docx, json_data=json_data, scanpage_format=scanpage_format)
+       
+        section_4(docx=docx, json_data=json_data, scanpage_format=scanpage_format)
+       
+        for exercise_json in json_data['WordAndArticleContents']: 
+            section_4_1(docx=docx, json_data=exercise_json, scanpage_format=scanpage_format)
+            section_5(docx=docx, json_data=exercise_json, scanpage_format=scanpage_format)
+            section_6(docx=docx, json_data=exercise_json, scanpage_format=scanpage_format)
+            section_7(docx=docx, json_data=exercise_json, scanpage_format=scanpage_format)
+            section_9(docx=docx, json_data=exercise_json, scanpage_format=scanpage_format)
+
+        if scanpage_format == 1:
+           
+            two_check_page(docx=docx, json_data=json_data, scanpage_format=scanpage_format)
+           
+            old_two_check_page(docx=docx, json_data=json_data, scanpage_format=scanpage_format)
+        elif scanpage_format == 2:
+            section_10(docx=docx, json_data=json_data, scanpage_format=scanpage_format)
+        elif scanpage_format == 3:
+            section_10(docx=docx, json_data=json_data, scanpage_format=scanpage_format)
+            two_check_page(docx=docx, json_data=json_data, scanpage_format=scanpage_format)
+
+        other(docx=docx, json_data=json_data, scanpage_format=scanpage_format)
+
+        docx.save_docx()
+        if document_format == 1:
+            return f"{parent_path}{exercise_id}.docx"
+        else:
+            convert_word_to_pdf(f"{parent_path}{exercise_id}")
+            return f"{parent_path}{exercise_id}.pdf"
+
+    except Exception as e:
+        log_err_e(e)
+
+
+if __name__ == '__main__':
+    import os
+
+    t = time.time()
+    os.chdir("..")
+
+   
+    start_make_word(test_json1, 1, 1)
+    print(time.time() - t)
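
Usage sketch (not part of the patch; flags inferred from start_make_word above): document_format 1 returns the .docx path, any other value converts the result to PDF; scanpage_format 1 appends two_check_page and old_two_check_page, 2 appends section_10, 3 appends both. The test_json1 import location is an assumption, mirroring how mock/mock_request.py imports test_json2.

    from make_docx_demo.data import test_json1  # assumed location of the sample payload

    pdf_path = start_make_word(test_json1, document_format=2, scanpage_format=3)
    print(pdf_path)  # e.g. make_docx_demo/file_result/<ExerciseId>.pdf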

+ 41 - 0
make_docx_demo/new_word2pdf.py

@@ -0,0 +1,41 @@
+# -*- coding:utf-8 -*-
+
+import os
+import time
+from docx2pdf import convert
+import win32com.client
+from concurrent.futures import ProcessPoolExecutor
+from multiprocessing import Process
+import pythoncom
+
+
+def convert_word_to_pdf(input_file):
+    # Word COM resolves relative paths against its own working directory, so use an absolute path.
+    input_file = os.path.abspath(input_file)
+    output_file = input_file.replace('.docx', '.pdf')
+    word = win32com.client.Dispatch("Word.Application")
+    word.Visible = False
+    try:
+        doc = word.Documents.Open(input_file)
+        doc.SaveAs(output_file, FileFormat=17)  # 17 = wdFormatPDF
+        doc.Close()
+    finally:
+        word.Quit()
+
+def convert_word_to_pdf2(input_file):
+    pythoncom.CoInitialize()
+    convert(input_file)
+    pythoncom.CoUninitialize()
+
+if __name__ == '__main__':
+    import os
+    files = os.listdir(r"C:\Users\86131\Desktop\回收\潘资料")
+    print(files)
+
+    t = time.time()
+    p_lists = []
+    for file in files:
+        p1 = Process(target=convert_word_to_pdf2, args=(os.path.join(r"C:\Users\86131\Desktop\回收\潘资料", file),))
+        p1.start()
+        p_lists.append(p1)
+
+    for p in p_lists:
+        p.join()
+
+    print(time.time() - t)
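
Sketch (not part of the patch): the same batch conversion using the ProcessPoolExecutor this module already imports but does not use; a bounded pool avoids spawning one process per file. The folder path and worker count are placeholders, and on Windows the call must sit under an `if __name__ == '__main__':` guard.

    import os
    from concurrent.futures import ProcessPoolExecutor

    def batch_convert(folder, max_workers=4):
        docx_files = [os.path.join(folder, f) for f in os.listdir(folder) if f.endswith(".docx")]
        with ProcessPoolExecutor(max_workers=max_workers) as pool:
            # convert_word_to_pdf2 initialises COM per call, so it is safe inside worker processes
            list(pool.map(convert_word_to_pdf2, docx_files))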

BIN
make_docx_demo/static/2.jpg


BIN
make_docx_demo/static/baidu_qrcode.png


BIN
make_docx_demo/static/chart.png


BIN
make_docx_demo/static/happy_word.jpg


BIN
make_docx_demo/static/lianxi1.jpg


BIN
make_docx_demo/static/lianxi2.jpg


BIN
make_docx_demo/static/lianxi3.jpg


BIN
make_docx_demo/static/lianxi4.jpg


BIN
make_docx_demo/static/lianxi5.jpg


BIN
make_docx_demo/static/lianxi6.jpg


BIN
make_docx_demo/static/lianxi7.jpg


BIN
make_docx_demo/static/lianxi8.jpg


BIN
make_docx_demo/static/line.jpg


BIN
make_docx_demo/static/line_example.png


BIN
make_docx_demo/static/logo.png


BIN
make_docx_demo/static/logo2.png


BIN
make_docx_demo/static/pen.png


BIN
make_docx_demo/static/qr_code.jpg


BIN
make_docx_demo/static/qr_code.png


BIN
make_docx_demo/static/t1.jpg


BIN
make_docx_demo/static/首页示意图.jpg


BIN
make_docx_demo/static/首页示意图2.jpg


+ 63 - 0
make_docx_demo/word2pdf.py

@@ -0,0 +1,63 @@
+# -*- coding=utf-8 -*-
+from docx2pdf import convert
+import pythoncom
+import time
+import os
+from threading import Lock
+
+ll = Lock()
+
+
+def convert_word_to_pdf(pdf_name):
+    for cccou in range(3):
+        try:
+            ll.acquire()
+            print('加锁,进入转pdf')
+            pythoncom.CoInitialize()
+            convert(f'{pdf_name}.docx') 
+            for i in range(30):
+                if os.path.exists(f'{pdf_name}.pdf'):
+                    break
+                time.sleep(0.5)
+            break
+        except Exception as ee:
+           
+            print(ee)
+        finally:
+            pythoncom.CoUninitialize()
+            print('解锁,转pdf完成')
+            ll.release() 
+
+def convert_word_to_pdf2(pdf_name):
+    for cccou in range(3):
+        try:
+            convert(f'{pdf_name}.docx') 
+            for i in range(30):
+                if os.path.exists(f'{pdf_name}.pdf'):
+                    break
+                time.sleep(0.5)
+            break
+        except Exception as ee:
+            print(ee)
+
+
+if __name__ == '__main__':
+    import multiprocessing
+
+   
+   
+   
+
+   
+   
+   
+   
+   
+   
+   
+   
+   
+   
+   
+    #
+   

+ 227 - 0
make_docx_demo/word_component/make_rectangle.py

@@ -0,0 +1,227 @@
+# -*- coding:utf-8 -*-
+from random import randint
+
+def make_shape_fun(text, x, y, boder_size, width, height, font_color, fill_color, font_size, boder_color, chinese_font, english_font, dash,
+                   shape_type='rect',rotate_angle=0,behindDoc=0):
+    """
+    rotate_angle: rotation in degrees, clockwise (e.g. 30, 60, 90)
+    behindDoc: 0 floats the shape above the text, 1 places it behind the text"""
+    if x > 600: x = 600
+    if y > 800: y = 800
+    font_size = font_size * 2
+    boder_size = boder_size * 12700 
+    dash_elem = '<a:prstDash val="dash"/>' if dash else ''
+
+    idid = randint(1, 99999)
+    x, y, width, height = x * 10000, y * 10000, width * 10000, height * 10000
+
+    if fill_color:
+        filled = f"""<a:solidFill><a:srgbClr val="{fill_color}"/></a:solidFill>"""
+        nofilled = ''
+    else:
+        filled = ''
+        nofilled = """<a:noFill/>"""
+
+    if boder_color:
+        boder = f"""<a:lnRef idx="2"><a:srgbClr val="{boder_color}"/></a:lnRef>"""
+        noboder = f"""<a:ln w="{boder_size}"><a:srgbClr val="{boder_color}"/>{dash_elem}</a:ln>"""
+    else:
+       
+       
+       
+        boder = """<a:lnRef idx="2"><a:noFill/></a:lnRef>"""
+        noboder = """<a:ln w="12700"><a:noFill/></a:ln>"""
+
+    if not text:
+        text = f'<w:t> </w:t>'
+    else:
+        text = f'<w:t>{text}</w:t>'
+
+    if shape_type != 'line':
+        insert_text_xml = f"""
+<wps:txbx>
+    <w:txbxContent>
+        <w:p w:rsidR="00305AED" w:rsidRDefault="00305AED" w:rsidP="00305AED">
+            <w:pPr>
+                <w:jc w:val="center"/>
+                <w:rPr>
+                    <w:color w:val="ff0000" w:themeColor="text1"/>
+                    <w:sz w:val="32"/>
+                </w:rPr>
+            </w:pPr>
+            <w:r>
+                <w:rPr>
+                    <w:rFonts w:ascii="{english_font}" w:eastAsia="{chinese_font}" w:hAnsi="{english_font}" w:hint="eastAsia"/>
+                    <w:color w:val="{font_color}"/>
+                    <w:sz w:val="{font_size}"/>
+                </w:rPr>
+                {text}
+            </w:r>
+        </w:p>
+    </w:txbxContent>
+</wps:txbx>"""
+    else:
+        insert_text_xml = ''
+
+   
+    shape_geom_map = {
+        'rect': '<a:prstGeom prst="rect"><a:avLst/></a:prstGeom>',
+        'circle': '<a:prstGeom prst="ellipse"><a:avLst/></a:prstGeom>',
+        'triangle': '<a:prstGeom prst="triangle"><a:avLst/></a:prstGeom>',
+        'diamond': '<a:prstGeom prst="diamond"><a:avLst/></a:prstGeom>',
+        'pie': '<a:prstGeom prst="pie"><a:avLst/></a:prstGeom>',
+        'heart': '<a:prstGeom prst="heart"><a:avLst/></a:prstGeom>',
+        'star': '<a:prstGeom prst="star"><a:avLst/></a:prstGeom>',
+        'polygon': '<a:prstGeom prst="polygon"><a:avLst/></a:prstGeom>',
+        'line': '<a:prstGeom prst="line"><a:avLst/></a:prstGeom>',
+        'smileyFace': '<a:prstGeom prst="smileyFace"><a:avLst/></a:prstGeom>',
+        'moon': '<a:prstGeom prst="moon"><a:avLst/></a:prstGeom>',
+        'cloud': '<a:prstGeom prst="cloud"><a:avLst/></a:prstGeom>',
+        'bevel': '<a:prstGeom prst="bevel"><a:avLst/></a:prstGeom>',
+        'invertedTriangle': '<a:prstGeom prst="invertedTriangle"><a:avLst/></a:prstGeom>',
+        'arc': '<a:prstGeom prst="arc"><a:avLst/></a:prstGeom>',
+    }
+
+   
+    shape_geom = shape_geom_map.get(shape_type, '<a:prstGeom prst="rect"><a:avLst/></a:prstGeom>')
+
+    r = f"""<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<w:document xmlns:wpc="http://schemas.microsoft.com/office/word/2010/wordprocessingCanvas"
+            xmlns:cx="http://schemas.microsoft.com/office/drawing/2014/chartex"
+            xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
+            xmlns:o="urn:schemas-microsoft-com:office:office"
+            xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships"
+            xmlns:m="http://schemas.openxmlformats.org/officeDocument/2006/math"
+            xmlns:v="urn:schemas-microsoft-com:vml"
+            xmlns:wp14="http://schemas.microsoft.com/office/word/2010/wordprocessingDrawing"
+            xmlns:wp="http://schemas.openxmlformats.org/drawingml/2006/wordprocessingDrawing"
+            xmlns:w10="urn:schemas-microsoft-com:office:word"
+            xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main"
+            xmlns:w14="http://schemas.microsoft.com/office/word/2010/wordml"
+            xmlns:w15="http://schemas.microsoft.com/office/word/2012/wordml"
+            xmlns:w16se="http://schemas.microsoft.com/office/word/2015/wordml/symex"
+            xmlns:wpg="http://schemas.microsoft.com/office/word/2010/wordprocessingGroup"
+            xmlns:wpi="http://schemas.microsoft.com/office/word/2010/wordprocessingInk"
+            xmlns:wne="http://schemas.microsoft.com/office/word/2006/wordml"
+            xmlns:wps="http://schemas.microsoft.com/office/word/2010/wordprocessingShape"
+            mc:Ignorable="w14 w15 w16se wp14">
+	<w:body>
+		<w:p w:rsidR="00943687"
+		     w:rsidRDefault="00305AED">
+			<w:bookmarkStart w:id="0"
+			                 w:name="_GoBack"/>
+			<w:bookmarkEnd w:id="0"/>
+			<w:r>
+				<w:rPr>
+					<w:noProof/>
+				</w:rPr>
+				<mc:AlternateContent>
+					<mc:Choice Requires="wps">
+						<w:drawing>
+							<wp:anchor distT="0"
+							           distB="0"
+							           distL="114300"
+							           distR="114300"
+							           simplePos="0"
+							           relativeHeight="251659264"
+							           behindDoc="{behindDoc}"
+							           locked="0"
+							           layoutInCell="1"
+							           allowOverlap="1">
+								<wp:simplePos x="0"
+								              y="0"/>
+								<wp:positionH relativeFrom="column">
+									<wp:posOffset>{x}</wp:posOffset>
+								</wp:positionH>
+								<wp:positionV relativeFrom="paragraph">
+									<wp:posOffset>{y}</wp:posOffset>
+								</wp:positionV>
+								<wp:extent cx="{width}"
+								           cy="{height}"/>
+								<wp:effectExtent l="0"
+								                 t="0"
+								                 r="13335"
+								                 b="14605"/>
+								<wp:wrapNone/>
+								<wp:docPr id="{idid}"
+								          name="形状 {idid}"/>
+								<wp:cNvGraphicFramePr/>
+								<a:graphic xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/main">
+									<a:graphicData uri="http://schemas.microsoft.com/office/word/2010/wordprocessingShape">
+										<wps:wsp>
+											<wps:cNvSpPr/>
+											<wps:spPr>
+												<a:xfrm rot="{60000*rotate_angle}">
+													<a:off x="0"
+													       y="0"/>
+													<a:ext cx="1777593"
+													       cy="614477"/>
+												</a:xfrm>
+												{shape_geom}
+												{filled}
+												{nofilled}
+												{noboder}
+											</wps:spPr>
+											<wps:style>
+                                                {boder}
+												<a:fillRef idx="1">
+													<a:schemeClr val="accent1"/>
+												</a:fillRef>
+												<a:effectRef idx="0">
+													<a:schemeClr val="accent1"/>
+												</a:effectRef>
+												<a:fontRef idx="minor">
+													<a:schemeClr val="lt1"/>
+												</a:fontRef>
+											</wps:style>
+											{insert_text_xml}
+											<wps:bodyPr rot="0"
+											            spcFirstLastPara="0"
+											            vertOverflow="overflow"
+											            horzOverflow="overflow"
+											            vert="horz"
+											            wrap="square"
+											            lIns="91440"
+											            tIns="45720"
+											            rIns="91440"
+											            bIns="45720"
+											            numCol="1"
+											            spcCol="0"
+											            rtlCol="0"
+											            fromWordArt="0"
+											            anchor="ctr"
+											            anchorCtr="0"
+											            forceAA="0"
+											            compatLnSpc="1">
+												<a:prstTxWarp prst="textNoShape">
+													<a:avLst/>
+												</a:prstTxWarp>
+												<a:noAutofit/>
+											</wps:bodyPr>
+										</wps:wsp>
+									</a:graphicData>
+								</a:graphic>
+							</wp:anchor>
+						</w:drawing>
+					</mc:Choice>
+
+				</mc:AlternateContent>
+			</w:r>
+		</w:p>
+		<w:sectPr w:rsidR="00943687">
+			<w:pgSz w:w="11906"
+			        w:h="16838"/>
+			<w:pgMar w:top="1440"
+			         w:right="1800"
+			         w:bottom="1440"
+			         w:left="1800"
+			         w:header="851"
+			         w:footer="992"
+			         w:gutter="0"/>
+			<w:cols w:space="425"/>
+			<w:docGrid w:type="lines"
+			           w:linePitch="312"/>
+		</w:sectPr>
+	</w:body>
+</w:document>"""
+    return r
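
Illustrative call of make_shape_fun (values are placeholders; the parameter order follows the signature above). The function returns a complete w:document string; presumably the caller extracts the <w:drawing> element and splices it into a paragraph of the target document.

    # Placeholder values; an empty fill_color yields <a:noFill/>, a border color enables the outline.
    shape_xml = make_shape_fun(
        text="OK", x=100, y=50,
        boder_size=1, width=120, height=40,
        font_color="FF0000", fill_color="",
        font_size=10, boder_color="000000",
        chinese_font="仿宋", english_font="Times New Roman",
        dash=True, shape_type="circle", rotate_angle=0, behindDoc=0,
    )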

+ 180 - 0
mock/mock_request.py

@@ -0,0 +1,180 @@
+# -*- coding:utf-8 -*-
+#
+import requests
+import time
+import json 
+from functools import wraps
+
+product_adress = "http://111.231.167.191" 
+test_address = "http://111.231.167.191:8004" 
+local_adress = "http://127.0.0.1:9000" 
+
+use_address = product_adress 
+
+
+def time_use(fn):
+    @wraps(fn)
+    def cc(*args, **kwargs): 
+        f_time = time.time()
+        res = fn(*args, **kwargs)
+
+        cha = round(time.time() - f_time, 3)
+        if cha > 0.1:
+            print(f'函数:{fn.__name__} 一共用时', cha, '秒')
+        return res 
+
+    return cc 
+
+
+def test_connect():
+    """连接测试"""
+    try:
+        r = requests.post(use_address)
+        assert r.text == 'hello world'
+        r = requests.get(use_address)
+        assert r.text == 'hello world'
+        print("测试连接成功")
+    except AssertionError:
+        print("测试连接失败")
+
+
+def get_article():
+    """同时检验article接口和query_oss_file接口"""
+    json_data = {"meaning_ids": [300751, 300756, 300757, 300758, 300454, 302096, 302102], "callback_url": "http://localhost/callback",
+                 "student_stage": 1, "vocabulary": 700, "class_id": 123456}
+
+    r = requests.post(f"{use_address}/article", json=json_data)
+   
+    key = r.json()['key']
+    time.sleep(120)
+    query_file_content(key)
+
+
+def query_file_content(key):
+    json_data = {"key": key}
+    try:
+        r = requests.post(f"{use_address}/query_oss_file", json=json_data)
+        r.raise_for_status() 
+        response_data = r.json()
+        assert response_data['wordCount'] > 0, "词数为0"
+    except requests.RequestException as e:
+        print(f"请求失败: {e}")
+    except json.JSONDecodeError as e:
+        print(f"JSON解析错误: {e}")
+    except AssertionError as e:
+        print(f"断言错误: {e}")
+    except Exception as e:
+        print(f"未知错误: {e}")
+
+
+def get_audio():
+    word = "cat"
+    r1 = requests.post(f"{use_address}/tts", json={"text": word, "type": 0}) 
+    r2 = requests.post(f"{use_address}/tts", json={"text": word, "type": 2}) 
+    r3 = requests.post(f"{use_address}/tts", json={"text": word, "type": 1}) 
+    assert r1.json()['code'] == 200
+    assert r2.json()['code'] == 200
+    assert r3.status_code == 200
+
+
+@time_use
+def get_article2():
+    json_data = {"words": [
+        {'spell': 'term', 'meaning': '学期'}, {'spell': 'ordinary', 'meaning': '平常的, 普通的, 一般的'},
+        {'spell': 'discussion', 'meaning': '讨论, 谈论'}, {'spell': 'shine', 'meaning': '照耀, 发光, 闪耀'},
+        {'spell': 'million', 'meaning': '百万'}, {'spell': 'greet', 'meaning': '问候'}
+    ], "take_count": 1, "student_stage": 3, "demo_name": "测试项目"}
+
+    r = requests.post(f"{use_address}/article/reading-comprehension", json=json_data)
+    r_json = r.json()
+    try:
+        assert len(r_json['articles']) == 1
+        return r_json
+    except Exception as e:
+        print("春笋文章reading-comprehension错误", e)
+        print("错误数据", r_json)
+
+
+@time_use
+def get_article2_1():
+    """新的获取文章"""
+    json_data = {'core_words': [{'spell': 'sudden', 'meaning': '突然的, 意外的', 'word_id': 1114468, 'meaning_id': 1734}, {'spell': 'frighten', 'meaning': '惊吓, 惊恐', 'word_id': 899278, 'meaning_id': 1735}, {'spell': 'relation', 'meaning': '关系, 联系, 亲戚, 亲属', 'word_id': 1061800, 'meaning_id': 1736}, {'spell': 'Japanese', 'meaning': '日本的', 'word_id': 727384, 'meaning_id': 1737}, {'spell': 'trick', 'meaning': '恶作剧, 戏法, 作假, 欺骗', 'word_id': 1140881, 'meaning_id': 1740}, {'spell': 'yours', 'meaning': '你(们)的东西', 'word_id': 1169496, 'meaning_id': 1741}, {'spell': 'panda', 'meaning': '熊猫', 'word_id': 1015908, 'meaning_id': 1742}, {'spell': 'agreement', 'meaning': '协议,协定', 'word_id': 753401, 'meaning_id': 1743}, {'spell': 'pool', 'meaning': '游泳池, 池子', 'word_id': 1035634, 'meaning_id': 1747}, {'spell': 'risk', 'meaning': '冒险, 风险', 'word_id': 1069002, 'meaning_id': 1748}, {'spell': 'centre', 'meaning': '中心', 'word_id': 806629, 'meaning_id': 1749}, {'spell': 'shut', 'meaning': '关上, 关闭', 'word_id': 1088662, 'meaning_id': 1751}, {'spell': 'piano', 'meaning': '钢琴', 'word_id': 1027211, 'meaning_id': 1752}, {'spell': 'trust', 'meaning': '信任, 信赖', 'word_id': 1142977, 'meaning_id': 1753}, {'spell': 'camera', 'meaning': '照相机', 'word_id': 799656, 'meaning_id': 1754},{'spell': 'course', 'meaning': '课程', 'word_id': 834016, 'meaning_id': 399}, {'spell': 'carry', 'meaning': '携带', 'word_id': 803106, 'meaning_id': 460}, {'spell': 'sometimes', 'meaning': '有时, 间或', 'word_id': 1097431, 'meaning_id': 495}, {'spell': 'interesting', 'meaning': '有趣的, 令人感兴趣的', 'word_id': 944231, 'meaning_id': 600}, {'spell': 'thought', 'meaning': '思想, 想法', 'word_id': 1130826, 'meaning_id': 685}],
+                 'extend_words': [ {'spell': 'destroy', 'meaning': '破坏, 摧毁', 'word_id': 848592, 'meaning_id': 1288}, {'spell': 'project', 'meaning': '放映, 展现', 'word_id': 1044528, 'meaning_id': 1290}, {'spell': 'waste', 'meaning': '浪费, 荒芜, 废物', 'word_id': 1160701, 'meaning_id': 1292}, {'spell': 'environment', 'meaning': '环境, 外界', 'word_id': 873514, 'meaning_id': 1293}, {'spell': 'memory', 'meaning': '记忆, 记忆力, 回忆', 'word_id': 981104, 'meaning_id': 1294}],
+                 'take_count': 1, 'student_stage': 3, 'demo_name': '春笋英语'}
+
+    r = requests.post(f"{use_address}/article/reading-comprehension", json=json_data)
+    r_json = r.json()
+    try:
+        return r_json
+    except Exception as e:
+        print("春笋文章reading-comprehension错误", e)
+        print("错误数据", r_json)
+
+
+def download_word():
+    from make_docx_demo.data import test_json2
+    params = {"document_format": 2, "scanpage_format": 1}
+
+    r = requests.post(f"{use_address}/make_word/vocabulary_assault", params=params, json=test_json2)
+    r.raise_for_status()
+    suffix = {1: "docx", 2: "pdf"}[params['document_format']]
+    with open(f"test.{suffix}", "wb") as f:
+        f.write(r.content)
+
+
+def spoken_language():
+    url = f"{use_address}/spoken_language"
+    with open(r"1.mp3", "rb") as f:
+        audio_data = {'file': ('file.bin', f, 'application/octet-stream')}
+        data = {"text": "You must study to be frank with the world"}
+        r1 = requests.post(url, data=data, files=audio_data)
+    assert r1.json()['code'] == 0
+
+    data = {"text": "apple Life is like a box of chocolates, you never know what you're gonna",
+            "url": "https://yunzhixue.blob.core.chinacloudapi.cn/download/ttsmaker-file-2025-1-13-10-47-47.mp3"}
+    r2 = requests.post(url, data=data)
+    assert r2.json()['code'] == 0
+
+
+@time_use
+def get_article3():
+    json_data = {"words": [
+        {'spell': 'term', 'meaning': '学期'}, {'spell': 'ordinary', 'meaning': '平常的, 普通的, 一般的'},
+        {'spell': 'discussion', 'meaning': '讨论, 谈论'}, {'spell': 'shine', 'meaning': '照耀, 发光, 闪耀'},
+        {'spell': 'million', 'meaning': '百万'}, {'spell': 'greet', 'meaning': '问候'}
+    ], "take_count": 1, "student_stage": 3, "demo_name": "测试项目"}
+
+    r = requests.post(f"{use_address}/article/reading-comprehension/deepseek", json=json_data)
+    r_json = r.json()
+    try:
+        assert r_json['articles'][0]["english"]
+        assert r_json['articles'][0]["difficultSentences"]
+        assert r_json['articles'][0]["questions"]
+        assert r_json['articles'][0]["allWordAmount"] > 10
+        return r_json
+    except Exception as e:
+        print("deepseek生成文章出错", e)
+        print(r_json)
+
+
+@time_use
+def run_all_test_cese():
+    test_connect() 
+
+    get_audio() 
+    spoken_language() 
+    download_word() 
+    print(get_article2_1()) 
+
+   
+   
+
+
+if __name__ == '__main__':
+   
+   
+   
+   
+   
+    get_audio()
+

+ 1 - 0
spoken_language/common/__init__.py

@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-

+ 6 - 0
spoken_language/common/credential.py

@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+class Credential:
+    def __init__(self, secret_id, secret_key, token=""):
+        self.secret_id = secret_id
+        self.secret_key = secret_key
+        self.token = token

+ 7 - 0
spoken_language/common/utils.py

@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+import sys
+
+def is_python3():
+    if sys.version > '3':
+        return True
+    return False

+ 8 - 0
spoken_language/convert_tts.py

@@ -0,0 +1,8 @@
+# -*- coding:utf-8 -*-
+
+import ffmpeg
+
+input_file = 'input.wav'
+output_file = 'output.mp3'
+
+ffmpeg.input(input_file).output(output_file, ar='16000', acodec='libmp3lame').run()

+ 20 - 0
spoken_language/read_config.py

@@ -0,0 +1,20 @@
+# -*- coding:utf-8 -*-
+import yaml
+
+def read_config(parent_dir="."):
+   
+    with open(parent_dir +"/config/tencent_config.yaml", "r",encoding="utf-8") as file:
+        config = yaml.safe_load(file)
+        return config
+   
+   
+
+
+
+if __name__ == '__main__':
+    import sys,os
+
+    current_dir = os.path.dirname(os.path.abspath(__file__))
+    parent_dir = os.path.abspath(os.path.join(current_dir, os.pardir))
+    sys.path.append(parent_dir)
+    print(read_config(parent_dir))
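
read_config expects config/tencent_config.yaml under the given parent directory. Judging from soeexample.py further below, the file needs at least appId, SecretId and SecretKey keys; this sketch (an assumption, not part of the patch) writes a placeholder file with that shape.

    import os
    import yaml

    placeholder = {"appId": "your-app-id", "SecretId": "your-secret-id", "SecretKey": "your-secret-key"}
    os.makedirs("config", exist_ok=True)
    with open("config/tencent_config.yaml", "w", encoding="utf-8") as f:
        yaml.safe_dump(placeholder, f, default_flow_style=False)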

+ 0 - 0
spoken_language/soe/__init__.py


+ 275 - 0
spoken_language/soe/speaking_assessment.py

@@ -0,0 +1,275 @@
+# -*- coding: utf-8 -*-
+import sys
+import hmac
+import hashlib
+import base64
+import time
+import json
+import threading
+import urllib
+
+import websocket
+import uuid
+from urllib.parse import quote
+from tools.loglog import logger
+
+def is_python3():
+    if sys.version > '3':
+        return True
+    return False
+
+
+class SpeakingAssessmentListener():
+    '''
+    Response fields:
+    on_recognition_start returns only the voice_id field.
+    on_fail carries only the voice_id, code and message fields.
+    on_recognition_complete has no result field.
+    Every other message contains all of the fields below.
+    Field        Type
+    code         Integer
+    message      String
+    voice_id     String
+    message_id   String
+    result
+    final        Integer
+    '''
+
+    def on_recognition_start(self, response):
+        pass
+
+    def on_intermediate_result(self, response):
+        pass
+
+    def on_recognition_complete(self, response):
+        pass
+
+    def on_fail(self, response):
+        pass
+
+
+NOTOPEN = 0
+STARTED = 1
+OPENED = 2
+FINAL = 3
+ERROR = 4
+CLOSED = 5
+
+
+def quote_autho(autho):
+    if sys.version_info >= (3, 0):
+        import urllib.parse as urlparse
+        return urlparse.quote(autho)
+    else:
+        return urllib.quote(autho)
+
+
+class SpeakingAssessment:
+
+    def __init__(self, appid, credential, engine_model_type, listener):
+        self.result = ""
+        self.credential = credential
+        self.appid = appid
+        self.server_engine_type = engine_model_type
+        self.status = NOTOPEN
+        self.ws = None
+        self.wst = None
+        self.voice_id = ""
+        self.new_start = 0
+        self.listener = listener
+        self.text_mode = 0
+        self.ref_text = ""
+        self.keyword = ""
+        self.eval_mode = 0
+        self.score_coeff = 1.0
+        self.sentence_info_enabled = 0
+        self.voice_format = 0
+        self.nonce = ""
+        self.rec_mode = 0
+
+    def set_text_mode(self, text_mode):
+        self.text_mode = text_mode
+    
+    def set_rec_mode(self, rec_mode):
+        self.rec_mode = rec_mode
+
+    def set_ref_text(self, ref_text):
+        self.ref_text = ref_text
+
+    def set_keyword(self, keyword):
+        self.keyword = keyword
+
+    def set_eval_mode(self, eval_mode):
+        self.eval_mode = eval_mode
+
+    def set_sentence_info_enabled(self, sentence_info_enabled):
+        self.sentence_info_enabled = sentence_info_enabled
+
+    def set_voice_format(self, voice_format):
+        self.voice_format = voice_format
+
+    def set_nonce(self, nonce):
+        self.nonce = nonce
+
+    def format_sign_string(self, param):
+        signstr = "soe.cloud.tencent.com/soe/api/"
+        for t in param:
+            if 'appid' in t:
+                signstr += str(t[1])
+                break
+        signstr += "?"
+        for x in param:
+            tmp = x
+            if 'appid' in x:
+                continue
+            for t in tmp:
+                signstr += str(t)
+                signstr += "="
+            signstr = signstr[:-1]
+            signstr += "&"
+        signstr = signstr[:-1]
+        return signstr
+
+    def create_query_string(self, param):
+        signstr = ""
+        for key, value in param.items():
+            if key == 'appid':
+                signstr += str(value)
+                break
+        signstr += "?"
+        for key, value in param.items():
+            if key == 'appid':
+                continue
+            value = quote_autho(str(value))
+            signstr += str(key) + "=" + str(value) + "&"
+        signstr = signstr[:-1]
+        return "wss://soe.cloud.tencent.com/soe/api/" + signstr
+
+    def sign(self, signstr, secret_key):
+        hmacstr = hmac.new(secret_key.encode('utf-8'),
+                           signstr.encode('utf-8'), hashlib.sha1).digest()
+        s = base64.b64encode(hmacstr)
+        s = s.decode('utf-8')
+        return s
+
+    def create_query_arr(self):
+        query_arr = dict()
+
+        query_arr['appid'] = self.appid
+        query_arr['server_engine_type'] = self.server_engine_type
+        query_arr['text_mode'] = self.text_mode
+        query_arr['rec_mode'] = self.rec_mode
+        query_arr['ref_text'] = self.ref_text
+        query_arr['keyword'] = self.keyword
+        query_arr['eval_mode'] = self.eval_mode
+        query_arr['score_coeff'] = self.score_coeff
+        query_arr['sentence_info_enabled'] = self.sentence_info_enabled
+        query_arr['secretid'] = self.credential.secret_id
+        if self.credential.token != "":
+            query_arr['token'] = self.credential.token
+        query_arr['voice_format'] = self.voice_format
+        query_arr['voice_id'] = self.voice_id
+        query_arr['timestamp'] = str(int(time.time()))
+        if self.nonce != "":
+            query_arr['nonce'] = self.nonce
+        else:
+            query_arr['nonce'] = query_arr['timestamp']
+        query_arr['expired'] = int(time.time()) + 24 * 60 * 60
+        return query_arr
+
+    def stop(self):
+        if self.status == OPENED:
+            msg = {'type': "end"}
+            text_str = json.dumps(msg)
+            self.ws.sock.send(text_str)
+        if self.ws:
+            if self.wst and self.wst.is_alive():
+                self.wst.join()
+            self.ws.close()
+
+    def write(self, data):
+        while self.status == STARTED:
+            time.sleep(0.1)
+        if self.status == OPENED:
+            self.ws.sock.send_binary(data)
+
+    def start(self):
+        def on_message(ws, message):
+           
+            response = json.loads(message)
+            response['voice_id'] = self.voice_id
+            if response['code'] != 0:
+                logger.error("%s server recognition fail %s" %
+                             (response['voice_id'], response['message']))
+                self.listener.on_fail(response)
+                return
+            if "final" in response and response["final"] == 1:
+                self.status = FINAL
+                self.result = message
+                self.listener.on_recognition_complete(response)
+               
+                self.ws.close()
+                return
+            else:
+                if response["result"] is not None:
+                    self.listener.on_intermediate_result(response)
+                    logger.info("%s recognition doing" % response['voice_id'])
+                    return
+
+        def on_error(ws, error):
+            if self.status == FINAL:
+                return
+            logger.error("websocket error %s  voice id %s" %
+                         (format(error), self.voice_id))
+            self.status = ERROR
+
+        def on_close(ws,close_status_code, close_msg):
+           
+           
+            self.status = CLOSED
+            logger.info("websocket closed  voice id %s" %
+                        self.voice_id)
+
+        def on_open(ws):
+            self.status = OPENED
+
+        query_arr = self.create_query_arr()
+        if self.voice_id == "":
+            query_arr['voice_id'] = str(uuid.uuid1())
+            self.voice_id = query_arr['voice_id']
+        query = sorted(query_arr.items(), key=lambda d: d[0])
+        signstr = self.format_sign_string(query)
+        autho = self.sign(signstr, self.credential.secret_key)
+        requrl = self.create_query_string(query_arr)
+       
+        if is_python3():
+            autho = urllib.parse.quote(autho)
+        else:
+            autho = urllib.quote(autho)
+        requrl += "&signature=%s" % autho
+       
+        self.ws = websocket.WebSocketApp(requrl, None,
+                                         on_error=on_error, on_close=on_close, on_message=on_message)
+        self.ws.on_open = on_open
+        self.wst = threading.Thread(target=self.ws.run_forever)
+        self.wst.daemon = True
+        self.wst.start()
+        self.status = STARTED
+        response = {'voice_id': self.voice_id}
+        self.listener.on_recognition_start(response)
+       

+ 155 - 0
spoken_language/soeexample.py

@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+import os
+import time
+import requests
+import threading
+from datetime import datetime
+import json
+
+from spoken_language.common import credential
+from spoken_language.soe import speaking_assessment
+
+from spoken_language.read_config import read_config
+
+
+config_data = read_config()
+app_id,secret_id,secret_key= config_data['appId'],config_data['SecretId'],config_data['SecretKey']
+
+APPID = app_id
+SECRET_ID = secret_id
+SECRET_KEY = secret_key
+TOKEN = ""
+ENGINE_MODEL_TYPE = "16k_en"
+SLICE_SIZE = 32000
+
+spoken_result = {}
+
+
+class MySpeechRecognitionListener(speaking_assessment.SpeakingAssessmentListener):
+    def __init__(self, id):
+        self.id = id
+
+    def on_recognition_start(self, response):
+        pass
+       
+       
+
+    def on_intermediate_result(self, response):
+        rsp_str = json.dumps(response, ensure_ascii=False)
+       
+       
+
+    def on_recognition_complete(self, response):
+        global spoken_result
+        spoken_result[self.id] = response
+       
+       
+       
+
+    def on_fail(self, response):
+        rsp_str = json.dumps(response, ensure_ascii=False)
+       
+       
+
+def process(id):
+    audio = r"C:\Users\86131\Desktop\音频\output_16k_mono.mp3"
+    listener = MySpeechRecognitionListener(id)
+   
+    credential_var = credential.Credential(SECRET_ID, SECRET_KEY)
+    recognizer = speaking_assessment.SpeakingAssessment(
+        APPID, credential_var, ENGINE_MODEL_TYPE,  listener)
+    recognizer.set_text_mode(0)
+    recognizer.set_ref_text("anyway")
+    recognizer.set_eval_mode(0)
+    recognizer.set_keyword("")
+    recognizer.set_sentence_info_enabled(0)
+    recognizer.set_voice_format(1)
+    try:
+        recognizer.start()
+        with open(audio, 'rb') as f:
+            content = f.read(SLICE_SIZE)
+            while content:
+                recognizer.write(content)
+                content = f.read(SLICE_SIZE)
+                # sleep to simulate the pacing of a real-time audio stream
+               
+               
+                time.sleep(0.2)
+    except Exception as e:
+        print(e)
+    finally:
+        recognizer.stop()
+
+def process_rec(task_id,audio_path,audio_text,audio_binary=None):
+    audio = audio_path
+    listener = MySpeechRecognitionListener(task_id)
+   
+    credential_var = credential.Credential(SECRET_ID, SECRET_KEY)
+    recognizer = speaking_assessment.SpeakingAssessment(
+        APPID, credential_var, ENGINE_MODEL_TYPE,  listener)
+    recognizer.set_text_mode(0)
+    recognizer.set_ref_text(audio_text)
+    recognizer.set_eval_mode(1)
+    recognizer.set_keyword("")
+    recognizer.set_sentence_info_enabled(0)
+    recognizer.set_voice_format(2)
+   
+   
+    recognizer.set_rec_mode(1)
+    try:
+        recognizer.start()
+        if audio_binary: 
+           
+           
+            recognizer.write(audio_binary)
+        else:
+            with open(f"{task_id}.mp3", 'rb') as f:
+                content = f.read()
+                recognizer.write(content)
+    except Exception as e:
+        print(e)
+    finally:
+        recognizer.stop()
+
+
+def process_multithread(number):
+    thread_list = []
+    for i in range(0, number):
+        thread = threading.Thread(target=process, args=(i,))
+        thread_list.append(thread)
+        thread.start()
+
+    for thread in thread_list:
+        thread.join()
+
+
+def make_spoken(task_id,audio_url,audio_content,audio_text):
+
+    if audio_url:
+        print("有url,应该去下载mp3文件")
+       
+        r = requests.get(audio_url)
+        audio_content = r.content
+    else:
+        with open(f"{task_id}.mp3",'wb') as f:
+            f.write(audio_content)
+
+    process_rec(task_id,audio_path=f"",audio_text=audio_text,audio_binary=audio_content)
+    global spoken_result
+    for _ in range(60):
+        if task_id in spoken_result:
+            r = spoken_result[task_id]
+            del spoken_result[task_id]
+            if os.path.exists(f"{task_id}.mp3"):
+                os.remove(f"{task_id}.mp3")
+            return r
+        time.sleep(0.5)
+    return None
+
+if __name__ == "__main__":
+
+   
+   
+   
+    process_rec(0,r"C:\Users\86131\Desktop\音频\output_16k_mono.mp3","You must study to be frank with the world apple")
+   

+ 85 - 0
tools/ali_log.py

@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+from aliyun.log import LogClient, PutLogsRequest, LogItem, GetLogsRequest, IndexConfig
+import time
+import os
+
+accessKeyId = os.getenv("OSS_ACCESS_KEY_ID")
+accessKey = os.getenv("OSS_ACCESS_KEY_SECRET")
+endpoint = "cn-hangzhou.log.aliyuncs.com"
+
+client = LogClient(endpoint, accessKeyId, accessKey)
+
+project_name = "ai-test-7230"
+logstore_name = "test2-logstore"
+query = "*| select dev,id from " + logstore_name
+logstore_index = {'line': {
+    'token': [',', ' ', "'", '"', ';', '=', '(', ')', '[', ']', '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t',
+              '\r'], 'caseSensitive': False, 'chn': False}, 'keys': {'dev': {'type': 'text',
+                                                                             'token': [',', ' ', "'", '"', ';', '=',
+                                                                                       '(', ')', '[', ']', '{', '}',
+                                                                                       '?', '@', '&', '<', '>', '/',
+                                                                                       ':', '\n', '\t', '\r'],
+                                                                             'caseSensitive': False, 'alias': '',
+                                                                             'doc_value': True, 'chn': False},
+                                                                     'id': {'type': 'long', 'alias': '',
+                                                                            'doc_value': True}}, 'log_reduce': False,
+    'max_text_len': 2048}
+
+from_time = int(time.time()) - 3600
+to_time = time.time() + 3600
+
+def create_project():
+    print("ready to create project %s" % project_name)
+    client.create_project(project_name, project_des="")
+    print("create project %s success " % project_name)
+    time.sleep(60)
+
+def create_logstore():
+    print("ready to create logstore %s" % logstore_name)
+    client.create_logstore(project_name, logstore_name, ttl=3, shard_count=2)
+    print("create logstore %s success " % project_name)
+    time.sleep(30)
+
+def create_index():
+    print("ready to create index for %s" % logstore_name)
+    index_config = IndexConfig()
+    index_config.from_json(logstore_index)
+    client.create_index(project_name, logstore_name, index_config)
+    print("create index for %s success " % logstore_name)
+    time.sleep(60 * 2)
+
+def put_logs(msg:str):
+    log_group = []
+
+    log_item = LogItem()
+    contents = [
+        ('info', msg),
+    ]
+    log_item.set_contents(contents)
+    log_group.append(log_item)
+    request = PutLogsRequest(project_name, logstore_name, "", "", log_group, compress=False)
+    client.put_logs(request)
+
+
+
+def get_logs():
+    print("ready to query logs from logstore %s" % logstore_name)
+    request = GetLogsRequest(project_name, logstore_name, from_time, to_time, query=query)
+    response = client.get_logs(request)
+    for log in response.get_logs():
+        for k, v in log.contents.items():
+            print("%s : %s" % (k, v))
+        print("*********************")
+
+
+if __name__ == '__main__':
+   
+   
+   
+   
+   
+   
+   
+    put_logs("测试")
+   
+   

File diff suppressed because it is too large
+ 251 - 0
tools/audio.py


+ 48 - 0
tools/del_expire_file.py

@@ -0,0 +1,48 @@
+# -*- coding=utf-8 -*-
+"""
+Delete expired file resources.
+"""
+import os
+import datetime
+from time import sleep
+
+
+def del_file(folder_path,expired_days=10):
+    """
+    Delete files in a folder that have passed the expiry window.
+    folder_path: folder whose expired files should be removed
+    expired_days: number of days after which a file counts as expired
+    """
+
+    if not os.path.exists(folder_path):
+        print("文件夹不存在")
+        return None
+
+   
+    now = datetime.datetime.now()
+
+   
+    for filename in os.listdir(folder_path):
+        file_path = os.path.join(folder_path, filename)
+       
+        if os.path.isfile(file_path):
+           
+            create_time = os.path.getctime(file_path)
+            create_date = datetime.datetime.fromtimestamp(create_time)
+           
+            delta = now - create_date
+           
+            if delta.days > expired_days:
+                os.remove(file_path)
+               
+
+
+def run_del_normal():
+    """这是小程序项目内的正常删除机制"""
+    while True:
+        del_file("make_docx_demo/file_result",expired_days=15)
+        sleep(3600*24)
+
+
+if __name__ == '__main__':
+    run_del_normal()

+ 105 - 0
tools/loglog.py

@@ -0,0 +1,105 @@
+# -*- coding=utf-8 -*-
+import time, os
+import traceback
+from loguru import logger
+from threading import Lock
+from tools.ali_log import put_logs
+
+class SimpleLogger:
+    """
+    Lightweight file logger: stores the large volume of mostly throw-away GPT logs.
+    """
+
+    def __init__(self, base_file_name: str = "ai_log"):
+        self.base_file_name = "log/" + base_file_name 
+        self.file_ext = ".txt"
+        self.max_size = 10 * 1024 * 1024 
+        self.current_file = self._get_current_file()
+        self.lock = Lock()
+        if not os.path.exists("log"):
+            os.mkdir("log")
+
+    def _get_current_file(self):
+        """获取当前应该写入的文件"""
+        i = 1
+        while True:
+            file_name = f"{self.base_file_name}_{i}{self.file_ext}"
+            if not os.path.exists(file_name) or os.path.getsize(file_name) < self.max_size:
+                return file_name
+            i += 1
+
+    def _check_file_size(self):
+        """检查文件大小,如果需要则切换到新文件"""
+        try:
+            if os.path.getsize(self.current_file) >= self.max_size:
+                self.current_file = self._get_current_file()
+        except FileNotFoundError:
+            with open(f"{self.base_file_name}_1{self.file_ext}", "a", encoding="utf-8") as log_file:
+                log_file.write(f"Hello World\n")
+
+    def log(self, message:str, level="INFO"):
+        """记录日志到文件"""
+        self._check_file_size()
+        date_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+        with self.lock:
+            with open(self.current_file, "a", encoding="utf-8") as log_file:
+                log_file.write(f"{date_time} 【{level}】 {str(message)}\n\n")
+
+    def info(self, message:str):
+        """Log an INFO-level message."""
+        self.log(message, "INFO")
+
+    def warning(self, message:str):
+        """Log a WARNING-level message."""
+        self.log(message, "WARNING")
+
+    def error(self, message:str):
+        """Log an ERROR-level message."""
+        message = "\n" + "-" * 20 + "\n" + message + "\n" + "-" * 20
+        self.log(message, "ERROR")
+
+    def debug(self, message:str):
+        """Log a DEBUG-level message."""
+        self.log(message, "DEBUG")
+
+logger.remove(handler_id=None) 
+logger.add('log/log.log', level="INFO", rotation="5 MB", encoding="utf-8", retention="7 days")
+logger.add('log/error.log', level="ERROR", rotation="5 MB", encoding="utf-8", retention="7 days")
+simple_logger = SimpleLogger()
+
+def exception_handler(func):
+    def wrapper(*args, **kwargs):
+        try:
+            return func(*args, **kwargs)
+        except Exception as e:
+            logger.error(f"{type(e).__name__}: {e}")
+            traceback_str = traceback.format_exc()
+            logger.error(f"错误追溯:{traceback_str}")
+    return wrapper
+
+def log_err_e(e:Exception,msg=None):
+    if msg:
+        logger.error(f"{msg}{type(e).__name__}:{e}")
+    traceback_str = traceback.format_exc()
+    logger.error(traceback_str)
+
+class AliyunLogHandler:
+    @staticmethod
+    def write(message):
+        put_logs(message)
+
+if os.getenv("env") != "development":
+    print("这是正式环境,加载阿里云日志")
+    aliyun_log_handler = AliyunLogHandler()
+    logger.add(aliyun_log_handler, enqueue=True) 
+
+
+if __name__ == '__main__':
+   
+   
+   
+    #
+   
+    import os
+    os.chdir("..")
+    logger.error("test信息0123456789.*/-")

+ 180 - 0
tools/new_mysql.py

@@ -0,0 +1,180 @@
+# -*- coding:utf-8 -*-
+import os
+from tools.loglog import logger
+import pymysql
+from dbutils.pooled_db import PooledDB
+import time
+
+
+class MySQLUploader:
+    _instance = None
+    _pool = None
+    _initialized = False
+
+    def __new__(cls, *args, **kwargs):
+        if not cls._instance:
+            cls._instance = super().__new__(cls)  # object.__new__() rejects extra arguments
+        return cls._instance
+
+    def __init__(self, database='qbank_db'):
+        if not self._initialized:
+            self.host = 'rm-uf6881jgyy065rdxdoo.rwlb.rds.aliyuncs.com'
+            self.user = 'qingti_user'
+            self.__password = 'qingti@2024'
+            self.database = database
+            self.attempts = 0
+            self.max_attempts = 3
+            self.start_time = None
+            self.connect()
+            self._initialized = True
+
+    def __del__(self):
+        if self._pool:
+            self._pool.close()
+            self._pool = None
+
+    def connect(self):
+        if not self._pool:
+            self._pool = PooledDB(
+                creator=pymysql,
+                maxconnections=20,
+                mincached=2,
+                maxcached=5,
+                maxshared=5,
+                blocking=True,
+                host=self.host,
+                user=self.user,
+                password=self.__password,
+                database=self.database,
+                port=3306,
+                charset='utf8mb4',
+            )
+            print("connect mysql succeed")
+
+    def execute_(self, query, params=None):
+        for _ in range(3):
+            conn = self._pool.connection() 
+            cursor = conn.cursor()
+            try:
+                if params:
+                    cursor.execute(query, params)
+                else:
+                    cursor.execute(query)
+                conn.commit()
+                return True
+            except pymysql.MySQLError as e:
+                logger.warning(f"可忽略的错误 {type(e).__name__},{e}")
+                conn.rollback()
+                time.sleep(0.5)
+            finally:
+                cursor.close()
+                conn.close()
+
+        logger.critical(f"execute_严重错误,3次提交没成功.{query} {params}")
+        return False
+
+    def bulk_insert(self, query, data_list):
+        """执行批量插入"""
+        for _ in range(3):
+            conn = self._pool.connection() 
+            cursor = conn.cursor()
+            try:
+                cursor.executemany(query, data_list)
+                conn.commit()
+                return True
+            except pymysql.MySQLError as e:
+                logger.warning(f"可忽略的错误 bulk_insert数据库批量插入错误{type(e).__name__}:{e}")
+                conn.rollback() 
+                time.sleep(0.5)
+            finally:
+                cursor.close()
+                conn.close()
+
+        logger.critical(f"bulk_insert,3次提交没成功.{query} {data_list}")
+        return False
+
+    def query_data(self, query, params=None):
+        """执行查询并返回结果"""
+        for _ in range(3):
+            conn = self._pool.connection() 
+            cursor = conn.cursor()
+            try:
+                if params:
+                    cursor.execute(query, params)
+                else:
+                    cursor.execute(query)
+               
+                results = cursor.fetchall()
+                return results
+            except pymysql.MySQLError as e:
+                logger.warning(f"可忽略query_data错误类型{type(e).__name__}:{e}")
+                logger.warning(f"可忽略query_data错误:{query},{params}")
+                time.sleep(0.5)
+            finally:
+                cursor.close()
+                conn.close()
+        logger.critical(f"query_data 3次没成功.{query} {params}")
+        return False
+
+    def execute_sql_file(self,script_file_path):
+        """执行sql脚本:传入路径或者sql路径都可以"""
+        def execute_file(path):
+
+           
+            with open(path, 'r', encoding='utf-8') as file:
+                sql_script = file.read()
+
+            conn = self._pool.connection() 
+            cursor = conn.cursor()
+           
+           
+            cursor.execute(sql_script)
+           
+            conn.commit()
+
+            cursor.close()
+            conn.close()
+
+        
+        if os.path.isdir(script_file_path):
+            for file in os.listdir(script_file_path):
+                execute_file(os.path.join(script_file_path, file))
+        else:
+            if script_file_path.endswith(".sql"):
+                execute_file(script_file_path)
+
+    def close_connection(self):...
+
+
+
+if __name__ == '__main__':
+
+    m = MySQLUploader()
+    s = "select Id,BritishPronunciation from dictionary_word where wordspelling = %s"
+    r = m.query_data(s, ("sky",))
+    print(r)
+    input()
+
+   
+   
+   
+   
+
+   
+   
+   
+   
+   
+   
+
+   
+   
+   
+   
+   
+   
+   
+   
+    #
+   
+   

+ 250 - 0
tools/sql_format.py

@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+from tools.new_mysql import MySQLUploader
+from tools.loglog import logger
+
+from core.respone_format import *
+
+
+class CRUD:
+    def __init__(self):
+        self.m = MySQLUploader()
+        self.people_place_name = [] 
+        self.get_people_place_name()
+
+    def get_word_by_wordid(self, wordid):
+        s = "select WordSpelling from dictionary_word where Id = %s"
+        r = self.m.query_data(s, (wordid,))
+        if r:
+           
+            word = r[0][0]
+            return word
+        return None
+
+    def get_wordid_by_wordspelling(self, wordspelling,auto_insert=False):
+        """加一个功能。大字典内没有这个单词就自动插入,返回id。auto_insert为真,自动插入大字典,获取其id"""
+        s = "select Id from dictionary_word where wordspelling = %s"
+        r = self.m.query_data(s, (wordspelling,))
+        if r:
+           
+            wordid = r[0][0]
+            return wordid
+
+        if auto_insert:
+            s = "insert into dictionary_word (WordSpelling) VALUES (%s);"
+            self.m.execute_(s,(wordspelling,))
+            s = "select Id from dictionary_word where wordspelling = %s"
+            r = self.m.query_data(s, (wordspelling,))
+            wordid = r[0][0]
+            return wordid
+
+   
+    def get_exchange_prototype(self,wordspelling):
+        s = "select Word from dictionary_exchange where Word = %s"
+        r = self.m.query_data(s, (wordspelling,))
+        if r:
+            return list({i[0] for i in r})
+        s2 = "select Word from dictionary_exchange where InflectedWordSpelling = %s"
+        r2 = self.m.query_data(s2, (wordspelling,))
+        if r2:
+            return list({i[0] for i in r2})
+
+    def get_word_meaning_by_wordid(self, wordid):
+        s = "select Id,WordSpelling,WordMeaning from dictionary_meaningitem where WordId = %s"
+        r = self.m.query_data(s, (wordid,))
+        return r
+
+   
+    def get_people_place_name(self):
+        s2 = "select word from people_place_name"
+        r = self.m.query_data(s2)
+        for i in r:
+            self.people_place_name.append(i[0])
+
+
+   
+    def get_word_meaning_by_wordspelling(self, wordspelling, frequency):
+        """根据单词获取其全部词义"""
+       
+        wordid = self.get_wordid_by_wordspelling(wordspelling)
+
+       
+        return_data = {"word_id": wordid, "frequency": frequency, "word": wordspelling,
+                       "meanings": {"default": [], "sun_english": {"name": "春笋英语", "items": []}, "oxford": {"name": "牛津", "items": []}}}
+
+       
+        s = "select Id,WordMeaning from dictionary_meaningitem where WordSpelling = %s"
+        r = self.m.query_data(s, (wordspelling,))
+        for row_data in r:
+            return_data["meanings"]["default"].append({"id": row_data[0], "text": row_data[1]})
+
+       
+        s2 = "select Id,WordMeaning from dictionary_meaningitem_spring_bamboo where WordSpelling = %s"
+        r2 = self.m.query_data(s2, (wordspelling,))
+        for row_data in r2:
+            return_data["meanings"]["sun_english"]["items"].append({"id": row_data[0], "text": row_data[1]})
+
+       
+        s2 = "select Id,WordMeaning from dictionary_meaningitem_oxford where WordSpelling = %s"
+        r2 = self.m.query_data(s2, (wordspelling,))
+        for row_data in r2:
+            return_data["meanings"]["oxford"]["items"].append({"id": row_data[0], "text": row_data[1]})
+
+        return return_data
+
+    def delete_word_meaning_by_wordmeaningid(self, wordmeaningid):
+        s = "DELETE FROM dictionary_meaningitem where Id = %s"
+        r = self.m.execute_(s, (wordmeaningid,))
+        logger.info(f"根据词义id删除,{wordmeaningid}。结果{r}")
+        return True if r is True else False
+
+
+    def get_word_all_info(self,word_id, spell,frequency):
+        def get_associational_words_info(word_meaning_id) -> list:
+            return_data = []
+            s = "select Id,BaseWordMeaningId,BaseWord,BaseWordMeaning,AssociationalWord,AssociationalWordMeaningId,AssociationalWordMeaning," \
+                "AssociationReason,ReverseAssociationReason,CreatedTime,UpdatedTime " \
+                "from dictionary_associational_word where BaseWordMeaningId = %s"
+            r = self.m.query_data(s, (word_meaning_id,))
+            if not r:
+                return return_data
+
+            for single_meaning in r:
+                associational_id, base_word_meaning_id, base_word, base_word_meaning, associational_word, \
+                associational_word_meaning_id, associational_word_meaning, association_reason,\
+                reverse_association_reason, created_time, updated_time = single_meaning
+                r_data = {"id":associational_id,"base_word":{"word":base_word,"meaning_id":base_word_meaning_id,"meaning":base_word_meaning},
+                          "associational_word":{"word":associational_word,"meaning_id":associational_word_meaning_id,"meaning":associational_word_meaning},
+                          "association_reason":association_reason,"reverse_association_reason":reverse_association_reason,
+                          "create_time":created_time.strftime('%Y-%m-%d %H:%M:%S'),"update_time":updated_time.strftime('%Y-%m-%d %H:%M:%S')}
+                return_data.append(r_data)
+
+            return return_data
+
+        def get_phrases_info(word_meaning_id) -> list:
+            return_data = []
+            s = "select Id,PhraseSpellingText,PhraseChineseTranslation,FromType,CreatedTime,UpdatedTime " \
+                "from dictionary_phrase where WordMeaningId = %s"
+            r = self.m.query_data(s, (word_meaning_id,))
+            if not r:
+                return return_data
+            for single_phrase in r:
+                phrase_id, phrase_spelling_text, phrase_chinese_translation, from_type, created_time, updated_time = single_phrase
+                r_data = {"id":phrase_id,"english":phrase_spelling_text,"chinese":phrase_chinese_translation,"from":from_type,
+                          "create_time":created_time.strftime('%Y-%m-%d %H:%M:%S'),"update_time":updated_time.strftime('%Y-%m-%d %H:%M:%S')}
+                return_data.append(r_data)
+
+            return return_data
+
+        def get_exchanges_info(word_id) -> list:
+            return_data = []
+            s = "select Id,InflectedWordSpelling,Properties,WordTransformationDescription " \
+                "from dictionary_exchange where WordId = %s"
+            r = self.m.query_data(s, (word_id,))
+            if not r:
+                return return_data
+            for single_exchange in r:
+                exchange_id,spell,properties,description = single_exchange
+                r_data = {"id": exchange_id, "spell": spell, "properties": properties, "description": description}
+                return_data.append(r_data)
+
+            return return_data
+
+        return_data_all = {"word_id":word_id,"spell":spell,"frequency":frequency,"meanings":[],"exchanges":[]}
+        if spell in self.people_place_name:
+            return_data_all["type"] = "人名地名"
+        else:
+            return_data_all["type"] = "一般词汇"
+
+        s = "select Id,WordMeaning,OperateAccount from dictionary_meaningitem where WordId = %s"
+        r = self.m.query_data(s, (word_id,))
+        if not r: 
+            return resp_200(data=return_data_all)
+        for single_meaning in r:
+            meaning_id, word_meaning, operate_account = single_meaning
+            single_meaning_item = {"id":meaning_id,"text":word_meaning,"editor":operate_account}
+            associational_words_list = get_associational_words_info(meaning_id)
+            single_meaning_item["associational_words"] = associational_words_list
+            phrases_list = get_phrases_info(meaning_id)
+            single_meaning_item["phrases"] = phrases_list
+            return_data_all["meanings"].append(single_meaning_item)
+
+        exchanges_info_list = get_exchanges_info(word_id)
+        return_data_all["exchanges"] = exchanges_info_list
+
+        return resp_200(data=return_data_all)
+
+    def delete_associational_word(self,word_id,associational_id):
+        s = "select Id from dictionary_meaningitem where WordId = %s"
+        r = self.m.query_data(s, (word_id,))
+        if not r:
+            return resp_400(message="词义表内没有这个单词的词义")
+        meaning_id = r[0][0]
+
+        s = "select BaseWordMeaningId from dictionary_associational_word where Id = %s"
+        r = self.m.query_data(s, (associational_id,))
+       
+        if r and r[0][0]==meaning_id:
+            s = "DELETE FROM dictionary_associational_word where Id = %s"
+            r = self.m.execute_(s, (associational_id,))
+            logger.info(f"根据联想词id删除,{associational_id}。结果{r}")
+            data = True if r is True else False
+            return resp_200(data=[data]) if data else resp_500(message="数据库内部错误")
+        else:
+            logger.info(f"删除联想词时,单词id与联想词id校验失败。{r} {meaning_id}")
+            return resp_400(message="单词id与联想词id校验失败")
+
+    def delete_phrese_word(self,word_id,phrase_id):
+        s = "select Id from dictionary_meaningitem where WordId = %s"
+        r = self.m.query_data(s, (word_id,))
+        if not r:
+            return resp_400(message="词义表内没有这个单词的词义")
+        meaning_id = r[0][0]
+
+        s = "select WordMeaningId from dictionary_phrase where Id = %s"
+        r = self.m.query_data(s, (phrase_id,))
+       
+        if r and r[0][0] == meaning_id:
+            s = "DELETE FROM dictionary_phrase where Id = %s"
+            r = self.m.execute_(s, (phrase_id,))
+            logger.info(f"根据联想词id删除,{phrase_id}。结果{r}")
+            data = True if r is True else False
+            return resp_200(data=[data]) if data else resp_500(message="数据库内部错误")
+        else:
+            logger.info(f"删除语块时,单词id与语块id校验失败。{r} {meaning_id}")
+            return resp_400(message="单词id与联想词id校验失败")
+
+    def close_connection(self):
+        """关闭数据库连接"""
+        self.m.close_connection()
+
+
+class UserCRUD:
+    def __init__(self):
+        self.m = MySQLUploader()
+
+    def get_userinfo_by_account(self, account):
+        s = "select id,account,password,uname,create_time from user where account = %s"
+        r = self.m.query_data(s, (account,))
+        if r:
+           
+            user_info = (r[0][0], r[0][1], r[0][2], r[0][3], r[0][4].strftime('%Y-%m-%d %H:%M:%S'))
+            return user_info
+        return None
+
+    def close_connection(self):
+        """关闭数据库连接"""
+        self.m.close_connection()
+
+
+if __name__ == '__main__':
+    crud = CRUD()
+   
+   
+   
+   
+   
+
+    r = crud.get_wordid_by_wordspelling("abcdefg")
+    print(type(r))
+    print(r)
+
+    crud.close_connection()
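
A sketch of reading a word's full record through CRUD (the spelling comes from the new_mysql demo above; the frequency value is a placeholder, and resp is whatever the resp_200 wrapper from core.respone_format returns).

    crud = CRUD()
    word_id = crud.get_wordid_by_wordspelling("sky", auto_insert=False)
    if word_id:
        resp = crud.get_word_all_info(word_id, "sky", frequency=100)
        # the wrapped data carries word_id / spell / frequency / type / meanings / exchanges
    crud.close_connection()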

+ 5 - 0
tools/thread_pool_manager.py

@@ -0,0 +1,5 @@
+# -*- coding: utf-8 -*-
+from concurrent.futures import ThreadPoolExecutor, wait
+
+
+pool_executor = ThreadPoolExecutor(max_workers=20)

Some files were not shown because too many files changed in this diff