likemindsdb.properties - Personalization

# LPS has a built-in scheduler to fire the various asynchronous events.
# Syntax for specifying scheduled events:
# lps.schedule.<schedule_name>=<schedule time> <event name> [<event args>]
# <schedule_name> A unique name for the scheduled event.
# <schedule time> A valid crontab entry (minute hour day-of-month month day-of-week).
# <event name> One of the predefined LPS event types.
# <event args> Arguments for the event (optional).

lps.schedule.lazywrite=12,24,36,48,00 * * * *   lazyDBWrite
lps.schedule.purgeusers=5 4 * * * purgeUserCache
# Fire this event every 30 minutes.
lps.schedule.cleartempusers=0,30 * * * * clearTempUserCache  1800
lps.schedule.synccache=28 4 * * * syncCache
lps.schedule.buildstats=00 3 * * * runBuildstats
lps.schedule.buildvisit=15 3 * * * runBuildvisit
lps.schedule.accumulator=30 3 * * * runAccumulator
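
# For example, the lazywrite entry above ("12,24,36,48,00 * * * *   lazyDBWrite")
# fires the lazyDBWrite event at minutes 0, 12, 24, 36 and 48 of every hour, and
# the purgeusers entry ("5 4 * * *") fires purgeUserCache every day at 4:05 AM.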


# Database connection pool settings
# db.max_connection: maximum open DB connections allowed
# db.max_reconnect.retry: maximum reconnect retries
# db.connection.max_idle_time: maximum idle time allowed; 0 (the default) means no reconnect

db.max_reconnect.retry = 5
db.max_connection=10
db.connection.max_idle_time = 0

# Enable or disable SQL logging. SQL logging is disabled by default
# because it may affect performance, but it can be useful at times
# to enable it and see what SQL is being executed.
db.enable_sql_logging = false

# Cache configuration
# Specifies the number of user and item objects from the database to cache.
# Recommended user cache size = mentor pool size + number of active users on the site per session.
# Recommended item cache size = all items if possible, or the most popular items.
# Age time = reload an object from the database if it has been in the cache for the specified time.

db.tune.user_cache = 5000
db.tune.item_cache = 4000

# Time in seconds; 86400 = 24 hours
db.tune.user_cache_age_time = 86400
db.tune.item_cache_age_time = 86400

# Maximum number of temporary users allowed.
# Temp users are memory based, and the age time specifies the maximum lifetime for
# a temp user. Each time a temp user is accessed, its lifetime is reset to the value specified.

db.tune.temp_user_cache=1
db.tune.temp_user_cache_age_time=2400
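
# With the settings above, at most one temp user is kept in memory, and a temp
# user that has not been accessed for 2400 seconds (40 minutes) is dropped.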

# LikeMinds allows any database column in the user and item tables to be
# cached along with the LikeMinds columns. Values for these columns can be retrieved
# through queryPrediction, getUserInfo, getItemInfo, getTransactions, etc.
# The following section specifies application-defined columns to cache
# for the user and item objects.
#
# The syntax is as follows:
# db.cache.<symbolic table name>.<symbolic field name>=<field name>
# Custom user fields
# Note that the following fields are always cached:
# user_id, name, time_stamp
# item_id, year, num_rtg, total_rtg, total_square_rtg
# Example: the user columns passwd and flags are cached in the user object.
# db.cache.user_data.Password = passwd
# db.cache.user_data.Flags = flags
# Example: the item column title is cached in the item object.
# db.cache.item_data.title = title

# Weights for computing bestbets
# The bestbet option sorts the recommendation list based on prediction and confidence values.
# The following configuration values specify how prediction and confidence are combined
# into a sort value. The list is sorted in descending order of quality, so items at the
# top represent the bestbets. (A worked example follows the settings below.)
# By configuring the following keys you can control the relative importance of the
# prediction and confidence values.
# mpow = MultPower
# pow  = Power
# K = Coefficient
# Quality = confidence ^ mpow * (prediction - mean prediction) + K * confidence ^ pow
  
db.applic.param.BestBets.Coefficient = 1.0
db.applic.param.BestBets.Power = 1.0
db.applic.param.BestBets.MultPower=0.5
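
# Worked example (hypothetical numbers, using the weights above): with
# MultPower = 0.5, Power = 1.0 and Coefficient = 1.0, an item with
# prediction = 10, mean prediction = 7 and confidence = 4 gets
# Quality = 4^0.5 * (10 - 7) + 1.0 * 4^1.0 = 2 * 3 + 4 = 10.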


# This key defines a multiplier to stop the absolute coverage from falling to zero
# in case the number of users is too large.
db.applic.param.abscvg_multiplier = 1.0


# Confidence levels
# Specifies the lowest confidence value and number of confidence levels
# All engines map their internal confidence values to one of the configured levels.

db.applic.param.LowestConfidence=1
db.applic.param.NumberConfidenceLevels=4
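
# For example, with LowestConfidence = 1 and NumberConfidenceLevels = 4, engines
# report confidence values 1, 2, 3 or 4, where 1 is the lowest confidence.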

# The following settings specify the rating scale and special rating values. The preference
# engine looks at ratings that are within the specified rating scale and ignores all
# other ratings for the purpose of computing user similarity.
# Settings specified here are global settings for all rating sets. To define a
# different rating scale for a particular set, configure the scale as part of that
# rating set's configuration.

db.applic.param.LowestRating=0
db.applic.param.NumberRatingValues=13
# db.applic.param.UnknownRecommendation=-3
db.applic.param.WontRate=-1
db.applic.param.Unrated=-2
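
# For example, with LowestRating = 0 and NumberRatingValues = 13 the valid rating
# scale is 0 through 12; the special values WontRate (-1) and Unrated (-2) fall
# outside the scale and are not used when computing user similarity.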

# These settings are used by the getBestBet and getWorstBet APIs only. They are
# used to split the prediction list into a bestbet and a worstbet list.
# Threshold specifies the prediction value above which all items are
# treated as bestbets. list.cutoff specifies the size of the bestbet/worstbet list
# to cache.

# db.applic.param.BestBets.Threshold= NumberRatingValues/2
# db.applic.param.WorstBets.Threshold= NumberRatingValues/2 - 1
# db.applic.param.BestBets.list.cutoff= 50 ( %value )

# Configuration used by the getItemsToRate API. This specifies the maximum rows to fetch
# from the visit_list table on each DB read.

db.tune.itemstorate.buffersize=20



# A mentor group defines all properties associated with the mentors used by a particular
# engine. The clickstream, purchase and preference engines require a valid mentor group to work.
# All mentor group names defined should be listed under the "db.applic.mentor.source" key.
#
# Properties associated with the mentor group:
# Table where the user<--->mentor relationship is stored.
# Table where potential mentors for all users are stored.
# Number of mentors per user.
# Column in the user_data table to identify the type of mentor (archetype, userzero, etc.).
# Threshold values to become a mentor (min and max rating or clickstream data).
# Number of users in the mentor pool.
# Column in the user_data table to look up to determine who needs sifting (sift pri).
# Number of threads to use for sifting.
# How often to check for users to sift.
# When and how often to rebuild the mentor pool.
# Behavior when there are no users to sift.
# Behavior when multiple engines update the same sift pri column (recompute_only_on_new_data).
# Additional constraint to check when determining users to sift; the
# constraint applies only to the user_data table and can be any SQL WHERE clause.

# All mentor groups defined for this installation

db.applic.mentor.source=ratingMentor, xactionMentor

# ratingMentor is used by engPreference, a preference engine instance. This mentor group
# defines a rating-based mentor group.

ratingMentor.table=lps_user_mentor
ratingMentor.max_mentors = 50
ratingMentor.mentor_type = mentor_type
ratingMentor.pri_check_interval = 20
ratingMentor.pri_list_size = 10
ratingMentor.mentor_cutoff=3.0
ratingMentor.min_mentor_ratings=10
ratingMentor.max_mentor_ratings=2000
ratingMentor.mentor_pool.table=lps_rtg_pool
ratingMentor.mentor_pool.size=5000
ratingMentor.sift_pri=sift_pri
# Specify a constraint on who gets sifted, in addition to the sift pri column, for example
# ratingMentor.constraint=user_id > 100
ratingMentor.recompute_only_on_new_data=true
ratingMentor.num_sift_threads=2

ratingMentor.sleep_on_no_sift_pri = true
ratingMentor.disable_build_mentor_pool = false
ratingMentor.build_mentor_pool_at = 00 2 * * *

ratingMentor.max_cached_mentors = 500


# Enable this if you set disable_build_mentor_pool = true
# ratingMentor.reload_mentor_pool_at= 00 3 * * *
# The next two keys are required to control pause and resume.
# Enable this key if you want to pause sifting at specific times.
# ratingMentor.pause_sifting_at= 00 15 * * *
# Enable this key if you want to resume sifting at specific times.
# ratingMentor.resume_sifting_at= 00 20 * * *

# xactionMentor is used by engActivity, a clickstream engine instance. This mentor
# group defines a clickstream-based mentor group.

xactionMentor.table=lps_trx_mentor
xactionMentor.max_mentors = 50
xactionMentor.mentor_type = mentor_type
xactionMentor.pri_check_interval = 20
xactionMentor.pri_list_size = 10
xactionMentor.mentor_cutoff=3.0
xactionMentor.min_mentor_xactions=3
xactionMentor.max_mentor_xactions=3000
xactionMentor.mentor_pool.table=lps_trx_pool
xactionMentor.mentor_pool.size=5000
xactionMentor.sift_pri=sift_pri
# Specify constraint on who gets sifted, for example
# xactionMentor.constraint=user_id > 100
xactionMentor.recompute_only_on_new_data=true
xactionMentor.num_sift_threads=2

xactionMentor.sleep_on_no_sift_pri = true
xactionMentor.disable_build_mentor_pool = false
xactionMentor.build_mentor_pool_at = 00 3 * * *

xactionMentor.max_cached_mentors = 500

# Enable this if you set disable_build_mentor_pool = true
# xactionMentor.reload_mentor_pool_at= 00 3 * * *
# The next two keys are required to control pause and resume.
# Enable this key if you want to pause sifting at specific times.
# xactionMentor.pause_sifting_at= 00 15 * * *
# Enable this key if you want to resume sifting at specific times.
# xactionMentor.resume_sifting_at= 00 20 * * *

# A rating set specifies how rating data is handled and used by an instance of the preference engine.
# Every preference engine instance requires a valid rating family to work.
# All rating set names defined should be listed under the "db.applic.rating.source" key.
#
# Properties associated with the rating set:
# Table to store user ratings.
# Rating scale and special rating values.
# Column in the user_data table to update to notify the sifter for sifting (sift pri).
# addTransaction translator used to process ratings.
# addTransaction filter used for processing ratings.

# All rating families defined for this installation

db.applic.rating.source=prefRating

# prefRating is used by engPreference, an instance of the preference engine.

prefRating.table=lps_user_rating
prefRating.translator= generic_translator_type
prefRating.filter=generic_filter_type
prefRating.mentor_type = mentor_type
prefRating.sift_pri = sift_pri
# Each set can override global settings for the rating scale and special rating values
# This set uses global settings.
# prefRating.WontRate = 
# prefRating.Unrated = 
# prefRating.LowestRating = 
# prefRating.NumberRatingValues = 
# prefRating.BestBets.Threshold = 
# prefRating.WorstBets.Threshold = 
 
# A transaction set specifies how clickstream data is handled and used by an instance of the clickstream or purchase engine.
# Every clickstream or purchase engine instance requires a valid transaction family to work.
# All transaction set names defined should be listed under the "db.applic.transaction.source" key.
#
# Properties associated with the transaction set:
# Table to store user clickstream data.
# Column in the user_data table to update to notify the sifter for sifting (sift pri).
# addTransaction translator used to process clickstream data.
# addTransaction filter used for processing clickstream data.

# All transaction families defined for this installation

db.applic.transaction.source= xactions

xactions.table=lps_user_trx
xactions.type=lps_trx_type
xactions.translator= generic_translator_type
xactions.filter=eCommerce_filter_type
xactions.mentor_type = mentor_type
xactions.sift_pri = sift_pri


# An item affinity set specifies the source for the item affinity engine; the item affinity
# accumulator uses the properties of the item affinity set in computing the item-to-item relationships.
# Every item affinity engine instance requires a valid item affinity set to work.
# All item affinity set names defined should be listed under the "db.applic.item_affinity.source" key.
#
# Properties associated with the item_affinity set:
# Input table from which item baskets are read.
# Description of the basket ID.
# Description of the product ID.
# Number of item-to-item relationships stored.
# Minimum support to accept an item.
# Table to store the item-to-item relationships.

# All item_affinity sets defined for this installation. This must be a comma-delimited list.

db.applic.item_affinity.source = mbSample

mbSample.item_affinity.input.table = lps_user_trx
mbSample.item_affinity.basket_id.type = LPS
# mbSample.item_affinity.basket_id.field = <field1> [, <field2>, ..]
# mbSample.item_affinity.basket_id.constraint = user_id < 100
mbSample.item_affinity.product_id.type= LPS
# mbSample.item_affinity.product_id.field = <field1> [, <field2>, ..]
mbSample.item_affinity.num_threads = 4
mbSample.item_affinity.store.topn = 25
mbSample.item_affinity.min_support=0.0001
mbSample.item_affinity.max_support=0.75
# mbSample.item_affinity.product_id.constraint = type_id = 3
mbSample.item_affinity.output.table=lps_mba_scored


# Each instance of the LPS server can have multiple engines of one or more types; each
# engine instance is identified by a unique name.
# Engines are loaded on demand, except for the default engine.
# All engine instances should be listed under the "db.applic.engine.source" key.
# The default engine is used by all prediction APIs when no engine is specified.

db.applic.engine.source=engPreference, engActivity, engSampleItmAffinity

engine.default=engPreference


# Global preference engine settings applicable to all instances.
# To override a key for a specific instance, add the "<instance name>." prefix.

db.engine.tune.disable.avg_user=true
db.engine.tune.consult.archetype_for_list=false
db.engine.tune.max_archetype_list_use=100000
db.engine.tune.max_cached_mentors=500
engine.titan.predictable.min_ratings_cutoff=2
engine.titan.predictable.min_items_cutoff= 1
engine.titan.predictable.loop_check_cutoff=10
engine.titan.predictable.confidence_cutoff=0
engine.titan.default_predictions.source=score
engine.titan.default_predictions.list_size=50%
engine.titan.recomputation_bound=60.0
engine.titan.default_predictions.Threshold = 6
engine.titan.useMentorPoolUsers=false
# The default prediction set will contain items whose prediction is above the specified value.
# If this value is not set, the BestBets threshold is used.
#engine.titan.default_predictions.Threshold=

# Global clickstream engine settings applicable to all instances.
# To override a key for a specific instance, add the "<instance name>." prefix.

engine.saturn.affinity.constructor=simple
engine.saturn.affinity.comparator=chisquared
engine.saturn.useMentorPoolUsers=false
engine.saturn.tune.max_cached_mentors=500
engine.saturn.tune.predictable.loop_check_cutoff=10
engine.saturn.tune.predictable.min_xactions_cutoff=10
engine.saturn.tune.predictable.confidence_cutoff=0
engine.saturn.tune.predictable.min_items_cutoff=1
# confidence-related parameters
engine.saturn.tune.confidenceVarExp=-0.5
engine.saturn.tune.confidenceMentorShift=1.0
engine.saturn.tune.confidenceScale=2.0
# Default prediction list size for the clickstream engine.
# The size here specifies the maximum number of items to store in user zero for each activity
# type. For example, if the clickstream engine is tracking 3 types of activities,
# then the maximum size of the default prediction set will be 3 x 500 = 1500.
engine.saturn.default_predictions.list_size=500
engine.saturn.recomputation_bound = 60.0



# Global matching engine settings applicable to all instances.
# To override a key for a specific instance, add the "<instance name>." prefix.

# db.engine.infer.attribute.<n>=<table>.<name>
# db.engine.infer.attribute.type.<table>.<name>=<type> # default is string (char
# or varchar), else int or double
# db.engine.infer.attribute.exclude.<table>.<name>=<val[, val ...]>
# db.engine.infer.attribute.tolerance.<table>.<name>=<numeric-val> # default = 0
# db.engine.infer.attribute.weight.<table>.<name>=<numeric-val> # default = .1
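
# Hypothetical example (the genre and age column names are placeholders; adjust
# to your schema): declares a string attribute and an integer attribute on the
# user table, excludes the value "unknown" for genre, sets a numeric tolerance
# for age, and gives genre a higher weight than the default of .1.
# db.engine.infer.attribute.1=lps_user_data.genre
# db.engine.infer.attribute.2=lps_user_data.age
# db.engine.infer.attribute.type.lps_user_data.age=int
# db.engine.infer.attribute.exclude.lps_user_data.genre=unknown
# db.engine.infer.attribute.tolerance.lps_user_data.age=5
# db.engine.infer.attribute.weight.lps_user_data.genre=0.3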

# To define a new instance of an engine the following keys should be set
# <instance name>.type = preference_engine_type or clickstream_engine_type 
#                        or matching_engine_type or item_affinity_engine_type
# engine.name.<instance name> = <Description for this engine instance>
# 

# Preference engine instance definition 
# Each instance should define a ratingSetFamily and Mentor group.

engPreference.type=preference_engine_type
engine.name.engPreference = "LikeMinds Correlation Engine"
engPreference.engine.titan.ratingSetFamily=prefRating
engPreference.engine.titan.mentorGroup=ratingMentor
engPreference.engine.titan.predictable.min_ratings_cutoff=2
engPreference.engine.titan.predictable.min_items_cutoff= 1
engPreference.engine.titan.predictable.loop_check_cutoff=10
engPreference.engine.titan.predictable.confidence_cutoff=0
engPreference.engine.titan.default_predictions.source=score
engPreference.engine.titan.default_predictions.list_size=50%
engPreference.engine.titan.recomputation_bound=60.0
engPreference.engine.titan.useMentorPoolUsers=false



# Clickstream engine instance definition 
# Each instance should define a transactionFamily and Mentor group.

engActivity.type=clickstream_engine_type
engine.name.engActivity = "LikeMinds Clickstream Engine"

engActivity.engine.saturn.transactionFamily=xactions
engActivity.engine.saturn.affinity.constructor=simple
engActivity.engine.saturn.mentorGroup=xactionMentor
engActivity.engine.saturn.affinity.comparator=chisquared
engActivity.engine.saturn.tune.max_cached_mentors=500
engActivity.engine.saturn.tune.predictable.loop_check_cutoff=10
engActivity.engine.saturn.tune.predictable.min_xactions_cutoff=10
engActivity.engine.saturn.tune.predictable.confidence_cutoff=0
engActivity.engine.saturn.tune.predictable.min_items_cutoff=1
engActivity.engine.saturn.tune.confidenceVarExp=-0.5
engActivity.engine.saturn.tune.confidenceMentorShift=1.0
engActivity.engine.saturn.tune.confidenceScale=2.0
engActivity.engine.saturn.recomputation_bound = 60.0
engActivity.engine.saturn.useMentorPoolUsers=false


# Item affinity engine instance definition 
# Each instance should define a source key which should be a valid affinity set. 

engSampleItmAffinity.type=item_affinity_engine_type
engine.name.engSampleItmAffinity = "LikeMinds Item Affinity Engine"
engSampleItmAffinity.item_affinity.source=mbSample


# Configuration for all LPS utilities
 
# List all engine instances for which mentors need to be computed.

db.applic.sifter.source=engPreference, engActivity

# Build stats

db.ratability.Num_rating.Coefficient=1.0
db.ratability.Num_rating.Power=1.0
db.ratability.StdDev.Coefficient=1.0
db.ratability.StdDev.Power=1.0
db.ratability.Age.Coefficient=2.7
db.ratability.Age.Power=0.5

# Prefetch

prefetch.handler.requestid = 1
db.tune.item_cache.load.maxitems=4000
db.tune.item_cache.items.perload=50


# Words to ignore in search string
pref.search.noise="the,in,a,of,and,on,&"

# Word mapping for search patterns
# The following format is used :
# pref.search.synonym.<word>=pattern1, pattern2, ..., patternN
# if the search string contains <word> then string patterns
# pattern1, pattern2, ..., patternN are also considered a perfect match.
pref.search.synonym.1="one,I"
pref.search.synonym.one="I,1"
pref.search.synonym.II="two,2"
pref.search.synonym.2="two,II"
pref.search.synonym.two="II,2"
pref.search.synonym.III="three,3"
pref.search.synonym.3="three,III"
pref.search.synonym.three="III,3"
pref.search.synonym.IV="four,4"
pref.search.synonym.4="four,IV"
pref.search.synonym.four="IV,4"
pref.search.synonym.V="five,5"
pref.search.synonym.5="five,V"
pref.search.synonym.five="V,5"
pref.search.synonym.X="10,ten"
pref.search.synonym.ten="10"
pref.search.synonym.10="ten"
pref.search.synonym.12="twelve"
pref.search.synonym.twelve="12"
pref.search.synonym.color="colour"
pref.search.synonym.colour="color"



# LikeMinds engines use symbolic names for database tables and fields.
# If a symbolic name does not match the actual table or field name, set the
# corresponding key below to change the db table and field names. These must agree with the
# names in the actual data source. Note that fields occurring
# in multiple tables must have the same name in each at this time.
# For instance, the user_id column must have the same name in every
# table in which it occurs.
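#
# For example (hypothetical column name), if your item table stores titles in a
# column named item_title, you would set:
# db.field.name.Title = item_title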

db.table.name.User_data = lps_user_data
db.table.name.Item_data = lps_item_data
db.table.name.User_selector = lps_user_selector
db.table.name.Genre_data = lps_genre_data
db.table.name.Item_genre = lps_item_genre
db.table.name.visit_list =lps_visit_list

db.field.name.Timestamp = time_stamp
db.field.name.Time_stamp = time_stamp
db.field.name.User_id = user_id
db.field.name.Num_mentors = num_mentors
db.field.name.Num_item = num_item
db.field.name.Name = name
db.field.name.Password = password
db.field.name.Rating = rating
db.field.name.Item_id = item_id
db.field.name.Title = title
db.field.name.Year = year
db.field.name.Avrg_rtg = avrg_rtg
db.field.name.Num_rtg = num_rtg
db.field.name.Total_rtg = total_rtg
db.field.name.Total_square_rtg = total_square_rtg
db.field.name.Genre_id = genre_id
db.field.name.Label = label
db.field.name.Image = image
db.field.name.trx_id = trx_id
db.field.name.type_id = type_id
db.field.name.type_name = type_name
db.field.name.value = value
db.field.name.adj_count = adj_count
db.field.name.abs_cvg = abs_cvg
db.field.name.num_trx = num_trx
db.field.name.resource_id = resource_id
db.field.name.resource_collection = resource_collection