Added export and import support for users, user roles, pages, and blogs.
Added basic support for comments; for now this mainly covers viewing comments stored in the database (no submission forms yet). Added a first, simple wikitext filter that renders wikitext content as XHTML. Ensured the response content type is text/html with the utf-8 charset.
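For illustration, below is a minimal sketch of how a module could plug into the export hook introduced by this commit. The class name MY_MODULE_EXPORT_EXAMPLE, the `register_hooks' feature name and the CMS_HOOK_CORE_MANAGER type are assumptions for the sketch; only `subscribe_to_export_hook', CMS_HOOK_EXPORT, `export_to' and CMS_EXPORT_CONTEXT come from the changes in this commit.

	class
		MY_MODULE_EXPORT_EXAMPLE
			-- Hypothetical example of an export hook subscriber.

	inherit
		CMS_HOOK_EXPORT

	feature -- Hooks

		register_hooks (a_hooks: CMS_HOOK_CORE_MANAGER)
				-- Subscribe so that `export_to' is called during an export.
				-- Note: the feature name and `CMS_HOOK_CORE_MANAGER' type are assumed here.
			do
				a_hooks.subscribe_to_export_hook (Current)
			end

		export_to (a_export_id_list: detachable ITERABLE [READABLE_STRING_GENERAL]; a_export_ctx: CMS_EXPORT_CONTEXT; a_response: CMS_RESPONSE)
				-- Write this module's data under `a_export_ctx.location'.
			do
				a_export_ctx.log ("Exporting my_module data")
			end

	end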
@@ -11,7 +11,7 @@ inherit
|
||||
|
||||
feature -- Hook
|
||||
|
||||
import_from (a_impot_id_list: detachable ITERABLE [READABLE_STRING_GENERAL]; a_import_ctx: CMS_IMPORT_CONTEXT; a_response: CMS_RESPONSE)
|
||||
import_from (a_import_id_list: detachable ITERABLE [READABLE_STRING_GENERAL]; a_import_ctx: CMS_IMPORT_CONTEXT; a_response: CMS_RESPONSE)
|
||||
-- Import data identified by `a_import_id_list',
|
||||
-- or import all data if `a_import_id_list' is Void.
|
||||
deferred
|
||||
|
||||
@@ -10,6 +10,39 @@ class
|
||||
|
||||
feature -- Access
|
||||
|
||||
json_value_from_location (a_location: PATH): detachable JSON_VALUE
|
||||
local
|
||||
f: RAW_FILE
|
||||
s: STRING
|
||||
jp: JSON_PARSER
|
||||
do
|
||||
create f.make_with_path (a_location)
|
||||
if f.exists and then f.is_access_readable then
|
||||
f.open_read
|
||||
from
|
||||
create s.make (0)
|
||||
until
|
||||
f.exhausted or f.end_of_file
|
||||
loop
|
||||
f.read_stream (1_024)
|
||||
s.append (f.last_string)
|
||||
end
|
||||
f.close
|
||||
create jp.make_with_string (s)
|
||||
jp.parse_content
|
||||
if jp.is_valid then
|
||||
Result := jp.parsed_json_value
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
json_object_from_location (a_location: PATH): detachable JSON_OBJECT
|
||||
do
|
||||
if attached {JSON_OBJECT} json_value_from_location (a_location) as jo then
|
||||
Result := jo
|
||||
end
|
||||
end
|
||||
|
||||
json_string_item (j: JSON_OBJECT; a_name: READABLE_STRING_GENERAL): detachable STRING_32
|
||||
do
|
||||
if attached {JSON_STRING} j.item (a_name) as s then
|
||||
@@ -41,13 +74,46 @@ feature -- Access
|
||||
json_date_item (j: JSON_OBJECT; a_name: READABLE_STRING_GENERAL): detachable DATE_TIME
|
||||
local
|
||||
hd: HTTP_DATE
|
||||
i,y,m,d,h,min,sec: INTEGER
|
||||
s: STRING_8
|
||||
do
|
||||
if attached {JSON_NUMBER} j.item (a_name) as num then
|
||||
create hd.make_from_timestamp (num.integer_64_item)
|
||||
Result := hd.date_time
|
||||
elseif attached {JSON_STRING} j.item (a_name) as s then
|
||||
create hd.make_from_string (s.unescaped_string_32)
|
||||
Result := hd.date_time
|
||||
elseif attached {JSON_STRING} j.item (a_name) as js then
|
||||
s := js.unescaped_string_8
|
||||
|
||||
-- Parse yyyy-mm-dd hh:mm:ss
|
||||
-- 1234567890123456789
|
||||
i := s.index_of ('-', 1)
|
||||
if i = 5 then
|
||||
y := s.substring (1, i - 1).to_integer
|
||||
i := s.index_of ('-', i + 1)
|
||||
if i = 8 then
|
||||
m := s.substring (6, i - 1).to_integer
|
||||
i := s.index_of (' ', i + 1)
|
||||
if i = 11 then
|
||||
d := s.substring (9, i - 1).to_integer
|
||||
i := s.index_of (':', i + 1)
|
||||
if i = 14 then
|
||||
h := s.substring (12, i - 1).to_integer
|
||||
i := s.index_of (':', i + 1)
|
||||
if i = 17 then
|
||||
min := s.substring (15, i - 1).to_integer
|
||||
sec := s.substring (i + 1, s.count).to_integer
|
||||
create Result.make (y,m,d,h,min,sec)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
if Result = Void then
|
||||
create hd.make_from_string (s)
|
||||
if hd.has_error then
|
||||
else
|
||||
Result := hd.date_time
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
@@ -48,11 +48,11 @@ feature -- Access
|
||||
password: Result /= Void implies (Result.hashed_password /= Void and Result.password = Void)
|
||||
end
|
||||
|
||||
user_by_email (a_email: like {CMS_USER}.email): detachable CMS_USER
|
||||
user_by_email (a_email: READABLE_STRING_GENERAL): detachable CMS_USER
|
||||
-- User with email `a_email', if any.
|
||||
deferred
|
||||
ensure
|
||||
same_email: Result /= Void implies a_email ~ Result.email
|
||||
same_email: Result /= Void implies (attached Result.email as r_email and then a_email.same_string (r_email))
|
||||
password: Result /= Void implies (Result.hashed_password /= Void and Result.password = Void)
|
||||
end
|
||||
|
||||
|
||||
@@ -30,7 +30,7 @@ feature -- Access: user
|
||||
do
|
||||
end
|
||||
|
||||
user_by_email (a_email: like {CMS_USER}.email): detachable CMS_USER
|
||||
user_by_email (a_email: READABLE_STRING_GENERAL): detachable CMS_USER
|
||||
do
|
||||
end
|
||||
|
||||
@@ -204,6 +204,6 @@ feature -- Temp Users
|
||||
do
|
||||
end
|
||||
note
|
||||
copyright: "2011-2016, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
copyright: "2011-2017, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
license: "Eiffel Forum License v2 (see http://www.eiffel.com/licensing/forum.txt)"
|
||||
end
|
||||
|
||||
@@ -95,7 +95,7 @@ feature -- Access: user
|
||||
sql_finalize
|
||||
end
|
||||
|
||||
user_by_email (a_email: like {CMS_USER}.email): detachable CMS_USER
|
||||
user_by_email (a_email: READABLE_STRING_GENERAL): detachable CMS_USER
|
||||
-- User for the given email `a_email', if any.
|
||||
local
|
||||
l_parameters: STRING_TABLE [detachable ANY]
|
||||
|
||||
@@ -7,14 +7,14 @@ class
|
||||
CMS_API
|
||||
|
||||
inherit
|
||||
ANY
|
||||
CMS_HOOK_EXPORT
|
||||
CMS_API_EXPORT_IMP
|
||||
|
||||
CMS_HOOK_IMPORT
|
||||
CMS_API_IMPORT_IMP
|
||||
|
||||
CMS_ENCODERS
|
||||
|
||||
CMS_HOOK_EXPORT
|
||||
|
||||
CMS_EXPORT_JSON_UTILITIES
|
||||
|
||||
REFACTORING_HELPER
|
||||
|
||||
create
|
||||
@@ -503,7 +503,14 @@ feature {NONE} -- Hooks
|
||||
end
|
||||
end
|
||||
|
||||
feature -- Query: API
|
||||
feature {NONE} -- Access: API
|
||||
|
||||
cms_api: CMS_API
|
||||
do
|
||||
Result := Current
|
||||
end
|
||||
|
||||
feature -- Access: API
|
||||
|
||||
user_api: CMS_USER_API
|
||||
-- API to access user related data.
|
||||
@@ -524,6 +531,7 @@ feature -- Hooks
|
||||
-- Register hooks associated with the cms core.
|
||||
do
|
||||
a_hooks.subscribe_to_export_hook (Current)
|
||||
a_hooks.subscribe_to_import_hook (Current)
|
||||
end
|
||||
|
||||
feature -- Path aliases
|
||||
@@ -618,7 +626,7 @@ feature {NONE}-- Implementation
|
||||
-- Error handler.
|
||||
|
||||
internal_user_api: detachable like user_api
|
||||
-- Cached value for `user_api'.
|
||||
-- Cached value for `user_api`.
|
||||
|
||||
feature -- Environment/ theme
|
||||
|
||||
@@ -802,112 +810,6 @@ feature -- Environment/ modules and theme
|
||||
Result := module_configuration_by_name (a_module.name, a_name)
|
||||
end
|
||||
|
||||
feature -- Hook
|
||||
|
||||
export_to (a_export_id_list: detachable ITERABLE [READABLE_STRING_GENERAL]; a_export_ctx: CMS_EXPORT_CONTEXT; a_response: CMS_RESPONSE)
|
||||
-- <Precursor>.
|
||||
local
|
||||
p: PATH
|
||||
d: DIRECTORY
|
||||
ja: JSON_ARRAY
|
||||
jobj,jo,j: JSON_OBJECT
|
||||
f: PLAIN_TEXT_FILE
|
||||
u: CMS_USER
|
||||
do
|
||||
if attached a_response.has_permissions (<<"admin export", "export core">>) then
|
||||
if a_export_id_list = Void then -- Include everything
|
||||
p := a_export_ctx.location.extended ("core")
|
||||
create d.make_with_path (p)
|
||||
if not d.exists then
|
||||
d.recursive_create_dir
|
||||
end
|
||||
|
||||
-- path_aliases export.
|
||||
a_export_ctx.log ("Exporting path_aliases")
|
||||
create jo.make_empty
|
||||
across storage.path_aliases as ic loop
|
||||
jo.put_string (ic.item, ic.key)
|
||||
end
|
||||
create f.make_with_path (p.extended ("path_aliases.json"))
|
||||
f.create_read_write
|
||||
f.put_string (json_to_string (jo))
|
||||
f.close
|
||||
|
||||
-- custom_values export.
|
||||
if attached storage.custom_values as lst then
|
||||
a_export_ctx.log ("Exporting custom_values")
|
||||
create ja.make_empty
|
||||
across
|
||||
lst as ic
|
||||
loop
|
||||
create j.make_empty
|
||||
if attached ic.item.type as l_type then
|
||||
j.put_string (l_type, "type")
|
||||
end
|
||||
j.put_string (ic.item.name, "name")
|
||||
if attached ic.item.type as l_value then
|
||||
j.put_string (l_value, "value")
|
||||
end
|
||||
ja.extend (j)
|
||||
end
|
||||
create f.make_with_path (p.extended ("custom_values.json"))
|
||||
f.create_read_write
|
||||
f.put_string (json_to_string (ja))
|
||||
f.close
|
||||
end
|
||||
|
||||
-- users export.
|
||||
a_export_ctx.log ("Exporting users")
|
||||
create jo.make_empty
|
||||
|
||||
create jobj.make_empty
|
||||
across user_api.recent_users (create {CMS_DATA_QUERY_PARAMETERS}.make (0, user_api.users_count.as_natural_32)) as ic loop
|
||||
u := ic.item
|
||||
create j.make_empty
|
||||
j.put_string (u.name, "name")
|
||||
j.put_integer (u.status, "status")
|
||||
put_string_into_json (u.email, "email", j)
|
||||
put_string_into_json (u.password, "password", j)
|
||||
put_string_into_json (u.hashed_password, "hashed_password", j)
|
||||
put_date_into_json (u.creation_date, "creation_date", j)
|
||||
put_date_into_json (u.last_login_date, "last_login_date", j)
|
||||
if attached u.roles as l_roles then
|
||||
create ja.make (l_roles.count)
|
||||
across
|
||||
l_roles as roles_ic
|
||||
loop
|
||||
ja.extend (create {JSON_STRING}.make_from_string_32 ({STRING_32} " %"" + roles_ic.item.name + {STRING_32} "%" #" + roles_ic.item.id.out))
|
||||
end
|
||||
j.put (ja, "roles")
|
||||
end
|
||||
jobj.put (j, u.id.out)
|
||||
end
|
||||
jo.put (jobj, "users")
|
||||
|
||||
create jobj.make_empty
|
||||
across user_api.roles as ic loop
|
||||
create j.make_empty
|
||||
j.put_string (ic.item.name, "name")
|
||||
if attached ic.item.permissions as l_perms then
|
||||
create ja.make (l_perms.count)
|
||||
across
|
||||
l_perms as perms_ic
|
||||
loop
|
||||
ja.extend (create {JSON_STRING}.make_from_string (perms_ic.item))
|
||||
end
|
||||
j.put (ja, "permissions")
|
||||
end
|
||||
jobj.put (j, ic.item.id.out)
|
||||
end
|
||||
jo.put (jobj, "roles")
|
||||
create f.make_with_path (p.extended ("users.json"))
|
||||
f.create_read_write
|
||||
f.put_string (json_to_string (jo))
|
||||
f.close
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
feature -- Access: active user
|
||||
|
||||
user_is_authenticated: BOOLEAN
|
||||
|
||||
src/service/cms_api_export_imp.e (new file, 142 lines)
@@ -0,0 +1,142 @@
|
||||
note
|
||||
description: "Summary description for {CMS_API_EXPORT_IMP}."
|
||||
date: "$Date$"
|
||||
revision: "$Revision$"
|
||||
|
||||
deferred class
|
||||
CMS_API_EXPORT_IMP
|
||||
|
||||
inherit
|
||||
CMS_ENCODERS
|
||||
|
||||
CMS_HOOK_EXPORT
|
||||
|
||||
CMS_EXPORT_JSON_UTILITIES
|
||||
|
||||
CMS_FILE_SYSTEM_UTILITIES
|
||||
|
||||
feature {NONE} -- Query: API
|
||||
|
||||
user_api: CMS_USER_API
|
||||
-- API to access user related data.
|
||||
deferred
|
||||
end
|
||||
|
||||
cms_api: CMS_API
|
||||
deferred
|
||||
end
|
||||
|
||||
feature -- Export
|
||||
|
||||
export_to (a_export_id_list: detachable ITERABLE [READABLE_STRING_GENERAL]; a_export_ctx: CMS_EXPORT_CONTEXT; a_response: CMS_RESPONSE)
|
||||
-- <Precursor>.
|
||||
local
|
||||
p: PATH
|
||||
d: DIRECTORY
|
||||
ja: JSON_ARRAY
|
||||
jobj,jo,j: JSON_OBJECT
|
||||
f: PLAIN_TEXT_FILE
|
||||
u: CMS_USER
|
||||
do
|
||||
if attached a_response.has_permissions (<<"admin export", "export core">>) then
|
||||
if a_export_id_list = Void then -- Include everything
|
||||
p := a_export_ctx.location.extended ("core")
|
||||
create d.make_with_path (p)
|
||||
if not d.exists then
|
||||
d.recursive_create_dir
|
||||
end
|
||||
|
||||
-- path_aliases export.
|
||||
a_export_ctx.log ("Exporting path_aliases")
|
||||
create jo.make_empty
|
||||
across cms_api.storage.path_aliases as ic loop
|
||||
jo.put_string (ic.item, ic.key)
|
||||
end
|
||||
create f.make_with_path (p.extended ("path_aliases.json"))
|
||||
f.create_read_write
|
||||
f.put_string (json_to_string (jo))
|
||||
f.close
|
||||
|
||||
-- custom_values export.
|
||||
if attached cms_api.storage.custom_values as lst then
|
||||
a_export_ctx.log ("Exporting custom_values")
|
||||
create ja.make_empty
|
||||
across
|
||||
lst as ic
|
||||
loop
|
||||
create j.make_empty
|
||||
if attached ic.item.type as l_type then
|
||||
j.put_string (l_type, "type")
|
||||
end
|
||||
j.put_string (ic.item.name, "name")
|
||||
if attached ic.item.type as l_value then
|
||||
j.put_string (l_value, "value")
|
||||
end
|
||||
ja.extend (j)
|
||||
end
|
||||
create f.make_with_path (p.extended ("custom_values.json"))
|
||||
f.create_read_write
|
||||
f.put_string (json_to_string (ja))
|
||||
f.close
|
||||
end
|
||||
|
||||
-- user roles export.
|
||||
a_export_ctx.log ("Exporting user roles")
|
||||
|
||||
create jobj.make_empty
|
||||
across user_api.roles as ic loop
|
||||
create j.make_empty
|
||||
j.put_string (ic.item.name, "name")
|
||||
if attached ic.item.permissions as l_perms then
|
||||
create ja.make (l_perms.count)
|
||||
across
|
||||
l_perms as perms_ic
|
||||
loop
|
||||
ja.extend (create {JSON_STRING}.make_from_string (perms_ic.item))
|
||||
end
|
||||
j.put (ja, "permissions")
|
||||
end
|
||||
jobj.put (j, ic.item.id.out)
|
||||
end
|
||||
create f.make_with_path (p.extended ("user_roles.json"))
|
||||
f.create_read_write
|
||||
f.put_string (json_to_string (jobj))
|
||||
f.close
|
||||
|
||||
-- users export.
|
||||
a_export_ctx.log ("Exporting users")
|
||||
|
||||
create jobj.make_empty
|
||||
across user_api.recent_users (create {CMS_DATA_QUERY_PARAMETERS}.make (0, user_api.users_count.as_natural_32)) as ic loop
|
||||
u := ic.item
|
||||
create j.make_empty
|
||||
j.put_string (u.name, "name")
|
||||
j.put_integer (u.status, "status")
|
||||
put_string_into_json (u.email, "email", j)
|
||||
put_string_into_json (u.password, "password", j)
|
||||
put_string_into_json (u.hashed_password, "hashed_password", j)
|
||||
put_date_into_json (u.creation_date, "creation_date", j)
|
||||
put_date_into_json (u.last_login_date, "last_login_date", j)
|
||||
if attached u.roles as l_roles then
|
||||
create ja.make (l_roles.count)
|
||||
across
|
||||
l_roles as roles_ic
|
||||
loop
|
||||
ja.extend (create {JSON_STRING}.make_from_string_32 ({STRING_32} " %"" + roles_ic.item.name + {STRING_32} "%" #" + roles_ic.item.id.out))
|
||||
end
|
||||
j.put (ja, "roles")
|
||||
end
|
||||
jobj.put (j, u.id.out)
|
||||
end
|
||||
create f.make_with_path (p.extended ("users.json"))
|
||||
f.create_read_write
|
||||
f.put_string (json_to_string (jobj))
|
||||
f.close
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
note
|
||||
copyright: "2011-2017, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
license: "Eiffel Forum License v2 (see http://www.eiffel.com/licensing/forum.txt)"
|
||||
end
|
||||
src/service/cms_api_import_imp.e (new file, 225 lines)
@@ -0,0 +1,225 @@
|
||||
note
|
||||
description: "Summary description for {CMS_API_IMPORT_IMP}."
|
||||
author: ""
|
||||
date: "$Date$"
|
||||
revision: "$Revision$"
|
||||
|
||||
deferred class
|
||||
CMS_API_IMPORT_IMP
|
||||
|
||||
inherit
|
||||
CMS_ENCODERS
|
||||
|
||||
CMS_HOOK_IMPORT
|
||||
|
||||
CMS_IMPORT_JSON_UTILITIES
|
||||
|
||||
CMS_FILE_SYSTEM_UTILITIES
|
||||
|
||||
feature {NONE} -- Query: API
|
||||
|
||||
user_api: CMS_USER_API
|
||||
-- API to access user related data.
|
||||
deferred
|
||||
end
|
||||
|
||||
cms_api: CMS_API
|
||||
deferred
|
||||
end
|
||||
|
||||
feature -- Import
|
||||
|
||||
import_from (a_import_id_list: detachable ITERABLE [READABLE_STRING_GENERAL]; a_import_ctx: CMS_IMPORT_CONTEXT; a_response: CMS_RESPONSE)
|
||||
-- Import data identified by `a_import_id_list',
|
||||
-- or import all data if `a_import_id_list' is Void.
|
||||
local
|
||||
p: PATH
|
||||
d: DIRECTORY
|
||||
l_id: STRING_32
|
||||
do
|
||||
-- User roles
|
||||
if
|
||||
a_import_id_list = Void
|
||||
or else across a_import_id_list as ic some ic.item.same_string ("user_roles") end
|
||||
then
|
||||
if
|
||||
a_response.has_permissions (<<"import core">>)
|
||||
then
|
||||
a_import_ctx.log ("Importing user roles...")
|
||||
-- From "core" location
|
||||
if attached json_object_from_location (a_import_ctx.location.extended ("core").extended ("user_roles.json")) as j_user_roles then
|
||||
across
|
||||
j_user_roles as j_ic
|
||||
loop
|
||||
if attached {JSON_OBJECT} j_ic.item as j_user_role then
|
||||
import_json_user_role (j_user_role, a_import_ctx)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Users
|
||||
if
|
||||
a_import_id_list = Void
|
||||
or else across a_import_id_list as ic some ic.item.same_string ("users") end
|
||||
then
|
||||
if
|
||||
a_response.has_permissions (<<"import core">>)
|
||||
then
|
||||
a_import_ctx.log ("Importing users...")
|
||||
|
||||
-- From "core" location
|
||||
if attached json_object_from_location (a_import_ctx.location.extended ("core").extended ("users.json")) as j_users then
|
||||
across
|
||||
j_users as j_ic
|
||||
loop
|
||||
if attached {JSON_OBJECT} j_ic.item as j_user then
|
||||
import_json_user (j_user, a_import_ctx)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
if
|
||||
a_response.has_permissions (<<"import users">>)
|
||||
then
|
||||
-- From "users" location
|
||||
p := a_import_ctx.location.extended ("users")
|
||||
create d.make_with_path (p)
|
||||
if d.exists and then d.is_readable then
|
||||
a_import_ctx.log ("Importing users ..")
|
||||
across
|
||||
d.entries as ic
|
||||
loop
|
||||
if attached ic.item.extension as ext and then ext.same_string_general ("json") then
|
||||
l_id := ic.item.name
|
||||
l_id.remove_tail (ext.count + 1)
|
||||
if attached json_object_from_location (p.extended_path (ic.item)) as j_user then
|
||||
import_json_user (j_user, a_import_ctx)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Files
|
||||
if
|
||||
a_import_id_list = Void
|
||||
or else across a_import_id_list as ic some ic.item.same_string ("files") end
|
||||
then
|
||||
if
|
||||
a_response.has_permissions (<<"import files">>)
|
||||
then
|
||||
a_import_ctx.log ("Importing files roles...")
|
||||
-- From "core" location
|
||||
p := a_import_ctx.location.extended ("files")
|
||||
if attached files_from_location (p, True) as l_files then
|
||||
across
|
||||
l_files as ic
|
||||
loop
|
||||
import_file (ic.item, p, a_import_ctx)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
import_file (a_file_path: PATH; a_root_path: PATH; a_import_ctx: CMS_IMPORT_CONTEXT)
|
||||
local
|
||||
b: BOOLEAN
|
||||
dst: PATH
|
||||
do
|
||||
if attached relative_path_inside (a_file_path, a_root_path) as rel_path then
|
||||
dst := cms_api.files_location.extended_path (rel_path)
|
||||
b := safe_copy_file (a_file_path, dst)
|
||||
if b then
|
||||
a_import_ctx.log ("Imported file %"" + a_file_path.utf_8_name + "%" to %"" + dst.utf_8_name + "%".")
|
||||
else
|
||||
a_import_ctx.log ("ERROR: unable to import file %"" + a_file_path.utf_8_name + "%" !!!")
|
||||
end
|
||||
else
|
||||
check a_file_path_in_root_directory: False end
|
||||
a_import_ctx.log ("ERROR: unable to import file %"" + a_file_path.utf_8_name + "%" (not in root directory) !")
|
||||
end
|
||||
end
|
||||
|
||||
import_json_user_role (j_user_role: JSON_OBJECT; a_import_ctx: CMS_IMPORT_CONTEXT)
|
||||
local
|
||||
do
|
||||
if attached json_to_user_role (j_user_role) as ur then
|
||||
if user_api.user_role_by_name (ur.name) = Void then
|
||||
a_import_ctx.log ("new user role %"" + ur.name + "%".")
|
||||
user_api.save_user_role (ur)
|
||||
else
|
||||
a_import_ctx.log ("Skip user role %"" + ur.name + "%" : already exists!")
|
||||
-- Already exists!
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
import_json_user (j_user: JSON_OBJECT; a_import_ctx: CMS_IMPORT_CONTEXT)
|
||||
local
|
||||
l_user_by_name, l_user_by_email: detachable CMS_USER
|
||||
do
|
||||
if attached json_to_user (j_user) as u then
|
||||
l_user_by_name := user_api.user_by_name (u.name)
|
||||
if attached u.email as l_email then
|
||||
l_user_by_email := user_api.user_by_email (l_email)
|
||||
end
|
||||
if l_user_by_name /= Void or l_user_by_email /= Void then
|
||||
a_import_ctx.log ("Skip user %"" + u.name + "%": already exists!")
|
||||
-- Already exists!
|
||||
else
|
||||
user_api.new_user (u)
|
||||
a_import_ctx.log ("New user %"" + u.name + "%" -> " + u.id.out + " .")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
json_to_user_role (j: JSON_OBJECT): detachable CMS_USER_ROLE
|
||||
do
|
||||
if attached json_string_item (j, "name") as l_name and then not l_name.is_whitespace then
|
||||
create Result.make (l_name)
|
||||
if attached {JSON_ARRAY} j.item ("permissions") as j_permissions then
|
||||
across
|
||||
j_permissions as ic
|
||||
loop
|
||||
if attached {JSON_STRING} ic.item as j_permission then
|
||||
Result.add_permission (j_permission.unescaped_string_8)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
json_to_user (j: JSON_OBJECT): detachable CMS_USER
|
||||
local
|
||||
l_roles: ARRAYED_LIST [CMS_USER_ROLE]
|
||||
do
|
||||
if attached json_string_item (j, "name") as l_name and then not l_name.is_whitespace then
|
||||
create Result.make (l_name)
|
||||
Result.set_password ("")
|
||||
if attached json_string_8_item (j, "email") as l_email then
|
||||
Result.set_email (l_email)
|
||||
end
|
||||
if attached {JSON_ARRAY} j.item ("roles") as j_roles then
|
||||
create l_roles.make (j_roles.count)
|
||||
across
|
||||
j_roles as ic
|
||||
loop
|
||||
if attached {JSON_STRING} ic.item as j_role then
|
||||
if attached user_api.user_role_by_name (j_role.unescaped_string_32) as ur then
|
||||
l_roles.extend (ur)
|
||||
end
|
||||
end
|
||||
end
|
||||
Result.set_roles (l_roles)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
note
|
||||
copyright: "2011-2017, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
license: "Eiffel Forum License v2 (see http://www.eiffel.com/licensing/forum.txt)"
|
||||
end
|
||||
src/service/cms_file_system_utilities.e (new file, 124 lines)
@@ -0,0 +1,124 @@
|
||||
note
|
||||
description: "Routines to manipulate files, directories, ..."
|
||||
date: "$Date$"
|
||||
revision: "$Revision$"
|
||||
|
||||
class
|
||||
CMS_FILE_SYSTEM_UTILITIES
|
||||
|
||||
feature -- Files
|
||||
|
||||
relative_path_inside (a_path: PATH; a_root_path: PATH): detachable PATH
|
||||
-- Relative path from `a_root_path` to `a_path`, or Void if `a_path` is not inside `a_root_path`.
|
||||
local
|
||||
lst,root_lst: LIST [PATH]
|
||||
err: BOOLEAN
|
||||
do
|
||||
lst := a_path.components
|
||||
root_lst := a_root_path.components
|
||||
if lst.count >= root_lst.count then
|
||||
from
|
||||
lst.start
|
||||
root_lst.start
|
||||
until
|
||||
root_lst.after or err
|
||||
loop
|
||||
if lst.item.same_as (root_lst.item) then
|
||||
lst.forth
|
||||
root_lst.forth
|
||||
else
|
||||
err := True
|
||||
end
|
||||
end
|
||||
if not err then
|
||||
from
|
||||
create Result.make_empty
|
||||
until
|
||||
lst.after
|
||||
loop
|
||||
Result := Result.extended_path (lst.item)
|
||||
lst.forth
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
files_from_location (a_loc: PATH; is_recursive: BOOLEAN): detachable LIST [PATH]
|
||||
local
|
||||
d: DIRECTORY
|
||||
f: RAW_FILE
|
||||
p: PATH
|
||||
retried: BOOLEAN
|
||||
do
|
||||
if not retried then
|
||||
create {ARRAYED_LIST [PATH]} Result.make (0)
|
||||
create d.make_with_path (a_loc)
|
||||
if d.exists then
|
||||
across
|
||||
d.entries as ic
|
||||
loop
|
||||
if ic.item.is_current_symbol or ic.item.is_parent_symbol then
|
||||
-- Ignore
|
||||
else
|
||||
p := a_loc.extended_path (ic.item)
|
||||
create f.make_with_path (p)
|
||||
if f.is_directory then
|
||||
if is_recursive and then attached files_from_location (p, is_recursive) as lst then
|
||||
across
|
||||
lst as lst_ic
|
||||
loop
|
||||
Result.force (lst_ic.item)
|
||||
end
|
||||
end
|
||||
elseif f.exists then
|
||||
Result.force (p)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
rescue
|
||||
retried := True
|
||||
retry
|
||||
end
|
||||
|
||||
safe_copy_file (src,dst: PATH): BOOLEAN
|
||||
-- Copy file from `src` to `dst'
|
||||
-- and return True on success, False on failure.
|
||||
local
|
||||
retried: BOOLEAN
|
||||
f_src, f_dst: RAW_FILE
|
||||
d: DIRECTORY
|
||||
do
|
||||
Result := False
|
||||
if retried then
|
||||
Result := False
|
||||
else
|
||||
create f_src.make_with_path (src)
|
||||
if f_src.exists and then f_src.is_access_readable then
|
||||
if attached dst.parent as l_parent then
|
||||
create d.make_with_path (l_parent)
|
||||
if not d.exists then
|
||||
d.recursive_create_dir
|
||||
end
|
||||
end
|
||||
create f_dst.make_with_path (dst)
|
||||
if not f_dst.exists or else f_dst.is_access_writable then
|
||||
f_src.open_read
|
||||
f_dst.open_write
|
||||
f_src.copy_to (f_dst)
|
||||
f_dst.close
|
||||
f_src.close
|
||||
Result := True -- Succeed!
|
||||
end
|
||||
end
|
||||
end
|
||||
rescue
|
||||
retried := True
|
||||
retry
|
||||
end
|
||||
|
||||
note
|
||||
copyright: "2011-2017, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
license: "Eiffel Forum License v2 (see http://www.eiffel.com/licensing/forum.txt)"
|
||||
end
|
||||
@@ -1380,6 +1380,8 @@ feature -- Helpers: cms link
|
||||
feature -- Helpers: html links
|
||||
|
||||
user_html_link (u: CMS_USER): STRING
|
||||
require
|
||||
u_with_name: not u.name.is_whitespace
|
||||
do
|
||||
Result := link (u.name, "user/" + u.id.out, Void)
|
||||
end
|
||||
@@ -1481,6 +1483,6 @@ feature {NONE} -- Execution
|
||||
end
|
||||
|
||||
note
|
||||
copyright: "2011-2016, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
copyright: "2011-2017, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
license: "Eiffel Forum License v2 (see http://www.eiffel.com/licensing/forum.txt)"
|
||||
end
|
||||
|
||||
@@ -28,7 +28,7 @@ feature -- Access: user
|
||||
Result := storage.user_by_name (a_username)
|
||||
end
|
||||
|
||||
user_by_email (a_email: READABLE_STRING_32): detachable CMS_USER
|
||||
user_by_email (a_email: READABLE_STRING_GENERAL): detachable CMS_USER
|
||||
-- User by email `a_email', if any.
|
||||
do
|
||||
Result := storage.user_by_email (a_email)
|
||||
|
||||
src/support/date_time_ago_converter.e (new file, 401 lines)
@@ -0,0 +1,401 @@
|
||||
note
|
||||
description: "Summary description for {DATE_TIME_AGO_CONVERTER}."
|
||||
author: ""
|
||||
date: "$Date$"
|
||||
revision: "$Revision$"
|
||||
|
||||
class
|
||||
DATE_TIME_AGO_CONVERTER
|
||||
|
||||
create
|
||||
make
|
||||
|
||||
feature {NONE} -- Initialization
|
||||
|
||||
make
|
||||
local
|
||||
dt_now, dt_now_utc: DATE_TIME
|
||||
l_duration: DATE_TIME_DURATION --like {DATE_TIME}.relative_duration
|
||||
do
|
||||
create dt_now.make_now
|
||||
create dt_now_utc.make_now_utc
|
||||
|
||||
l_duration := dt_now_utc.relative_duration (dt_now)
|
||||
utc_offset := l_duration.hour
|
||||
|
||||
smart_date_kind := smart_date_duration_kind
|
||||
end
|
||||
|
||||
feature -- Access
|
||||
|
||||
append_date_to (a_text: READABLE_STRING_GENERAL; a_utc_date_time: detachable DATE_TIME; a_output: STRING_GENERAL)
|
||||
require
|
||||
valid_smart_date_kind: smart_date_kind > 0 implies valid_smart_date_kind (smart_date_kind)
|
||||
do
|
||||
if a_utc_date_time /= Void then
|
||||
inspect smart_date_kind
|
||||
when Smart_date_duration_kind then
|
||||
a_output.append (smart_date_duration (a_utc_date_time))
|
||||
when Smart_date_short_kind then
|
||||
a_output.append (short_date (a_utc_date_time))
|
||||
else
|
||||
if attached date_time_format as l_format then
|
||||
a_output.append (formatted_date_time (timezoned_date_time (a_utc_date_time), l_format))
|
||||
else
|
||||
a_output.append (a_text)
|
||||
end
|
||||
end
|
||||
else
|
||||
a_output.append (a_text)
|
||||
end
|
||||
end
|
||||
|
||||
date_time_format: detachable STRING
|
||||
|
||||
utc_offset: INTEGER
|
||||
utc_offset_minute: INTEGER
|
||||
|
||||
smart_date_kind: INTEGER
|
||||
|
||||
feature -- Constants
|
||||
|
||||
smart_date_none_kind: INTEGER = 0
|
||||
|
||||
smart_date_duration_kind: INTEGER = 1
|
||||
|
||||
smart_date_short_kind: INTEGER = 2
|
||||
|
||||
-- smart_date_none_kind_string: STRING = "none"
|
||||
|
||||
-- smart_date_duration_kind_string: STRING = "duration"
|
||||
|
||||
-- smart_date_short_kind_string: STRING = "short date"
|
||||
|
||||
valid_smart_date_kind (k: INTEGER): BOOLEAN
|
||||
do
|
||||
inspect
|
||||
k
|
||||
when
|
||||
smart_date_none_kind,
|
||||
smart_date_duration_kind,
|
||||
smart_date_short_kind
|
||||
then
|
||||
Result := True
|
||||
else
|
||||
end
|
||||
end
|
||||
|
||||
feature -- Output
|
||||
|
||||
formatted_date_time (a_date_time: DATE_TIME; a_date_time_format: STRING): STRING_8
|
||||
local
|
||||
y,m,d,h,mn,sec: INTEGER
|
||||
s32: STRING_32
|
||||
s: STRING
|
||||
c: CHARACTER_32
|
||||
i: INTEGER
|
||||
do
|
||||
create s32.make (a_date_time_format.count)
|
||||
from
|
||||
i := 1
|
||||
m := a_date_time.month
|
||||
y := a_date_time.year
|
||||
d := a_date_time.day
|
||||
h := a_date_time.hour
|
||||
mn := a_date_time.minute
|
||||
sec := a_date_time.second
|
||||
until
|
||||
i > a_date_time_format.count
|
||||
loop
|
||||
c := a_date_time_format[i]
|
||||
inspect c
|
||||
when 'Y' then s32.append_integer (y)
|
||||
when 'y' then
|
||||
s := y.out
|
||||
s.keep_tail (2)
|
||||
s32.append_string (s)
|
||||
when 'm' then
|
||||
if m < 10 then
|
||||
s32.append_integer (0)
|
||||
end
|
||||
s32.append_integer (m)
|
||||
when 'n' then s32.append_integer (m)
|
||||
when 'M' then
|
||||
s := a_date_time.months_text [m].string
|
||||
s.to_lower; s.put (s.item (1).as_upper, 1); s32.append_string (s)
|
||||
when 'F' then
|
||||
s := a_date_time.long_months_text [m].string
|
||||
s.to_lower; s.put (s.item (1).as_upper, 1); s32.append_string (s)
|
||||
when 'D' then
|
||||
s := a_date_time.days_text [a_date_time.date.day_of_the_week].string
|
||||
s.to_lower; s.put (s.item (1).as_upper, 1); s32.append_string (s)
|
||||
when 'l' then
|
||||
s := a_date_time.long_days_text [a_date_time.date.day_of_the_week].string
|
||||
s.to_lower; s.put (s.item (1).as_upper, 1); s32.append_string (s)
|
||||
|
||||
when 'd' then
|
||||
if d < 10 then
|
||||
s32.append_integer (0)
|
||||
end
|
||||
s32.append_integer (d)
|
||||
when 'j' then
|
||||
s32.append_integer (d)
|
||||
-- when 'z' then s32.append_integer (a_date_time.date.*year)
|
||||
when 'a' then
|
||||
if h >= 12 then
|
||||
s32.append_character ('p'); s32.append_character ('m')
|
||||
else
|
||||
s32.append_character ('a'); s32.append_character ('m')
|
||||
end
|
||||
when 'A' then
|
||||
if h >= 12 then
|
||||
s32.append_character ('P'); s32.append_character ('M')
|
||||
else
|
||||
s32.append_character ('A'); s32.append_character ('M')
|
||||
end
|
||||
when 'g','h' then
|
||||
if h >= 12 then
|
||||
if c = 'h' and h - 12 < 10 then
|
||||
s32.append_integer (0)
|
||||
end
|
||||
s32.append_integer (h - 12)
|
||||
else
|
||||
if c = 'h' and h < 10 then
|
||||
s32.append_integer (0)
|
||||
end
|
||||
s32.append_integer (h)
|
||||
end
|
||||
when 'G', 'H' then
|
||||
if c = 'H' and h < 10 then
|
||||
s32.append_integer (0)
|
||||
end
|
||||
s32.append_integer (h)
|
||||
when 'i' then
|
||||
if mn < 10 then
|
||||
s32.append_integer (0)
|
||||
end
|
||||
s32.append_integer (mn)
|
||||
when 's' then
|
||||
if sec < 10 then
|
||||
s32.append_integer (0)
|
||||
end
|
||||
s32.append_integer (sec)
|
||||
when 'u' then
|
||||
s32.append_double (a_date_time.fine_second) -- CHECK result ...
|
||||
when 'w' then s32.append_integer (a_date_time.date.day_of_the_week - 1)
|
||||
when 'W' then s32.append_integer (a_date_time.date.week_of_year)
|
||||
when 'L' then
|
||||
if a_date_time.is_leap_year (y) then
|
||||
s32.append_integer (1)
|
||||
else
|
||||
s32.append_integer (0)
|
||||
end
|
||||
when '\' then
|
||||
if i < a_date_time_format.count then
|
||||
i := i + 1
|
||||
s32.append_character (a_date_time_format[i])
|
||||
else
|
||||
s32.append_character ('\')
|
||||
end
|
||||
else
|
||||
s32.append_character (c)
|
||||
end
|
||||
i := i + 1
|
||||
end
|
||||
Result := s32
|
||||
end
|
||||
|
||||
timezoned_date_time (a_utc_date_time: DATE_TIME): DATE_TIME
|
||||
do
|
||||
if utc_offset /= 0 or utc_offset_minute /= 0 then
|
||||
Result := a_utc_date_time.deep_twin
|
||||
Result.hour_add (utc_offset)
|
||||
Result.minute_add (utc_offset_minute)
|
||||
else
|
||||
Result := a_utc_date_time
|
||||
end
|
||||
end
|
||||
|
||||
short_date (a_utc_date_time: DATE_TIME): STRING_8
|
||||
local
|
||||
l_date_time: DATE_TIME
|
||||
l_now: DATE
|
||||
cy,cm,cd,y,m,d,h,i: INTEGER
|
||||
s: STRING
|
||||
l_duration: DATE_TIME_DURATION --like {DATE_TIME}.relative_duration
|
||||
do
|
||||
create l_date_time.make_now_utc
|
||||
l_duration := l_date_time.relative_duration (a_utc_date_time)
|
||||
|
||||
create l_now.make_now
|
||||
cy := l_now.year
|
||||
cm := l_now.month
|
||||
cd := l_now.day
|
||||
|
||||
l_date_time := timezoned_date_time (a_utc_date_time)
|
||||
y := l_date_time.date.year
|
||||
m := l_date_time.date.month
|
||||
d := l_date_time.date.day
|
||||
|
||||
if cy /= y then
|
||||
if attached date_time_format as l_format then
|
||||
Result := formatted_date_time (l_date_time, l_format)
|
||||
else
|
||||
Result := d.out + "/" + m.out + "/" + y.out
|
||||
end
|
||||
elseif cm /= m then
|
||||
s := l_date_time.months_text [m].string
|
||||
s.to_lower; s.put (s.item (1).as_upper, 1)
|
||||
Result := s
|
||||
Result.append (" ")
|
||||
Result.append (d.out)
|
||||
elseif cd /= d then
|
||||
s := l_date_time.months_text [m].string
|
||||
s.to_lower; s.put (s.item (1).as_upper, 1)
|
||||
Result := s
|
||||
Result.append (" ")
|
||||
Result.append (d.out)
|
||||
if l_duration.day < 7 then
|
||||
s := l_date_time.days_text [l_date_time.date.day_of_the_week].string
|
||||
s.to_lower; s.put (s.item (1).as_upper, 1)
|
||||
Result.append (" - " + s)
|
||||
end
|
||||
else
|
||||
check cd = d and cy = y and cm = m end
|
||||
h := l_date_time.time.hour
|
||||
i := l_date_time.time.minute
|
||||
if h < 10 then
|
||||
Result := "0" + h.out
|
||||
else
|
||||
Result := h.out
|
||||
end
|
||||
Result.append (":")
|
||||
if i < 10 then
|
||||
Result.append ("0")
|
||||
end
|
||||
Result.append (i.out)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
smart_date_duration (a_utc_date_time: DATE_TIME): STRING_8
|
||||
local
|
||||
l_date_time: DATE_TIME
|
||||
l_now: DATE_TIME
|
||||
l_duration: DATE_TIME_DURATION --like {DATE_TIME}.relative_duration
|
||||
l_duration_time: TIME_DURATION --like {DATE_TIME_DURATION}.time
|
||||
y,m,w,d,h,i: INTEGER
|
||||
w_now,w_utc: INTEGER
|
||||
l_s_code: NATURAL_32
|
||||
l_space_ago_string: STRING
|
||||
do
|
||||
l_s_code := ('s').natural_32_code
|
||||
l_space_ago_string := " ago"
|
||||
create l_now.make_now_utc
|
||||
l_duration := l_now.relative_duration (a_utc_date_time)
|
||||
y := l_duration.date.year
|
||||
m := l_duration.date.month
|
||||
d := l_duration.date.day
|
||||
w_now := l_now.date.week_of_year
|
||||
w_utc := a_utc_date_time.date.week_of_year
|
||||
if y > 0 then
|
||||
if y = 1 then
|
||||
Result := "last year"
|
||||
else
|
||||
Result := y.out + " years"
|
||||
-- if m > 0 then
|
||||
-- Result.append (" and " + m.out + " month")
|
||||
-- if m > 1 then
|
||||
-- Result.append_code (l_s_code)
|
||||
-- end
|
||||
-- end
|
||||
Result.append (l_space_ago_string)
|
||||
end
|
||||
elseif m > 0 then
|
||||
if m = 1 then
|
||||
Result := "last month"
|
||||
else
|
||||
Result := m.out + " months"
|
||||
-- if d > 0 then
|
||||
-- Result.append (" and " + d.out + " day")
|
||||
-- if d > 1 then
|
||||
-- Result.append_code (l_s_code)
|
||||
-- end
|
||||
-- end
|
||||
Result.append (l_space_ago_string)
|
||||
end
|
||||
elseif d >= 7 then
|
||||
w := d // 7
|
||||
if w = 1 and then w_now = w_utc + 1 then
|
||||
Result := "last week"
|
||||
else
|
||||
|
||||
Result := (w+1).out + " weeks"
|
||||
-- if d > 7 then
|
||||
-- Result.append (" and " + (d - 7).out + " day")
|
||||
-- if d - 7 > 1 then
|
||||
-- Result.append_code (l_s_code)
|
||||
-- end
|
||||
-- end
|
||||
Result.append (l_space_ago_string)
|
||||
end
|
||||
elseif d > 0 then
|
||||
if w_now /= w_utc then
|
||||
Result := "last week"
|
||||
else
|
||||
l_duration_time := l_duration.time
|
||||
if d = 1 then
|
||||
Result := "yesturday"
|
||||
else
|
||||
Result := d.out + " days"
|
||||
-- if d = 1 then
|
||||
-- h := l_duration_time.hour
|
||||
-- if h > 0 then
|
||||
-- Result.append (" and " + h.out + " hour")
|
||||
-- if h > 1 then
|
||||
-- Result.append_code (l_s_code)
|
||||
-- end
|
||||
-- end
|
||||
-- end
|
||||
Result.append (l_space_ago_string)
|
||||
end
|
||||
end
|
||||
elseif d = 0 then
|
||||
l_duration_time := l_duration.time
|
||||
h := l_duration_time.hour
|
||||
if h > 0 then
|
||||
if h = 1 then
|
||||
Result := "last hour"
|
||||
else
|
||||
Result := h.out + " hours"
|
||||
Result.append (l_space_ago_string)
|
||||
end
|
||||
else
|
||||
i := l_duration_time.minute
|
||||
if i = 0 then
|
||||
Result := l_duration_time.second.out + " second"
|
||||
if l_duration_time.second > 1 then
|
||||
Result.append_code (l_s_code)
|
||||
end
|
||||
else
|
||||
Result := i.out + " minute"
|
||||
if i > 1 then
|
||||
Result.append_code (l_s_code)
|
||||
end
|
||||
end
|
||||
Result.append (l_space_ago_string)
|
||||
end
|
||||
else
|
||||
l_date_time := timezoned_date_time (a_utc_date_time)
|
||||
if attached date_time_format as l_format then
|
||||
Result := formatted_date_time (l_date_time, l_format)
|
||||
else
|
||||
Result := l_date_time.out
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
note
|
||||
copyright: "2011-2017, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
license: "Eiffel Forum License v2 (see http://www.eiffel.com/licensing/forum.txt)"
|
||||
end
|
||||
@@ -57,14 +57,14 @@ feature {WSF_RESPONSE} -- Output
|
||||
h.put_content_length (s.count)
|
||||
end
|
||||
if not h.has_content_type then
|
||||
h.put_content_type_text_html
|
||||
h.put_content_type_with_charset ({HTTP_MIME_TYPES}.text_html, "utf-8")
|
||||
end
|
||||
res.put_header_text (h.string)
|
||||
res.put_string (s)
|
||||
end
|
||||
|
||||
note
|
||||
copyright: "2011-2012, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
copyright: "2011-2017, Jocelyn Fiat, Javier Velilla, Eiffel Software and others"
|
||||
license: "Eiffel Forum License v2 (see http://www.eiffel.com/licensing/forum.txt)"
|
||||
source: "[
|
||||
Eiffel Software
|
||||
|
||||